author     Chandan Singh <csingh43@bloomberg.net>    2019-11-05 13:00:30 +0000
committer  Chandan Singh <csingh43@bloomberg.net>    2019-11-05 13:00:30 +0000
commit     e06c2295b063245dbdb2397e5bd8c4d0a79ba10d (patch)
tree       224a111e1daee5a7d450e7d5e0c31fd94c1e19c6
parent     c1f423f8653c5a9e3e72283879e8cecabf25ed3f (diff)
Blacken codebase
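
For reference, a whole-tree reformatting like the one recorded below can be reproduced with a run along these lines. This is only a sketch: the commit does not record the exact Black invocation, configuration, or version, so the directories and flags here are assumptions.

import subprocess

# Reformat the source and test trees touched by this diff with Black
# (assumed to be installed and on PATH); directories are assumptions.
subprocess.run(["black", "src/buildstream", "tests"], check=True)

# A second pass with --check confirms the result is stable, i.e. Black
# would make no further changes.
subprocess.run(["black", "--check", "src/buildstream", "tests"], check=True)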
-rw-r--r--  src/buildstream/__init__.py | 4
-rw-r--r--  src/buildstream/__main__.py | 3
-rw-r--r--  src/buildstream/_artifact.py | 80
-rw-r--r--  src/buildstream/_artifactcache.py | 137
-rw-r--r--  src/buildstream/_artifactelement.py | 12
-rw-r--r--  src/buildstream/_basecache.py | 73
-rw-r--r--  src/buildstream/_cachekey.py | 4
-rw-r--r--  src/buildstream/_cas/cascache.py | 291
-rw-r--r--  src/buildstream/_cas/casremote.py | 42
-rw-r--r--  src/buildstream/_cas/casserver.py | 194
-rw-r--r--  src/buildstream/_context.py | 222
-rw-r--r--  src/buildstream/_elementfactory.py | 15
-rw-r--r--  src/buildstream/_exceptions.py | 65
-rw-r--r--  src/buildstream/_frontend/app.py | 591
-rw-r--r--  src/buildstream/_frontend/cli.py | 1255
-rw-r--r--  src/buildstream/_frontend/complete.py | 106
-rw-r--r--  src/buildstream/_frontend/linuxapp.py | 7
-rw-r--r--  src/buildstream/_frontend/profile.py | 3
-rw-r--r--  src/buildstream/_frontend/status.py | 200
-rw-r--r--  src/buildstream/_frontend/widget.py | 491
-rw-r--r--  src/buildstream/_fuse/fuse.py | 838
-rw-r--r--  src/buildstream/_fuse/hardlinks.py | 45
-rw-r--r--  src/buildstream/_fuse/mount.py | 42
-rw-r--r--  src/buildstream/_gitsourcebase.py | 563
-rw-r--r--  src/buildstream/_includes.py | 101
-rw-r--r--  src/buildstream/_loader/loader.py | 315
-rw-r--r--  src/buildstream/_loader/metaelement.py | 20
-rw-r--r--  src/buildstream/_loader/metasource.py | 6
-rw-r--r--  src/buildstream/_message.py | 76
-rw-r--r--  src/buildstream/_messenger.py | 101
-rw-r--r--  src/buildstream/_options/option.py | 16
-rw-r--r--  src/buildstream/_options/optionarch.py | 14
-rw-r--r--  src/buildstream/_options/optionbool.py | 16
-rw-r--r--  src/buildstream/_options/optioneltmask.py | 4
-rw-r--r--  src/buildstream/_options/optionenum.py | 30
-rw-r--r--  src/buildstream/_options/optionflags.py | 34
-rw-r--r--  src/buildstream/_options/optionos.py | 3
-rw-r--r--  src/buildstream/_options/optionpool.py | 80
-rw-r--r--  src/buildstream/_pipeline.py | 133
-rw-r--r--  src/buildstream/_platform/darwin.py | 7
-rw-r--r--  src/buildstream/_platform/fallback.py | 10
-rw-r--r--  src/buildstream/_platform/linux.py | 38
-rw-r--r--  src/buildstream/_platform/platform.py | 81
-rw-r--r--  src/buildstream/_platform/win32.py | 5
-rw-r--r--  src/buildstream/_plugincontext.py | 171
-rw-r--r--  src/buildstream/_profile.py | 53
-rw-r--r--  src/buildstream/_project.py | 478
-rw-r--r--  src/buildstream/_projectrefs.py | 25
-rw-r--r--  src/buildstream/_protos/buildstream/v2/artifact_pb2.py | 957
-rw-r--r--  src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py | 83
-rw-r--r--  src/buildstream/_protos/buildstream/v2/buildstream_pb2.py | 1028
-rw-r--r--  src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py | 164
-rw-r--r--  src/buildstream/_protos/buildstream/v2/source_pb2.py | 429
-rw-r--r--  src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py | 83
-rw-r--r--  src/buildstream/_protos/google/api/annotations_pb2.py | 52
-rw-r--r--  src/buildstream/_protos/google/api/annotations_pb2_grpc.py | 1
-rw-r--r--  src/buildstream/_protos/google/api/http_pb2.py | 557
-rw-r--r--  src/buildstream/_protos/google/api/http_pb2_grpc.py | 1
-rw-r--r--  src/buildstream/_protos/google/bytestream/bytestream_pb2.py | 720
-rw-r--r--  src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py | 107
-rw-r--r--  src/buildstream/_protos/google/longrunning/operations_pb2.py | 837
-rw-r--r--  src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py | 135
-rw-r--r--  src/buildstream/_protos/google/rpc/code_pb2.py | 189
-rw-r--r--  src/buildstream/_protos/google/rpc/code_pb2_grpc.py | 1
-rw-r--r--  src/buildstream/_protos/google/rpc/status_pb2.py | 154
-rw-r--r--  src/buildstream/_protos/google/rpc/status_pb2_grpc.py | 1
-rw-r--r--  src/buildstream/_remote.py | 105
-rw-r--r--  src/buildstream/_scheduler/jobs/elementjob.py | 24
-rw-r--r--  src/buildstream/_scheduler/jobs/job.py | 244
-rw-r--r--  src/buildstream/_scheduler/jobs/jobpickler.py | 13
-rw-r--r--  src/buildstream/_scheduler/queues/buildqueue.py | 13
-rw-r--r--  src/buildstream/_scheduler/queues/queue.py | 87
-rw-r--r--  src/buildstream/_scheduler/resources.py | 16
-rw-r--r--  src/buildstream/_scheduler/scheduler.py | 109
-rw-r--r--  src/buildstream/_signals.py | 20
-rw-r--r--  src/buildstream/_site.py | 14
-rw-r--r--  src/buildstream/_sourcecache.py | 93
-rw-r--r--  src/buildstream/_sourcefactory.py | 15
-rw-r--r--  src/buildstream/_state.py | 17
-rw-r--r--  src/buildstream/_stream.py | 829
-rw-r--r--  src/buildstream/_version.py | 153
-rw-r--r--  src/buildstream/_workspaces.py | 148
-rw-r--r--  src/buildstream/buildelement.py | 78
-rw-r--r--  src/buildstream/element.py | 958
-rw-r--r--  src/buildstream/plugin.py | 187
-rw-r--r--  src/buildstream/plugins/elements/autotools.py | 5
-rw-r--r--  src/buildstream/plugins/elements/compose.py | 51
-rw-r--r--  src/buildstream/plugins/elements/filter.py | 88
-rw-r--r--  src/buildstream/plugins/elements/import.py | 48
-rw-r--r--  src/buildstream/plugins/elements/junction.py | 34
-rw-r--r--  src/buildstream/plugins/elements/manual.py | 5
-rw-r--r--  src/buildstream/plugins/elements/pip.py | 5
-rw-r--r--  src/buildstream/plugins/elements/script.py | 12
-rw-r--r--  src/buildstream/plugins/elements/stack.py | 4
-rw-r--r--  src/buildstream/plugins/sources/_downloadablefilesource.py | 82
-rw-r--r--  src/buildstream/plugins/sources/bzr.py | 148
-rw-r--r--  src/buildstream/plugins/sources/deb.py | 10
-rw-r--r--  src/buildstream/plugins/sources/local.py | 7
-rw-r--r--  src/buildstream/plugins/sources/patch.py | 24
-rw-r--r--  src/buildstream/plugins/sources/pip.py | 113
-rw-r--r--  src/buildstream/plugins/sources/remote.py | 14
-rw-r--r--  src/buildstream/plugins/sources/tar.py | 58
-rw-r--r--  src/buildstream/plugins/sources/workspace.py | 11
-rw-r--r--  src/buildstream/plugins/sources/zip.py | 24
-rw-r--r--  src/buildstream/sandbox/_config.py | 11
-rw-r--r--  src/buildstream/sandbox/_mount.py | 35
-rw-r--r--  src/buildstream/sandbox/_mounter.py | 65
-rw-r--r--  src/buildstream/sandbox/_sandboxbuildbox.py | 84
-rw-r--r--  src/buildstream/sandbox/_sandboxbwrap.py | 148
-rw-r--r--  src/buildstream/sandbox/_sandboxchroot.py | 94
-rw-r--r--  src/buildstream/sandbox/_sandboxdummy.py | 13
-rw-r--r--  src/buildstream/sandbox/_sandboxremote.py | 343
-rw-r--r--  src/buildstream/sandbox/sandbox.py | 144
-rw-r--r--  src/buildstream/scriptelement.py | 98
-rw-r--r--  src/buildstream/source.py | 295
-rw-r--r--  src/buildstream/storage/_casbaseddirectory.py | 223
-rw-r--r--  src/buildstream/storage/_filebaseddirectory.py | 121
-rw-r--r--  src/buildstream/storage/directory.py | 18
-rw-r--r--  src/buildstream/testing/__init__.py | 16
-rw-r--r--  src/buildstream/testing/_fixtures.py | 1
-rw-r--r--  src/buildstream/testing/_sourcetests/build_checkout.py | 39
-rw-r--r--  src/buildstream/testing/_sourcetests/conftest.py | 5
-rw-r--r--  src/buildstream/testing/_sourcetests/fetch.py | 62
-rw-r--r--  src/buildstream/testing/_sourcetests/mirror.py | 319
-rw-r--r--  src/buildstream/testing/_sourcetests/source_determinism.py | 82
-rw-r--r--  src/buildstream/testing/_sourcetests/track.py | 296
-rw-r--r--  src/buildstream/testing/_sourcetests/track_cross_junction.py | 153
-rw-r--r--  src/buildstream/testing/_sourcetests/utils.py | 19
-rw-r--r--  src/buildstream/testing/_sourcetests/workspace.py | 115
-rw-r--r--  src/buildstream/testing/_utils/junction.py | 39
-rw-r--r--  src/buildstream/testing/_utils/site.py | 43
-rw-r--r--  src/buildstream/testing/integration.py | 24
-rw-r--r--  src/buildstream/testing/repo.py | 7
-rw-r--r--  src/buildstream/testing/runcli.py | 312
-rw-r--r--  src/buildstream/types.py | 18
-rw-r--r--  src/buildstream/utils.py | 327
-rw-r--r--  tests/artifactcache/artifactservice.py | 21
-rw-r--r--  tests/artifactcache/config.py | 220
-rw-r--r--  tests/artifactcache/expiry.py | 270
-rw-r--r--  tests/artifactcache/junctions.py | 177
-rw-r--r--  tests/artifactcache/pull.py | 103
-rw-r--r--  tests/artifactcache/push.py | 117
-rw-r--r--  tests/cachekey/cachekey.py | 142
-rwxr-xr-x  tests/cachekey/update.py | 35
-rwxr-xr-x  tests/conftest.py | 95
-rw-r--r--  tests/elements/filter.py | 357
-rw-r--r--  tests/elements/filter/basic/element_plugins/dynamic.py | 9
-rw-r--r--  tests/examples/autotools.py | 66
-rw-r--r--  tests/examples/developing.py | 91
-rw-r--r--  tests/examples/first-project.py | 20
-rw-r--r--  tests/examples/flatpak-autotools.py | 71
-rw-r--r--  tests/examples/integration-commands.py | 41
-rw-r--r--  tests/examples/junctions.py | 62
-rw-r--r--  tests/examples/running-commands.py | 39
-rw-r--r--  tests/external_plugins.py | 28
-rw-r--r--  tests/format/assertion.py | 49
-rw-r--r--  tests/format/dependencies.py | 164
-rw-r--r--  tests/format/include.py | 337
-rw-r--r--  tests/format/include_composition.py | 113
-rw-r--r--  tests/format/invalid_keys.py | 27
-rw-r--r--  tests/format/junctions.py | 352
-rw-r--r--  tests/format/listdirectiveerrors.py | 48
-rw-r--r--  tests/format/optionarch.py | 98
-rw-r--r--  tests/format/optionbool.py | 150
-rw-r--r--  tests/format/optioneltmask.py | 112
-rw-r--r--  tests/format/optionenum.py | 158
-rw-r--r--  tests/format/optionexports.py | 51
-rw-r--r--  tests/format/optionflags.py | 181
-rw-r--r--  tests/format/optionos.py | 55
-rw-r--r--  tests/format/optionoverrides.py | 17
-rw-r--r--  tests/format/options.py | 367
-rw-r--r--  tests/format/project.py | 189
-rw-r--r--  tests/format/project/plugin-no-load-ref/plugins/noloadref.py | 1
-rw-r--r--  tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py | 7
-rw-r--r--  tests/format/projectoverrides.py | 19
-rw-r--r--  tests/format/variables.py | 243
-rw-r--r--  tests/frontend/__init__.py | 6
-rw-r--r--  tests/frontend/artifact_delete.py | 152
-rw-r--r--  tests/frontend/artifact_list_contents.py | 90
-rw-r--r--  tests/frontend/artifact_log.py | 44
-rw-r--r--  tests/frontend/artifact_show.py | 82
-rw-r--r--  tests/frontend/buildcheckout.py | 830
-rw-r--r--  tests/frontend/completions.py | 513
-rw-r--r--  tests/frontend/compose_splits.py | 26
-rw-r--r--  tests/frontend/configurable_warnings.py | 43
-rw-r--r--  tests/frontend/configuredwarning/plugins/corewarn.py | 6
-rw-r--r--  tests/frontend/configuredwarning/plugins/warninga.py | 4
-rw-r--r--  tests/frontend/configuredwarning/plugins/warningb.py | 4
-rw-r--r--  tests/frontend/consistencyerror/plugins/consistencybug.py | 1
-rw-r--r--  tests/frontend/consistencyerror/plugins/consistencyerror.py | 6
-rw-r--r--  tests/frontend/cross_junction_workspace.py | 62
-rw-r--r--  tests/frontend/fetch.py | 116
-rw-r--r--  tests/frontend/help.py | 33
-rw-r--r--  tests/frontend/init.py | 151
-rw-r--r--  tests/frontend/large_directory.py | 29
-rw-r--r--  tests/frontend/logging.py | 97
-rw-r--r--  tests/frontend/main.py | 8
-rw-r--r--  tests/frontend/mirror.py | 450
-rw-r--r--  tests/frontend/order.py | 103
-rw-r--r--  tests/frontend/overlaps.py | 59
-rw-r--r--  tests/frontend/progress.py | 87
-rw-r--r--  tests/frontend/project/sources/fetch_source.py | 22
-rw-r--r--  tests/frontend/pull.py | 432
-rw-r--r--  tests/frontend/push.py | 560
-rw-r--r--  tests/frontend/rebuild.py | 17
-rw-r--r--  tests/frontend/remote-caches.py | 46
-rw-r--r--  tests/frontend/show.py | 607
-rw-r--r--  tests/frontend/source_checkout.py | 295
-rw-r--r--  tests/frontend/track.py | 269
-rw-r--r--  tests/frontend/version.py | 10
-rw-r--r--  tests/frontend/workspace.py | 1247
-rw-r--r--  tests/integration/artifact.py | 104
-rw-r--r--  tests/integration/autotools.py | 85
-rw-r--r--  tests/integration/build-uid.py | 47
-rw-r--r--  tests/integration/cachedfail.py | 214
-rw-r--r--  tests/integration/cmake.py | 56
-rw-r--r--  tests/integration/compose-symlinks.py | 15
-rw-r--r--  tests/integration/compose.py | 159
-rw-r--r--  tests/integration/filter.py | 29
-rw-r--r--  tests/integration/import.py | 58
-rw-r--r--  tests/integration/make.py | 35
-rw-r--r--  tests/integration/manual.py | 194
-rw-r--r--  tests/integration/messages.py | 71
-rw-r--r--  tests/integration/pip_element.py | 139
-rw-r--r--  tests/integration/pip_source.py | 230
-rw-r--r--  tests/integration/project/files/pip-source/app1.py | 4
-rw-r--r--  tests/integration/pullbuildtrees.py | 170
-rw-r--r--  tests/integration/sandbox-bwrap.py | 53
-rw-r--r--  tests/integration/script.py | 305
-rw-r--r--  tests/integration/shell.py | 427
-rw-r--r--  tests/integration/shellbuildtrees.py | 509
-rw-r--r--  tests/integration/sockets.py | 21
-rw-r--r--  tests/integration/source-determinism.py | 74
-rw-r--r--  tests/integration/stack.py | 24
-rw-r--r--  tests/integration/symlinks.py | 72
-rw-r--r--  tests/integration/workspace.py | 318
-rw-r--r--  tests/internals/cascache.py | 4
-rw-r--r--  tests/internals/context.py | 80
-rw-r--r--  tests/internals/loader.py | 35
-rw-r--r--  tests/internals/pluginfactory.py | 369
-rw-r--r--  tests/internals/pluginfactory/wrongtype/foo.py | 2
-rw-r--r--  tests/internals/pluginloading.py | 31
-rw-r--r--  tests/internals/pluginloading/customelement/pluginelements/foo.py | 1
-rw-r--r--  tests/internals/pluginloading/customsource/pluginsources/foo.py | 1
-rw-r--r--  tests/internals/storage.py | 15
-rw-r--r--  tests/internals/storage_vdir_import.py | 141
-rw-r--r--  tests/internals/utils_save_atomic.py | 48
-rw-r--r--  tests/internals/yaml.py | 706
-rw-r--r--  tests/plugins/deprecationwarnings/deprecationwarnings.py | 27
-rw-r--r--  tests/remoteexecution/buildfail.py | 42
-rw-r--r--  tests/remoteexecution/buildtree.py | 78
-rw-r--r--  tests/remoteexecution/junction.py | 82
-rw-r--r--  tests/remoteexecution/partial.py | 77
-rw-r--r--  tests/remoteexecution/simple.py | 50
-rw-r--r--  tests/sandboxes/fallback.py | 40
-rw-r--r--  tests/sandboxes/missing-command.py | 11
-rw-r--r--  tests/sandboxes/missing_dependencies.py | 71
-rw-r--r--  tests/sandboxes/mounting/mount_simple.py | 18
-rw-r--r--  tests/sandboxes/remote-exec-config.py | 82
-rw-r--r--  tests/sandboxes/selection.py | 61
-rw-r--r--  tests/sourcecache/cache.py | 55
-rw-r--r--  tests/sourcecache/config.py | 27
-rw-r--r--  tests/sourcecache/fetch.py | 110
-rw-r--r--  tests/sourcecache/project/plugins/elements/always_fail.py | 1
-rw-r--r--  tests/sourcecache/push.py | 175
-rw-r--r--  tests/sourcecache/source-checkout.py | 37
-rw-r--r--  tests/sourcecache/staging.py | 58
-rw-r--r--  tests/sourcecache/workspace.py | 71
-rw-r--r--  tests/sources/bzr.py | 33
-rw-r--r--  tests/sources/deb.py | 75
-rw-r--r--  tests/sources/git.py | 1044
-rw-r--r--  tests/sources/keytest.py | 3
-rw-r--r--  tests/sources/local.py | 183
-rw-r--r--  tests/sources/no-fetch-cached/plugins/sources/always_cached.py | 1
-rw-r--r--  tests/sources/no_fetch_cached.py | 24
-rw-r--r--  tests/sources/patch.py | 107
-rw-r--r--  tests/sources/pip.py | 46
-rw-r--r--  tests/sources/previous_source_access.py | 32
-rw-r--r--  tests/sources/previous_source_access/plugins/sources/foo_transform.py | 29
-rw-r--r--  tests/sources/remote.py | 183
-rw-r--r--  tests/sources/tar.py | 289
-rw-r--r--  tests/sources/zip.py | 125
-rw-r--r--  tests/testutils/__init__.py | 7
-rw-r--r--  tests/testutils/artifactshare.py | 40
-rw-r--r--  tests/testutils/context.py | 4
-rw-r--r--  tests/testutils/element_generators.py | 24
-rw-r--r--  tests/testutils/file_server.py | 4
-rw-r--r--  tests/testutils/filetypegenerator.py | 2
-rw-r--r--  tests/testutils/ftp_server.py | 6
-rw-r--r--  tests/testutils/http_server.py | 55
-rw-r--r--  tests/testutils/junction.py | 9
-rw-r--r--  tests/testutils/patch.py | 10
-rw-r--r--  tests/testutils/python_repo.py | 45
-rw-r--r--  tests/testutils/repo/bzr.py | 42
-rw-r--r--  tests/testutils/repo/git.py | 69
-rw-r--r--  tests/testutils/repo/tar.py | 15
-rw-r--r--  tests/testutils/repo/zip.py | 17
-rw-r--r--  tests/testutils/setuptools.py | 15
298 files changed, 23010 insertions, 16790 deletions
diff --git a/src/buildstream/__init__.py b/src/buildstream/__init__.py
index cd8d0f1cf..c78fcbbf6 100644
--- a/src/buildstream/__init__.py
+++ b/src/buildstream/__init__.py
@@ -19,11 +19,13 @@
# Plugin author facing APIs
import os
+
if "_BST_COMPLETION" not in os.environ:
# Special sauce to get the version from versioneer
from ._version import get_versions
- __version__ = get_versions()['version']
+
+ __version__ = get_versions()["version"]
del get_versions
from .utils import UtilError, ProgramNotFoundError
diff --git a/src/buildstream/__main__.py b/src/buildstream/__main__.py
index 4b0fdabfe..556a0f67e 100644
--- a/src/buildstream/__main__.py
+++ b/src/buildstream/__main__.py
@@ -11,7 +11,8 @@
# This is used when we need to run BuildStream before installing,
# like when we build documentation.
#
-if __name__ == '__main__':
+if __name__ == "__main__":
# pylint: disable=no-value-for-parameter
from ._frontend.cli import cli
+
cli()
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index e5174eaea..a06b189ed 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -47,7 +47,7 @@ from .storage._casbaseddirectory import CasBasedDirectory
# strong_key (str): The elements strong cache key, dependent on context
# weak_key (str): The elements weak cache key
#
-class Artifact():
+class Artifact:
version = 0
@@ -61,11 +61,19 @@ class Artifact():
self._tmpdir = context.tmpdir
self._proto = None
- self._metadata_keys = None # Strong and weak key tuple extracted from the artifact
- self._metadata_dependencies = None # Dictionary of dependency strong keys from the artifact
- self._metadata_workspaced = None # Boolean of whether it's a workspaced artifact
- self._metadata_workspaced_dependencies = None # List of which dependencies are workspaced from the artifact
- self._cached = None # Boolean of whether the artifact is cached
+ self._metadata_keys = (
+ None # Strong and weak key tuple extracted from the artifact
+ )
+ self._metadata_dependencies = (
+ None # Dictionary of dependency strong keys from the artifact
+ )
+ self._metadata_workspaced = (
+ None # Boolean of whether it's a workspaced artifact
+ )
+ self._metadata_workspaced_dependencies = (
+ None # List of which dependencies are workspaced from the artifact
+ )
+ self._cached = None # Boolean of whether the artifact is cached
# get_files():
#
@@ -193,12 +201,16 @@ class Artifact():
artifact.buildtree.CopyFrom(buildtreevdir._get_digest())
size += buildtreevdir.get_size()
- os.makedirs(os.path.dirname(os.path.join(
- self._artifactdir, element.get_artifact_name())), exist_ok=True)
+ os.makedirs(
+ os.path.dirname(
+ os.path.join(self._artifactdir, element.get_artifact_name())
+ ),
+ exist_ok=True,
+ )
keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
for key in keys:
path = os.path.join(self._artifactdir, element.get_artifact_name(key=key))
- with utils.save_file_atomic(path, mode='wb') as f:
+ with utils.save_file_atomic(path, mode="wb") as f:
f.write(artifact.SerializeToString())
return size
@@ -247,7 +259,7 @@ class Artifact():
# Load the public data from the artifact
artifact = self._get_proto()
meta_file = self._cas.objpath(artifact.public_data)
- data = _yaml.load(meta_file, shortname='public.yaml')
+ data = _yaml.load(meta_file, shortname="public.yaml")
return data
@@ -263,9 +275,11 @@ class Artifact():
def load_build_result(self):
artifact = self._get_proto()
- build_result = (artifact.build_success,
- artifact.build_error,
- artifact.build_error_details)
+ build_result = (
+ artifact.build_success,
+ artifact.build_error,
+ artifact.build_error_details,
+ )
return build_result
@@ -307,7 +321,9 @@ class Artifact():
# Extract proto
artifact = self._get_proto()
- self._metadata_dependencies = {dep.element_name: dep.cache_key for dep in artifact.build_deps}
+ self._metadata_dependencies = {
+ dep.element_name: dep.cache_key for dep in artifact.build_deps
+ }
return self._metadata_dependencies
@@ -345,8 +361,9 @@ class Artifact():
# Extract proto
artifact = self._get_proto()
- self._metadata_workspaced_dependencies = [dep.element_name for dep in artifact.build_deps
- if dep.was_workspaced]
+ self._metadata_workspaced_dependencies = [
+ dep.element_name for dep in artifact.build_deps if dep.was_workspaced
+ ]
return self._metadata_workspaced_dependencies
@@ -371,7 +388,11 @@ class Artifact():
if deps == Scope.BUILD:
try:
dependency_refs = [
- os.path.join(dep.project_name, _get_normal_name(dep.element_name), dep.cache_key)
+ os.path.join(
+ dep.project_name,
+ _get_normal_name(dep.element_name),
+ dep.cache_key,
+ )
for dep in artifact.build_deps
]
except AttributeError:
@@ -389,7 +410,9 @@ class Artifact():
# 1. It might trigger unnecessary rebuilds.
# 2. It would be impossible to support cyclic runtime dependencies
# in the future
- raise ArtifactError("Dependency scope: {} is not supported for artifacts".format(deps))
+ raise ArtifactError(
+ "Dependency scope: {} is not supported for artifacts".format(deps)
+ )
return dependency_refs
@@ -419,12 +442,18 @@ class Artifact():
# Determine whether directories are required
require_directories = context.require_artifact_directories
# Determine whether file contents are required as well
- require_files = (context.require_artifact_files or
- self._element._artifact_files_required())
+ require_files = (
+ context.require_artifact_files or self._element._artifact_files_required()
+ )
# Check whether 'files' subdirectory is available, with or without file contents
- if (require_directories and str(artifact.files) and
- not self._cas.contains_directory(artifact.files, with_files=require_files)):
+ if (
+ require_directories
+ and str(artifact.files)
+ and not self._cas.contains_directory(
+ artifact.files, with_files=require_files
+ )
+ ):
self._cached = False
return False
@@ -471,11 +500,12 @@ class Artifact():
key = self.get_extract_key()
- proto_path = os.path.join(self._artifactdir,
- self._element.get_artifact_name(key=key))
+ proto_path = os.path.join(
+ self._artifactdir, self._element.get_artifact_name(key=key)
+ )
artifact = ArtifactProto()
try:
- with open(proto_path, mode='r+b') as f:
+ with open(proto_path, mode="r+b") as f:
artifact.ParseFromString(f.read())
except FileNotFoundError:
return None
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index d9112cd58..2eb738db1 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -22,8 +22,12 @@ import grpc
from ._basecache import BaseCache
from ._exceptions import ArtifactError, CASError, CASCacheError, CASRemoteError
-from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
- artifact_pb2, artifact_pb2_grpc
+from ._protos.buildstream.v2 import (
+ buildstream_pb2,
+ buildstream_pb2_grpc,
+ artifact_pb2,
+ artifact_pb2_grpc,
+)
from ._remote import BaseRemote
from .storage._casbaseddirectory import CasBasedDirectory
@@ -37,7 +41,6 @@ from . import utils
# artifact remotes.
#
class ArtifactRemote(BaseRemote):
-
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.artifact_service = None
@@ -77,8 +80,10 @@ class ArtifactRemote(BaseRemote):
except grpc.RpcError as e:
# Check if this remote has the artifact service
if e.code() == grpc.StatusCode.UNIMPLEMENTED:
- return ("Configured remote does not have the BuildStream "
- "capabilities service. Please check remote configuration.")
+ return (
+ "Configured remote does not have the BuildStream "
+ "capabilities service. Please check remote configuration."
+ )
# Else raise exception with details
return "Remote initialisation failed: {}".format(e.details())
@@ -86,7 +91,7 @@ class ArtifactRemote(BaseRemote):
return "Configured remote does not support artifact service"
if self.spec.push and not response.artifact_capabilities.allow_updates:
- return 'Artifact server does not allow push'
+ return "Artifact server does not allow push"
return None
@@ -190,7 +195,12 @@ class ArtifactCache(BaseCache):
# ([str]) - A list of artifact names as generated in LRU order
#
def list_artifacts(self, *, glob=None):
- return [ref for _, ref in sorted(list(self._list_refs_mtimes(self.artifactdir, glob_expr=glob)))]
+ return [
+ ref
+ for _, ref in sorted(
+ list(self._list_refs_mtimes(self.artifactdir, glob_expr=glob))
+ )
+ ]
# remove():
#
@@ -229,7 +239,9 @@ class ArtifactCache(BaseCache):
removed = []
modified = []
- self.cas.diff_trees(digest_a, digest_b, added=added, removed=removed, modified=modified)
+ self.cas.diff_trees(
+ digest_a, digest_b, added=added, removed=removed, modified=modified
+ )
return modified, removed, added
@@ -259,14 +271,20 @@ class ArtifactCache(BaseCache):
# can perform file checks on their end
for remote in storage_remotes:
remote.init()
- element.status("Pushing data from artifact {} -> {}".format(display_key, remote))
+ element.status(
+ "Pushing data from artifact {} -> {}".format(display_key, remote)
+ )
if self._push_artifact_blobs(artifact, remote):
- element.info("Pushed data from artifact {} -> {}".format(display_key, remote))
+ element.info(
+ "Pushed data from artifact {} -> {}".format(display_key, remote)
+ )
else:
- element.info("Remote ({}) already has all data of artifact {} cached".format(
- remote, element._get_brief_display_key()
- ))
+ element.info(
+ "Remote ({}) already has all data of artifact {} cached".format(
+ remote, element._get_brief_display_key()
+ )
+ )
for remote in index_remotes:
remote.init()
@@ -276,9 +294,11 @@ class ArtifactCache(BaseCache):
element.info("Pushed artifact {} -> {}".format(display_key, remote))
pushed = True
else:
- element.info("Remote ({}) already has artifact {} cached".format(
- remote, element._get_brief_display_key()
- ))
+ element.info(
+ "Remote ({}) already has artifact {} cached".format(
+ remote, element._get_brief_display_key()
+ )
+ )
return pushed
@@ -296,7 +316,7 @@ class ArtifactCache(BaseCache):
#
def pull(self, element, key, *, pull_buildtrees=False):
artifact = None
- display_key = key[:self.context.log_key_length]
+ display_key = key[: self.context.log_key_length]
project = element._get_project()
errors = []
@@ -311,16 +331,20 @@ class ArtifactCache(BaseCache):
element.info("Pulled artifact {} <- {}".format(display_key, remote))
break
else:
- element.info("Remote ({}) does not have artifact {} cached".format(
- remote, display_key
- ))
+ element.info(
+ "Remote ({}) does not have artifact {} cached".format(
+ remote, display_key
+ )
+ )
except CASError as e:
element.warn("Could not pull from remote {}: {}".format(remote, e))
errors.append(e)
if errors and not artifact:
- raise ArtifactError("Failed to pull artifact {}".format(display_key),
- detail="\n".join(str(e) for e in errors))
+ raise ArtifactError(
+ "Failed to pull artifact {}".format(display_key),
+ detail="\n".join(str(e) for e in errors),
+ )
# If we don't have an artifact, we can't exactly pull our
# artifact
@@ -332,22 +356,32 @@ class ArtifactCache(BaseCache):
for remote in self._storage_remotes[project]:
remote.init()
try:
- element.status("Pulling data for artifact {} <- {}".format(display_key, remote))
-
- if self._pull_artifact_storage(element, artifact, remote, pull_buildtrees=pull_buildtrees):
- element.info("Pulled data for artifact {} <- {}".format(display_key, remote))
+ element.status(
+ "Pulling data for artifact {} <- {}".format(display_key, remote)
+ )
+
+ if self._pull_artifact_storage(
+ element, artifact, remote, pull_buildtrees=pull_buildtrees
+ ):
+ element.info(
+ "Pulled data for artifact {} <- {}".format(display_key, remote)
+ )
return True
- element.info("Remote ({}) does not have artifact {} cached".format(
- remote, display_key
- ))
+ element.info(
+ "Remote ({}) does not have artifact {} cached".format(
+ remote, display_key
+ )
+ )
except CASError as e:
element.warn("Could not pull from remote {}: {}".format(remote, e))
errors.append(e)
if errors:
- raise ArtifactError("Failed to pull artifact {}".format(display_key),
- detail="\n".join(str(e) for e in errors))
+ raise ArtifactError(
+ "Failed to pull artifact {}".format(display_key),
+ detail="\n".join(str(e) for e in errors),
+ )
return False
@@ -389,8 +423,10 @@ class ArtifactCache(BaseCache):
push_remotes = []
if not push_remotes:
- raise ArtifactError("push_message was called, but no remote artifact " +
- "servers are configured as push remotes.")
+ raise ArtifactError(
+ "push_message was called, but no remote artifact "
+ + "servers are configured as push remotes."
+ )
for remote in push_remotes:
message_digest = remote.push_message(message)
@@ -411,8 +447,10 @@ class ArtifactCache(BaseCache):
newref = element.get_artifact_name(newkey)
if not os.path.exists(os.path.join(self.artifactdir, newref)):
- os.link(os.path.join(self.artifactdir, oldref),
- os.path.join(self.artifactdir, newref))
+ os.link(
+ os.path.join(self.artifactdir, oldref),
+ os.path.join(self.artifactdir, newref),
+ )
# get_artifact_logs():
#
@@ -426,7 +464,7 @@ class ArtifactCache(BaseCache):
#
def get_artifact_logs(self, ref):
cache_id = self.cas.resolve_ref(ref, update_mtime=True)
- vdir = CasBasedDirectory(self.cas, digest=cache_id).descend('logs')
+ vdir = CasBasedDirectory(self.cas, digest=cache_id).descend("logs")
return vdir
# fetch_missing_blobs():
@@ -518,7 +556,7 @@ class ArtifactCache(BaseCache):
for root, _, files in os.walk(self.artifactdir):
for artifact_file in files:
artifact = artifact_pb2.Artifact()
- with open(os.path.join(root, artifact_file), 'r+b') as f:
+ with open(os.path.join(root, artifact_file), "r+b") as f:
artifact.ParseFromString(f.read())
if str(artifact.files):
@@ -536,7 +574,7 @@ class ArtifactCache(BaseCache):
for root, _, files in os.walk(self.artifactdir):
for artifact_file in files:
artifact = artifact_pb2.Artifact()
- with open(os.path.join(root, artifact_file), 'r+b') as f:
+ with open(os.path.join(root, artifact_file), "r+b") as f:
artifact.ParseFromString(f.read())
if str(artifact.public_data):
@@ -584,11 +622,15 @@ class ArtifactCache(BaseCache):
except CASRemoteError as cas_error:
if cas_error.reason != "cache-too-full":
- raise ArtifactError("Failed to push artifact blobs: {}".format(cas_error))
+ raise ArtifactError(
+ "Failed to push artifact blobs: {}".format(cas_error)
+ )
return False
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
- raise ArtifactError("Failed to push artifact blobs: {}".format(e.details()))
+ raise ArtifactError(
+ "Failed to push artifact blobs: {}".format(e.details())
+ )
return False
return True
@@ -613,7 +655,9 @@ class ArtifactCache(BaseCache):
artifact_proto = artifact._get_proto()
- keys = list(utils._deduplicate([artifact_proto.strong_key, artifact_proto.weak_key]))
+ keys = list(
+ utils._deduplicate([artifact_proto.strong_key, artifact_proto.weak_key])
+ )
# Check whether the artifact is on the server
for key in keys:
@@ -621,15 +665,18 @@ class ArtifactCache(BaseCache):
remote.get_artifact(element.get_artifact_name(key=key))
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
- raise ArtifactError("Error checking artifact cache: {}"
- .format(e.details()))
+ raise ArtifactError(
+ "Error checking artifact cache: {}".format(e.details())
+ )
else:
return False
# If not, we send the artifact proto
for key in keys:
try:
- remote.update_artifact(element.get_artifact_name(key=key), artifact_proto)
+ remote.update_artifact(
+ element.get_artifact_name(key=key), artifact_proto
+ )
except grpc.RpcError as e:
raise ArtifactError("Failed to push artifact: {}".format(e.details()))
@@ -711,7 +758,7 @@ class ArtifactCache(BaseCache):
# Write the artifact proto to cache
artifact_path = os.path.join(self.artifactdir, artifact_name)
os.makedirs(os.path.dirname(artifact_path), exist_ok=True)
- with utils.save_file_atomic(artifact_path, mode='wb') as f:
+ with utils.save_file_atomic(artifact_path, mode="wb") as f:
f.write(artifact.SerializeToString())
return artifact
diff --git a/src/buildstream/_artifactelement.py b/src/buildstream/_artifactelement.py
index 48c3d1769..dfdd751a3 100644
--- a/src/buildstream/_artifactelement.py
+++ b/src/buildstream/_artifactelement.py
@@ -40,7 +40,7 @@ if TYPE_CHECKING:
class ArtifactElement(Element):
# A hash of ArtifactElement by ref
- __instantiated_artifacts = {} # type: Dict[str, ArtifactElement]
+ __instantiated_artifacts = {} # type: Dict[str, ArtifactElement]
# ArtifactElement's require this as the sandbox will use a normal
# directory when we checkout
@@ -138,7 +138,7 @@ class ArtifactElement(Element):
# sandbox (Sandbox)
#
def configure_sandbox(self, sandbox):
- install_root = self.get_variable('install-root')
+ install_root = self.get_variable("install-root")
# Tell the sandbox to mount the build root and install root
sandbox.mark_directory(install_root)
@@ -173,11 +173,15 @@ class ArtifactElement(Element):
#
def verify_artifact_ref(ref):
try:
- project, element, key = ref.split('/', 2) # This will raise a Value error if unable to split
+ project, element, key = ref.split(
+ "/", 2
+ ) # This will raise a Value error if unable to split
# Explicitly raise a ValueError if the key length is not as expected
if not _cachekey.is_key(key):
raise ValueError
except ValueError:
- raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
+ raise ArtifactElementError(
+ "Artifact: {} is not of the expected format".format(ref)
+ )
return project, element, key
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index fc2e92456..d277fa504 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -37,21 +37,21 @@ if TYPE_CHECKING:
# Base Cache for Caches to derive from
#
-class BaseCache():
+class BaseCache:
# None of these should ever be called in the base class, but this appeases
# pylint to some degree
- spec_name = None # type: str
- spec_error = None # type: Type[BstError]
- config_node_name = None # type: str
- index_remote_class = None # type: Type[BaseRemote]
+ spec_name = None # type: str
+ spec_error = None # type: Type[BstError]
+ config_node_name = None # type: str
+ index_remote_class = None # type: Type[BaseRemote]
storage_remote_class = CASRemote # type: Type[BaseRemote]
def __init__(self, context):
self.context = context
self.cas = context.get_cascache()
- self._remotes_setup = False # Check to prevent double-setup of remotes
+ self._remotes_setup = False # Check to prevent double-setup of remotes
# Per-project list of Remote instances.
self._storage_remotes = {}
self._index_remotes = {}
@@ -68,7 +68,9 @@ class BaseCache():
# against fork() with open gRPC channels.
#
def has_open_grpc_channels(self):
- for project_remotes in chain(self._index_remotes.values(), self._storage_remotes.values()):
+ for project_remotes in chain(
+ self._index_remotes.values(), self._storage_remotes.values()
+ ):
for remote in project_remotes:
if remote.channel:
return True
@@ -80,7 +82,9 @@ class BaseCache():
#
def close_grpc_channels(self):
# Close all remotes and their gRPC channels
- for project_remotes in chain(self._index_remotes.values(), self._storage_remotes.values()):
+ for project_remotes in chain(
+ self._index_remotes.values(), self._storage_remotes.values()
+ ):
for remote in project_remotes:
remote.close()
@@ -116,8 +120,12 @@ class BaseCache():
artifacts = config_node.get_sequence(cls.config_node_name, default=[])
except LoadError:
provenance = config_node.get_node(cls.config_node_name).get_provenance()
- raise _yaml.LoadError("{}: '{}' must be a single remote mapping, or a list of mappings"
- .format(provenance, cls.config_node_name), _yaml.LoadErrorReason.INVALID_DATA)
+ raise _yaml.LoadError(
+ "{}: '{}' must be a single remote mapping, or a list of mappings".format(
+ provenance, cls.config_node_name
+ ),
+ _yaml.LoadErrorReason.INVALID_DATA,
+ )
for spec_node in artifacts:
cache_specs.append(RemoteSpec.new_from_config_node(spec_node))
@@ -144,8 +152,9 @@ class BaseCache():
project_specs = getattr(project, cls.spec_name)
context_specs = getattr(context, cls.spec_name)
- return list(utils._deduplicate(
- project_extra_specs + project_specs + context_specs))
+ return list(
+ utils._deduplicate(project_extra_specs + project_specs + context_specs)
+ )
# setup_remotes():
#
@@ -198,7 +207,9 @@ class BaseCache():
# on_failure (callable): Called if we fail to contact one of the caches.
#
def initialize_remotes(self, *, on_failure=None):
- index_remotes, storage_remotes = self._create_remote_instances(on_failure=on_failure)
+ index_remotes, storage_remotes = self._create_remote_instances(
+ on_failure=on_failure
+ )
# Assign remote instances to their respective projects
for project in self.context.get_projects():
@@ -221,8 +232,12 @@ class BaseCache():
yield remote_list[remote_spec]
- self._index_remotes[project] = list(get_remotes(index_remotes, remote_specs))
- self._storage_remotes[project] = list(get_remotes(storage_remotes, remote_specs))
+ self._index_remotes[project] = list(
+ get_remotes(index_remotes, remote_specs)
+ )
+ self._storage_remotes[project] = list(
+ get_remotes(storage_remotes, remote_specs)
+ )
# has_fetch_remotes():
#
@@ -266,8 +281,9 @@ class BaseCache():
# Check whether the specified element's project has push remotes
index_remotes = self._index_remotes[plugin._get_project()]
storage_remotes = self._storage_remotes[plugin._get_project()]
- return (any(remote.spec.push for remote in index_remotes) and
- any(remote.spec.push for remote in storage_remotes))
+ return any(remote.spec.push for remote in index_remotes) and any(
+ remote.spec.push for remote in storage_remotes
+ )
################################################
# Local Private Methods #
@@ -323,8 +339,9 @@ class BaseCache():
storage_remotes[remote_spec] = storage
self._has_fetch_remotes = storage_remotes and index_remotes
- self._has_push_remotes = (any(spec.push for spec in storage_remotes) and
- any(spec.push for spec in index_remotes))
+ self._has_push_remotes = any(spec.push for spec in storage_remotes) and any(
+ spec.push for spec in index_remotes
+ )
return index_remotes, storage_remotes
@@ -366,8 +383,7 @@ class BaseCache():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self.context.messenger.message(
- Message(message_type, message, **args))
+ self.context.messenger.message(Message(message_type, message, **args))
# _set_remotes():
#
@@ -392,9 +408,14 @@ class BaseCache():
#
def _initialize_remotes(self):
def remote_failed(remote, error):
- self._message(MessageType.WARN, "Failed to initialize remote {}: {}".format(remote.url, error))
-
- with self.context.messenger.timed_activity("Initializing remote caches", silent_nested=True):
+ self._message(
+ MessageType.WARN,
+ "Failed to initialize remote {}: {}".format(remote.url, error),
+ )
+
+ with self.context.messenger.timed_activity(
+ "Initializing remote caches", silent_nested=True
+ ):
self.initialize_remotes(on_failure=remote_failed)
# _list_refs_mtimes()
@@ -421,7 +442,9 @@ class BaseCache():
for root, _, files in os.walk(path):
for filename in files:
ref_path = os.path.join(root, filename)
- relative_path = os.path.relpath(ref_path, base_path) # Relative to refs head
+ relative_path = os.path.relpath(
+ ref_path, base_path
+ ) # Relative to refs head
if not glob_expr or fnmatch(relative_path, glob_expr):
# Obtain the mtime (the time a file was last modified)
yield (os.path.getmtime(ref_path), relative_path)
diff --git a/src/buildstream/_cachekey.py b/src/buildstream/_cachekey.py
index 89d47671e..8c6382bd5 100644
--- a/src/buildstream/_cachekey.py
+++ b/src/buildstream/_cachekey.py
@@ -62,5 +62,7 @@ def is_key(key):
# (str): An sha256 hex digest of the given value
#
def generate_key(value):
- ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode('utf-8')
+ ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode(
+ "utf-8"
+ )
return hashlib.sha256(ustring).hexdigest()
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 022730445..3caa745da 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -34,7 +34,10 @@ import time
import grpc
from .._protos.google.rpc import code_pb2
-from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
+from .._protos.build.bazel.remote.execution.v2 import (
+ remote_execution_pb2,
+ remote_execution_pb2_grpc,
+)
from .._protos.build.buildgrid import local_cas_pb2, local_cas_pb2_grpc
from .. import _signals, utils
@@ -68,15 +71,20 @@ class CASLogLevel(FastEnum):
# protect_session_blobs (bool): Disable expiry for blobs used in the current session
# log_level (LogLevel): Log level to give to buildbox-casd for logging
#
-class CASCache():
-
+class CASCache:
def __init__(
- self, path, *, casd=True, cache_quota=None, protect_session_blobs=True, log_level=CASLogLevel.WARNING
+ self,
+ path,
+ *,
+ casd=True,
+ cache_quota=None,
+ protect_session_blobs=True,
+ log_level=CASLogLevel.WARNING
):
- self.casdir = os.path.join(path, 'cas')
- self.tmpdir = os.path.join(path, 'tmp')
- os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
- os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
+ self.casdir = os.path.join(path, "cas")
+ self.tmpdir = os.path.join(path, "tmp")
+ os.makedirs(os.path.join(self.casdir, "refs", "heads"), exist_ok=True)
+ os.makedirs(os.path.join(self.casdir, "objects"), exist_ok=True)
os.makedirs(self.tmpdir, exist_ok=True)
self._casd_channel = None
@@ -88,19 +96,21 @@ class CASCache():
if casd:
# Place socket in global/user temporary directory to avoid hitting
# the socket path length limit.
- self._casd_socket_tempdir = tempfile.mkdtemp(prefix='buildstream')
- self._casd_socket_path = os.path.join(self._casd_socket_tempdir, 'casd.sock')
+ self._casd_socket_tempdir = tempfile.mkdtemp(prefix="buildstream")
+ self._casd_socket_path = os.path.join(
+ self._casd_socket_tempdir, "casd.sock"
+ )
- casd_args = [utils.get_host_tool('buildbox-casd')]
- casd_args.append('--bind=unix:' + self._casd_socket_path)
- casd_args.append('--log-level=' + log_level.value)
+ casd_args = [utils.get_host_tool("buildbox-casd")]
+ casd_args.append("--bind=unix:" + self._casd_socket_path)
+ casd_args.append("--log-level=" + log_level.value)
if cache_quota is not None:
- casd_args.append('--quota-high={}'.format(int(cache_quota)))
- casd_args.append('--quota-low={}'.format(int(cache_quota / 2)))
+ casd_args.append("--quota-high={}".format(int(cache_quota)))
+ casd_args.append("--quota-low={}".format(int(cache_quota / 2)))
if protect_session_blobs:
- casd_args.append('--protect-session-blobs')
+ casd_args.append("--protect-session-blobs")
casd_args.append(path)
@@ -112,7 +122,8 @@ class CASCache():
# The frontend will take care of it if needed
with _signals.blocked([signal.SIGINT], ignore=False):
self._casd_process = subprocess.Popen(
- casd_args, cwd=path, stdout=logfile_fp, stderr=subprocess.STDOUT)
+ casd_args, cwd=path, stdout=logfile_fp, stderr=subprocess.STDOUT
+ )
self._cache_usage_monitor = _CASCacheUsageMonitor(self)
else:
@@ -123,16 +134,16 @@ class CASCache():
# Popen objects are not pickle-able, however, child processes only
# need the information whether a casd subprocess was started or not.
- assert '_casd_process' in state
- state['_casd_process'] = bool(self._casd_process)
+ assert "_casd_process" in state
+ state["_casd_process"] = bool(self._casd_process)
# The usage monitor is not pickle-able, but we also don't need it in
# child processes currently. Make sure that if this changes, we get a
# bug report, by setting _cache_usage_monitor_forbidden.
- assert '_cache_usage_monitor' in state
- assert '_cache_usage_monitor_forbidden' in state
- state['_cache_usage_monitor'] = None
- state['_cache_usage_monitor_forbidden'] = True
+ assert "_cache_usage_monitor" in state
+ assert "_cache_usage_monitor_forbidden" in state
+ state["_cache_usage_monitor"] = None
+ state["_cache_usage_monitor_forbidden"] = True
return state
@@ -144,16 +155,24 @@ class CASCache():
# casd is not ready yet, try again after a 10ms delay,
# but don't wait for more than 15s
if time.time() > self._casd_start_time + 15:
- raise CASCacheError("Timed out waiting for buildbox-casd to become ready")
+ raise CASCacheError(
+ "Timed out waiting for buildbox-casd to become ready"
+ )
time.sleep(0.01)
- self._casd_channel = grpc.insecure_channel('unix:' + self._casd_socket_path)
- self._casd_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self._casd_channel)
- self._local_cas = local_cas_pb2_grpc.LocalContentAddressableStorageStub(self._casd_channel)
+ self._casd_channel = grpc.insecure_channel("unix:" + self._casd_socket_path)
+ self._casd_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(
+ self._casd_channel
+ )
+ self._local_cas = local_cas_pb2_grpc.LocalContentAddressableStorageStub(
+ self._casd_channel
+ )
# Call GetCapabilities() to establish connection to casd
- capabilities = remote_execution_pb2_grpc.CapabilitiesStub(self._casd_channel)
+ capabilities = remote_execution_pb2_grpc.CapabilitiesStub(
+ self._casd_channel
+ )
capabilities.GetCapabilities(remote_execution_pb2.GetCapabilitiesRequest())
# _get_cas():
@@ -179,10 +198,12 @@ class CASCache():
# Preflight check.
#
def preflight(self):
- headdir = os.path.join(self.casdir, 'refs', 'heads')
- objdir = os.path.join(self.casdir, 'objects')
+ headdir = os.path.join(self.casdir, "refs", "heads")
+ objdir = os.path.join(self.casdir, "objects")
if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
- raise CASCacheError("CAS repository check failed for '{}'".format(self.casdir))
+ raise CASCacheError(
+ "CAS repository check failed for '{}'".format(self.casdir)
+ )
# has_open_grpc_channels():
#
@@ -268,7 +289,9 @@ class CASCache():
if e.code() == grpc.StatusCode.NOT_FOUND:
return False
if e.code() == grpc.StatusCode.UNIMPLEMENTED:
- raise CASCacheError("Unsupported buildbox-casd version: FetchTree unimplemented") from e
+ raise CASCacheError(
+ "Unsupported buildbox-casd version: FetchTree unimplemented"
+ ) from e
raise
# checkout():
@@ -285,7 +308,7 @@ class CASCache():
directory = remote_execution_pb2.Directory()
- with open(self.objpath(tree), 'rb') as f:
+ with open(self.objpath(tree), "rb") as f:
directory.ParseFromString(f.read())
for filenode in directory.files:
@@ -297,8 +320,16 @@ class CASCache():
utils.safe_copy(self.objpath(filenode.digest), fullpath)
if filenode.is_executable:
- os.chmod(fullpath, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
- stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+ os.chmod(
+ fullpath,
+ stat.S_IRUSR
+ | stat.S_IWUSR
+ | stat.S_IXUSR
+ | stat.S_IRGRP
+ | stat.S_IXGRP
+ | stat.S_IROTH
+ | stat.S_IXOTH,
+ )
for dirnode in directory.directories:
fullpath = os.path.join(dest, dirnode.name)
@@ -365,7 +396,7 @@ class CASCache():
# (str): The path of the object
#
def objpath(self, digest):
- return os.path.join(self.casdir, 'objects', digest.hash[:2], digest.hash[2:])
+ return os.path.join(self.casdir, "objects", digest.hash[:2], digest.hash[2:])
# add_object():
#
@@ -383,7 +414,15 @@ class CASCache():
#
# Either `path` or `buffer` must be passed, but not both.
#
- def add_object(self, *, digest=None, path=None, buffer=None, link_directly=False, instance_name=None):
+ def add_object(
+ self,
+ *,
+ digest=None,
+ path=None,
+ buffer=None,
+ link_directly=False,
+ instance_name=None
+ ):
# Exactly one of the two parameters has to be specified
assert (path is None) != (buffer is None)
@@ -411,13 +450,21 @@ class CASCache():
response = local_cas.CaptureFiles(request)
if len(response.responses) != 1:
- raise CASCacheError("Expected 1 response from CaptureFiles, got {}".format(len(response.responses)))
+ raise CASCacheError(
+ "Expected 1 response from CaptureFiles, got {}".format(
+ len(response.responses)
+ )
+ )
blob_response = response.responses[0]
if blob_response.status.code == code_pb2.RESOURCE_EXHAUSTED:
raise CASCacheError("Cache too full", reason="cache-too-full")
if blob_response.status.code != code_pb2.OK:
- raise CASCacheError("Failed to capture blob {}: {}".format(path, blob_response.status.code))
+ raise CASCacheError(
+ "Failed to capture blob {}: {}".format(
+ path, blob_response.status.code
+ )
+ )
digest.CopyFrom(blob_response.digest)
return digest
@@ -440,17 +487,23 @@ class CASCache():
response = local_cas.CaptureTree(request)
if len(response.responses) != 1:
- raise CASCacheError("Expected 1 response from CaptureTree, got {}".format(len(response.responses)))
+ raise CASCacheError(
+ "Expected 1 response from CaptureTree, got {}".format(
+ len(response.responses)
+ )
+ )
tree_response = response.responses[0]
if tree_response.status.code == code_pb2.RESOURCE_EXHAUSTED:
raise CASCacheError("Cache too full", reason="cache-too-full")
if tree_response.status.code != code_pb2.OK:
- raise CASCacheError("Failed to capture tree {}: {}".format(path, tree_response.status.code))
+ raise CASCacheError(
+ "Failed to capture tree {}: {}".format(path, tree_response.status.code)
+ )
treepath = self.objpath(tree_response.tree_digest)
tree = remote_execution_pb2.Tree()
- with open(treepath, 'rb') as f:
+ with open(treepath, "rb") as f:
tree.ParseFromString(f.read())
root_directory = tree.root.SerializeToString()
@@ -467,7 +520,7 @@ class CASCache():
def set_ref(self, ref, tree):
refpath = self._refpath(ref)
os.makedirs(os.path.dirname(refpath), exist_ok=True)
- with utils.save_file_atomic(refpath, 'wb', tempdir=self.tmpdir) as f:
+ with utils.save_file_atomic(refpath, "wb", tempdir=self.tmpdir) as f:
f.write(tree.SerializeToString())
# resolve_ref():
@@ -485,7 +538,7 @@ class CASCache():
refpath = self._refpath(ref)
try:
- with open(refpath, 'rb') as f:
+ with open(refpath, "rb") as f:
if update_mtime:
os.utime(refpath)
@@ -494,7 +547,9 @@ class CASCache():
return digest
except FileNotFoundError as e:
- raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
+ raise CASCacheError(
+ "Attempt to access unavailable ref: {}".format(e)
+ ) from e
# update_mtime()
#
@@ -507,7 +562,9 @@ class CASCache():
try:
os.utime(self._refpath(ref))
except FileNotFoundError as e:
- raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
+ raise CASCacheError(
+ "Attempt to access unavailable ref: {}".format(e)
+ ) from e
# remove():
#
@@ -521,7 +578,7 @@ class CASCache():
def remove(self, ref, *, basedir=None):
if basedir is None:
- basedir = os.path.join(self.casdir, 'refs', 'heads')
+ basedir = os.path.join(self.casdir, "refs", "heads")
# Remove cache ref
self._remove_ref(ref, basedir)
@@ -559,7 +616,9 @@ class CASCache():
missing_blobs = dict()
# Limit size of FindMissingBlobs request
for required_blobs_group in _grouper(iter(blobs), 512):
- request = remote_execution_pb2.FindMissingBlobsRequest(instance_name=instance_name)
+ request = remote_execution_pb2.FindMissingBlobsRequest(
+ instance_name=instance_name
+ )
for required_digest in required_blobs_group:
d = request.blob_digests.add()
@@ -568,8 +627,12 @@ class CASCache():
try:
response = cas.FindMissingBlobs(request)
except grpc.RpcError as e:
- if e.code() == grpc.StatusCode.INVALID_ARGUMENT and e.details().startswith("Invalid instance name"):
- raise CASCacheError("Unsupported buildbox-casd version: FindMissingBlobs failed") from e
+ if e.code() == grpc.StatusCode.INVALID_ARGUMENT and e.details().startswith(
+ "Invalid instance name"
+ ):
+ raise CASCacheError(
+ "Unsupported buildbox-casd version: FindMissingBlobs failed"
+ ) from e
raise
for missing_digest in response.missing_blob_digests:
@@ -611,7 +674,7 @@ class CASCache():
directory = remote_execution_pb2.Directory()
- with open(self.objpath(directory_digest), 'rb') as f:
+ with open(self.objpath(directory_digest), "rb") as f:
directory.ParseFromString(f.read())
for filenode in directory.files:
@@ -626,21 +689,23 @@ class CASCache():
dir_b = remote_execution_pb2.Directory()
if tree_a:
- with open(self.objpath(tree_a), 'rb') as f:
+ with open(self.objpath(tree_a), "rb") as f:
dir_a.ParseFromString(f.read())
if tree_b:
- with open(self.objpath(tree_b), 'rb') as f:
+ with open(self.objpath(tree_b), "rb") as f:
dir_b.ParseFromString(f.read())
a = 0
b = 0
while a < len(dir_a.files) or b < len(dir_b.files):
- if b < len(dir_b.files) and (a >= len(dir_a.files) or
- dir_a.files[a].name > dir_b.files[b].name):
+ if b < len(dir_b.files) and (
+ a >= len(dir_a.files) or dir_a.files[a].name > dir_b.files[b].name
+ ):
added.append(os.path.join(path, dir_b.files[b].name))
b += 1
- elif a < len(dir_a.files) and (b >= len(dir_b.files) or
- dir_b.files[b].name > dir_a.files[a].name):
+ elif a < len(dir_a.files) and (
+ b >= len(dir_b.files) or dir_b.files[b].name > dir_a.files[a].name
+ ):
removed.append(os.path.join(path, dir_a.files[a].name))
a += 1
else:
@@ -653,24 +718,43 @@ class CASCache():
a = 0
b = 0
while a < len(dir_a.directories) or b < len(dir_b.directories):
- if b < len(dir_b.directories) and (a >= len(dir_a.directories) or
- dir_a.directories[a].name > dir_b.directories[b].name):
- self.diff_trees(None, dir_b.directories[b].digest,
- added=added, removed=removed, modified=modified,
- path=os.path.join(path, dir_b.directories[b].name))
+ if b < len(dir_b.directories) and (
+ a >= len(dir_a.directories)
+ or dir_a.directories[a].name > dir_b.directories[b].name
+ ):
+ self.diff_trees(
+ None,
+ dir_b.directories[b].digest,
+ added=added,
+ removed=removed,
+ modified=modified,
+ path=os.path.join(path, dir_b.directories[b].name),
+ )
b += 1
- elif a < len(dir_a.directories) and (b >= len(dir_b.directories) or
- dir_b.directories[b].name > dir_a.directories[a].name):
- self.diff_trees(dir_a.directories[a].digest, None,
- added=added, removed=removed, modified=modified,
- path=os.path.join(path, dir_a.directories[a].name))
+ elif a < len(dir_a.directories) and (
+ b >= len(dir_b.directories)
+ or dir_b.directories[b].name > dir_a.directories[a].name
+ ):
+ self.diff_trees(
+ dir_a.directories[a].digest,
+ None,
+ added=added,
+ removed=removed,
+ modified=modified,
+ path=os.path.join(path, dir_a.directories[a].name),
+ )
a += 1
else:
# Subdirectory exists in both directories
if dir_a.directories[a].digest.hash != dir_b.directories[b].digest.hash:
- self.diff_trees(dir_a.directories[a].digest, dir_b.directories[b].digest,
- added=added, removed=removed, modified=modified,
- path=os.path.join(path, dir_a.directories[a].name))
+ self.diff_trees(
+ dir_a.directories[a].digest,
+ dir_b.directories[b].digest,
+ added=added,
+ removed=removed,
+ modified=modified,
+ path=os.path.join(path, dir_a.directories[a].name),
+ )
a += 1
b += 1
@@ -703,7 +787,7 @@ class CASCache():
return os.path.join(log_dir, str(self._casd_start_time) + ".log")
def _refpath(self, ref):
- return os.path.join(self.casdir, 'refs', 'heads', ref)
+ return os.path.join(self.casdir, "refs", "heads", ref)
# _remove_ref()
#
@@ -754,7 +838,9 @@ class CASCache():
break
# Something went wrong here
- raise CASCacheError("System error while removing ref '{}': {}".format(ref, e)) from e
+ raise CASCacheError(
+ "System error while removing ref '{}': {}".format(ref, e)
+ ) from e
def _get_subdir(self, tree, subdir):
head, name = os.path.split(subdir)
@@ -763,7 +849,7 @@ class CASCache():
directory = remote_execution_pb2.Directory()
- with open(self.objpath(tree), 'rb') as f:
+ with open(self.objpath(tree), "rb") as f:
directory.ParseFromString(f.read())
for dirnode in directory.directories:
@@ -772,7 +858,9 @@ class CASCache():
raise CASCacheError("Subdirectory {} not found".format(name))
- def _reachable_refs_dir(self, reachable, tree, update_mtime=False, check_exists=False):
+ def _reachable_refs_dir(
+ self, reachable, tree, update_mtime=False, check_exists=False
+ ):
if tree.hash in reachable:
return
try:
@@ -783,7 +871,7 @@ class CASCache():
directory = remote_execution_pb2.Directory()
- with open(self.objpath(tree), 'rb') as f:
+ with open(self.objpath(tree), "rb") as f:
directory.ParseFromString(f.read())
except FileNotFoundError:
@@ -802,7 +890,12 @@ class CASCache():
reachable.add(filenode.digest.hash)
for dirnode in directory.directories:
- self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, check_exists=check_exists)
+ self._reachable_refs_dir(
+ reachable,
+ dirnode.digest,
+ update_mtime=update_mtime,
+ check_exists=check_exists,
+ )
# _temporary_object():
#
@@ -813,8 +906,7 @@ class CASCache():
@contextlib.contextmanager
def _temporary_object(self):
with utils._tempnamedfile(dir=self.tmpdir) as f:
- os.chmod(f.name,
- stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
+ os.chmod(f.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
yield f
# _ensure_blob():
@@ -851,7 +943,9 @@ class CASCache():
return _CASBatchRead(remote)
# Helper function for _fetch_directory().
- def _fetch_directory_node(self, remote, digest, batch, fetch_queue, fetch_next_queue, *, recursive=False):
+ def _fetch_directory_node(
+ self, remote, digest, batch, fetch_queue, fetch_next_queue, *, recursive=False
+ ):
in_local_cache = os.path.exists(self.objpath(digest))
if in_local_cache:
@@ -891,19 +985,27 @@ class CASCache():
while len(fetch_queue) + len(fetch_next_queue) > 0:
if not fetch_queue:
- batch = self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
+ batch = self._fetch_directory_batch(
+ remote, batch, fetch_queue, fetch_next_queue
+ )
dir_digest = fetch_queue.pop(0)
objpath = self._ensure_blob(remote, dir_digest)
directory = remote_execution_pb2.Directory()
- with open(objpath, 'rb') as f:
+ with open(objpath, "rb") as f:
directory.ParseFromString(f.read())
for dirnode in directory.directories:
- batch = self._fetch_directory_node(remote, dirnode.digest, batch,
- fetch_queue, fetch_next_queue, recursive=True)
+ batch = self._fetch_directory_node(
+ remote,
+ dirnode.digest,
+ batch,
+ fetch_queue,
+ fetch_next_queue,
+ recursive=True,
+ )
# Fetch final batch
self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
@@ -913,7 +1015,7 @@ class CASCache():
tree = remote_execution_pb2.Tree()
- with open(objpath, 'rb') as f:
+ with open(objpath, "rb") as f:
tree.ParseFromString(f.read())
tree.children.extend([tree.root])
@@ -1014,7 +1116,10 @@ class CASCache():
if messenger:
messenger.message(
- Message(MessageType.WARN, "Buildbox-casd didn't exit in time and has been killed")
+ Message(
+ MessageType.WARN,
+ "Buildbox-casd didn't exit in time and has been killed",
+ )
)
self._casd_process = None
return
@@ -1050,7 +1155,9 @@ class CASCache():
# (subprocess.Process): The casd process that is used for the current cascache
#
def get_casd_process(self):
- assert self._casd_process is not None, "This should only be called with a running buildbox-casd process"
+ assert (
+ self._casd_process is not None
+ ), "This should only be called with a running buildbox-casd process"
return self._casd_process
@@ -1062,8 +1169,7 @@ class CASCache():
# used_size (int): Total size used by the local cache, in bytes.
# quota_size (int): Disk quota for the local cache, in bytes.
#
-class _CASCacheUsage():
-
+class _CASCacheUsage:
def __init__(self, used_size, quota_size):
self.used_size = used_size
self.quota_size = quota_size
@@ -1080,10 +1186,11 @@ class _CASCacheUsage():
elif self.quota_size is None:
return utils._pretty_size(self.used_size, dec_places=1)
else:
- return "{} / {} ({}%)" \
- .format(utils._pretty_size(self.used_size, dec_places=1),
- utils._pretty_size(self.quota_size, dec_places=1),
- self.used_percent)
+ return "{} / {} ({}%)".format(
+ utils._pretty_size(self.used_size, dec_places=1),
+ utils._pretty_size(self.quota_size, dec_places=1),
+ self.used_percent,
+ )
# _CASCacheUsageMonitor
diff --git a/src/buildstream/_cas/casremote.py b/src/buildstream/_cas/casremote.py
index a054b288a..f6be2cdab 100644
--- a/src/buildstream/_cas/casremote.py
+++ b/src/buildstream/_cas/casremote.py
@@ -32,7 +32,6 @@ _MAX_DIGESTS = _MAX_PAYLOAD_BYTES / 80
class BlobNotFound(CASRemoteError):
-
def __init__(self, blob, msg):
self.blob = blob
super().__init__(msg)
@@ -41,7 +40,6 @@ class BlobNotFound(CASRemoteError):
# Represents a single remote CAS cache.
#
class CASRemote(BaseRemote):
-
def __init__(self, spec, cascache, **kwargs):
super().__init__(spec, **kwargs)
@@ -85,12 +83,14 @@ class CASRemote(BaseRemote):
self.init()
- return self.cascache.add_object(buffer=message_buffer, instance_name=self.local_cas_instance_name)
+ return self.cascache.add_object(
+ buffer=message_buffer, instance_name=self.local_cas_instance_name
+ )
# Represents a batch of blobs queued for fetching.
#
-class _CASBatchRead():
+class _CASBatchRead:
def __init__(self, remote):
self._remote = remote
self._requests = []
@@ -123,22 +123,34 @@ class _CASBatchRead():
for response in batch_response.responses:
if response.status.code == code_pb2.NOT_FOUND:
if missing_blobs is None:
- raise BlobNotFound(response.digest.hash, "Failed to download blob {}: {}".format(
- response.digest.hash, response.status.code))
+ raise BlobNotFound(
+ response.digest.hash,
+ "Failed to download blob {}: {}".format(
+ response.digest.hash, response.status.code
+ ),
+ )
missing_blobs.append(response.digest)
if response.status.code != code_pb2.OK:
- raise CASRemoteError("Failed to download blob {}: {}".format(
- response.digest.hash, response.status.code))
+ raise CASRemoteError(
+ "Failed to download blob {}: {}".format(
+ response.digest.hash, response.status.code
+ )
+ )
if response.digest.size_bytes != len(response.data):
- raise CASRemoteError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
- response.digest.hash, response.digest.size_bytes, len(response.data)))
+ raise CASRemoteError(
+ "Failed to download blob {}: expected {} bytes, received {} bytes".format(
+ response.digest.hash,
+ response.digest.size_bytes,
+ len(response.data),
+ )
+ )
# Represents a batch of blobs queued for upload.
#
-class _CASBatchUpdate():
+class _CASBatchUpdate:
def __init__(self, remote):
self._remote = remote
self._requests = []
@@ -175,5 +187,9 @@ class _CASBatchUpdate():
else:
reason = None
- raise CASRemoteError("Failed to upload blob {}: {}".format(
- response.digest.hash, response.status.code), reason=reason)
+ raise CASRemoteError(
+ "Failed to upload blob {}: {}".format(
+ response.digest.hash, response.status.code
+ ),
+ reason=reason,
+ )
diff --git a/src/buildstream/_cas/casserver.py b/src/buildstream/_cas/casserver.py
index d4241435a..327b087c4 100644
--- a/src/buildstream/_cas/casserver.py
+++ b/src/buildstream/_cas/casserver.py
@@ -30,11 +30,20 @@ import grpc
from google.protobuf.message import DecodeError
import click
-from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
+from .._protos.build.bazel.remote.execution.v2 import (
+ remote_execution_pb2,
+ remote_execution_pb2_grpc,
+)
from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
from .._protos.google.rpc import code_pb2
-from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
- artifact_pb2, artifact_pb2_grpc, source_pb2, source_pb2_grpc
+from .._protos.buildstream.v2 import (
+ buildstream_pb2,
+ buildstream_pb2_grpc,
+ artifact_pb2,
+ artifact_pb2_grpc,
+ source_pb2,
+ source_pb2_grpc,
+)
from .. import utils
from .._exceptions import CASError, CASCacheError
@@ -58,11 +67,13 @@ _MAX_PAYLOAD_BYTES = 1024 * 1024
#
@contextmanager
def create_server(repo, *, enable_push, quota, index_only):
- cas = CASCache(os.path.abspath(repo), cache_quota=quota, protect_session_blobs=False)
+ cas = CASCache(
+ os.path.abspath(repo), cache_quota=quota, protect_session_blobs=False
+ )
try:
- artifactdir = os.path.join(os.path.abspath(repo), 'artifacts', 'refs')
- sourcedir = os.path.join(os.path.abspath(repo), 'source_protos')
+ artifactdir = os.path.join(os.path.abspath(repo), "artifacts", "refs")
+ sourcedir = os.path.join(os.path.abspath(repo), "source_protos")
# Use max_workers default from Python 3.5+
max_workers = (os.cpu_count() or 1) * 5
@@ -70,31 +81,42 @@ def create_server(repo, *, enable_push, quota, index_only):
if not index_only:
bytestream_pb2_grpc.add_ByteStreamServicer_to_server(
- _ByteStreamServicer(cas, enable_push=enable_push), server)
+ _ByteStreamServicer(cas, enable_push=enable_push), server
+ )
remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(
- _ContentAddressableStorageServicer(cas, enable_push=enable_push), server)
+ _ContentAddressableStorageServicer(cas, enable_push=enable_push), server
+ )
remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
- _CapabilitiesServicer(), server)
+ _CapabilitiesServicer(), server
+ )
buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(
- _ReferenceStorageServicer(cas, enable_push=enable_push), server)
+ _ReferenceStorageServicer(cas, enable_push=enable_push), server
+ )
artifact_pb2_grpc.add_ArtifactServiceServicer_to_server(
- _ArtifactServicer(cas, artifactdir, update_cas=not index_only), server)
+ _ArtifactServicer(cas, artifactdir, update_cas=not index_only), server
+ )
source_pb2_grpc.add_SourceServiceServicer_to_server(
- _SourceServicer(sourcedir), server)
+ _SourceServicer(sourcedir), server
+ )
    # Set up reference storage and artifact capabilities
artifact_capabilities = buildstream_pb2.ArtifactCapabilities(
- allow_updates=enable_push)
+ allow_updates=enable_push
+ )
source_capabilities = buildstream_pb2.SourceCapabilities(
- allow_updates=enable_push)
+ allow_updates=enable_push
+ )
buildstream_pb2_grpc.add_CapabilitiesServicer_to_server(
- _BuildStreamCapabilitiesServicer(artifact_capabilities, source_capabilities),
- server)
+ _BuildStreamCapabilitiesServicer(
+ artifact_capabilities, source_capabilities
+ ),
+ server,
+ )
yield server
@@ -103,58 +125,76 @@ def create_server(repo, *, enable_push, quota, index_only):
@click.command(short_help="CAS Artifact Server")
-@click.option('--port', '-p', type=click.INT, required=True, help="Port number")
-@click.option('--server-key', help="Private server key for TLS (PEM-encoded)")
-@click.option('--server-cert', help="Public server certificate for TLS (PEM-encoded)")
-@click.option('--client-certs', help="Public client certificates for TLS (PEM-encoded)")
-@click.option('--enable-push', is_flag=True,
- help="Allow clients to upload blobs and update artifact cache")
-@click.option('--quota', type=click.INT, default=10e9, show_default=True,
- help="Maximum disk usage in bytes")
-@click.option('--index-only', is_flag=True,
- help="Only provide the BuildStream artifact and source services (\"index\"), not the CAS (\"storage\")")
-@click.argument('repo')
-def server_main(repo, port, server_key, server_cert, client_certs, enable_push,
- quota, index_only):
+@click.option("--port", "-p", type=click.INT, required=True, help="Port number")
+@click.option("--server-key", help="Private server key for TLS (PEM-encoded)")
+@click.option("--server-cert", help="Public server certificate for TLS (PEM-encoded)")
+@click.option("--client-certs", help="Public client certificates for TLS (PEM-encoded)")
+@click.option(
+ "--enable-push",
+ is_flag=True,
+ help="Allow clients to upload blobs and update artifact cache",
+)
+@click.option(
+ "--quota",
+ type=click.INT,
+ default=10e9,
+ show_default=True,
+ help="Maximum disk usage in bytes",
+)
+@click.option(
+ "--index-only",
+ is_flag=True,
+ help='Only provide the BuildStream artifact and source services ("index"), not the CAS ("storage")',
+)
+@click.argument("repo")
+def server_main(
+ repo, port, server_key, server_cert, client_certs, enable_push, quota, index_only
+):
# Handle SIGTERM by calling sys.exit(0), which will raise a SystemExit exception,
# properly executing cleanup code in `finally` clauses and context managers.
# This is required to terminate buildbox-casd on SIGTERM.
signal.signal(signal.SIGTERM, lambda signalnum, frame: sys.exit(0))
- with create_server(repo,
- quota=quota,
- enable_push=enable_push,
- index_only=index_only) as server:
+ with create_server(
+ repo, quota=quota, enable_push=enable_push, index_only=index_only
+ ) as server:
use_tls = bool(server_key)
if bool(server_cert) != use_tls:
- click.echo("ERROR: --server-key and --server-cert are both required for TLS", err=True)
+ click.echo(
+ "ERROR: --server-key and --server-cert are both required for TLS",
+ err=True,
+ )
sys.exit(-1)
if client_certs and not use_tls:
- click.echo("ERROR: --client-certs can only be used with --server-key", err=True)
+ click.echo(
+ "ERROR: --client-certs can only be used with --server-key", err=True
+ )
sys.exit(-1)
if use_tls:
# Read public/private key pair
- with open(server_key, 'rb') as f:
+ with open(server_key, "rb") as f:
server_key_bytes = f.read()
- with open(server_cert, 'rb') as f:
+ with open(server_cert, "rb") as f:
server_cert_bytes = f.read()
if client_certs:
- with open(client_certs, 'rb') as f:
+ with open(client_certs, "rb") as f:
client_certs_bytes = f.read()
else:
client_certs_bytes = None
- credentials = grpc.ssl_server_credentials([(server_key_bytes, server_cert_bytes)],
- root_certificates=client_certs_bytes,
- require_client_auth=bool(client_certs))
- server.add_secure_port('[::]:{}'.format(port), credentials)
+ credentials = grpc.ssl_server_credentials(
+ [(server_key_bytes, server_cert_bytes)],
+ root_certificates=client_certs_bytes,
+ require_client_auth=bool(client_certs),
+ )
+ server.add_secure_port("[::]:{}".format(port), credentials)
else:
- server.add_insecure_port('[::]:{}'.format(port))
+ server.add_insecure_port("[::]:{}".format(port))
# Run artifact server
server.start()
@@ -183,7 +223,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
return
try:
- with open(self.cas.objpath(client_digest), 'rb') as f:
+ with open(self.cas.objpath(client_digest), "rb") as f:
if os.fstat(f.fileno()).st_size != client_digest.size_bytes:
context.set_code(grpc.StatusCode.NOT_FOUND)
return
@@ -234,7 +274,9 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
break
try:
- os.posix_fallocate(out.fileno(), 0, client_digest.size_bytes)
+ os.posix_fallocate(
+ out.fileno(), 0, client_digest.size_bytes
+ )
break
except OSError as e:
    # Multiple uploads can happen at the same time
@@ -280,7 +322,9 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
return response
-class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddressableStorageServicer):
+class _ContentAddressableStorageServicer(
+ remote_execution_pb2_grpc.ContentAddressableStorageServicer
+):
def __init__(self, cas, *, enable_push):
super().__init__()
self.cas = cas
@@ -317,7 +361,7 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddres
blob_response.digest.size_bytes = digest.size_bytes
try:
objpath = self.cas.objpath(digest)
- with open(objpath, 'rb') as f:
+ with open(objpath, "rb") as f:
if os.fstat(f.fileno()).st_size != digest.size_bytes:
blob_response.status.code = code_pb2.NOT_FOUND
continue
@@ -382,7 +426,9 @@ class _CapabilitiesServicer(remote_execution_pb2_grpc.CapabilitiesServicer):
cache_capabilities.digest_function.append(remote_execution_pb2.SHA256)
cache_capabilities.action_cache_update_capabilities.update_enabled = False
cache_capabilities.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
- cache_capabilities.symlink_absolute_path_strategy = remote_execution_pb2.CacheCapabilities.ALLOWED
+ cache_capabilities.symlink_absolute_path_strategy = (
+ remote_execution_pb2.CacheCapabilities.ALLOWED
+ )
response.deprecated_api_version.major = 2
response.low_api_version.major = 2
@@ -437,7 +483,6 @@ class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
-
def __init__(self, cas, artifactdir, *, update_cas=True):
super().__init__()
self.cas = cas
@@ -451,7 +496,7 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
context.abort(grpc.StatusCode.NOT_FOUND, "Artifact proto not found")
artifact = artifact_pb2.Artifact()
- with open(artifact_path, 'rb') as f:
+ with open(artifact_path, "rb") as f:
artifact.ParseFromString(f.read())
# Artifact-only servers will not have blobs on their system,
@@ -489,11 +534,9 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
except FileNotFoundError:
os.unlink(artifact_path)
- context.abort(grpc.StatusCode.NOT_FOUND,
- "Artifact files incomplete")
+ context.abort(grpc.StatusCode.NOT_FOUND, "Artifact files incomplete")
except DecodeError:
- context.abort(grpc.StatusCode.NOT_FOUND,
- "Artifact files not valid")
+ context.abort(grpc.StatusCode.NOT_FOUND, "Artifact files not valid")
return artifact
@@ -516,7 +559,7 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
# Add the artifact proto to the cas
artifact_path = os.path.join(self.artifactdir, request.cache_key)
os.makedirs(os.path.dirname(artifact_path), exist_ok=True)
- with utils.save_file_atomic(artifact_path, mode='wb') as f:
+ with utils.save_file_atomic(artifact_path, mode="wb") as f:
f.write(artifact.SerializeToString())
return artifact
@@ -527,19 +570,25 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
def _check_directory(self, name, digest, context):
try:
directory = remote_execution_pb2.Directory()
- with open(self.cas.objpath(digest), 'rb') as f:
+ with open(self.cas.objpath(digest), "rb") as f:
directory.ParseFromString(f.read())
except FileNotFoundError:
- context.abort(grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but no files found".format(name))
+ context.abort(
+ grpc.StatusCode.FAILED_PRECONDITION,
+ "Artifact {} specified but no files found".format(name),
+ )
except DecodeError:
- context.abort(grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but directory not found".format(name))
+ context.abort(
+ grpc.StatusCode.FAILED_PRECONDITION,
+ "Artifact {} specified but directory not found".format(name),
+ )
def _check_file(self, name, digest, context):
if not os.path.exists(self.cas.objpath(digest)):
- context.abort(grpc.StatusCode.FAILED_PRECONDITION,
- "Artifact {} specified but not found".format(name))
+ context.abort(
+ grpc.StatusCode.FAILED_PRECONDITION,
+ "Artifact {} specified but not found".format(name),
+ )
class _BuildStreamCapabilitiesServicer(buildstream_pb2_grpc.CapabilitiesServicer):
@@ -564,8 +613,7 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
except FileNotFoundError:
context.abort(grpc.StatusCode.NOT_FOUND, "Source not found")
except DecodeError:
- context.abort(grpc.StatusCode.NOT_FOUND,
- "Sources gives invalid directory")
+ context.abort(grpc.StatusCode.NOT_FOUND, "Sources gives invalid directory")
return source_proto
@@ -576,7 +624,7 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
def _get_source(self, cache_key):
path = os.path.join(self.sourcedir, cache_key)
source_proto = source_pb2.Source()
- with open(path, 'r+b') as f:
+ with open(path, "r+b") as f:
source_proto.ParseFromString(f.read())
os.utime(path)
return source_proto
@@ -584,18 +632,18 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
def _set_source(self, cache_key, source_proto):
path = os.path.join(self.sourcedir, cache_key)
os.makedirs(os.path.dirname(path), exist_ok=True)
- with utils.save_file_atomic(path, 'w+b') as f:
+ with utils.save_file_atomic(path, "w+b") as f:
f.write(source_proto.SerializeToString())
def _digest_from_download_resource_name(resource_name):
- parts = resource_name.split('/')
+ parts = resource_name.split("/")
# Accept requests from non-conforming BuildStream 1.1.x clients
if len(parts) == 2:
- parts.insert(0, 'blobs')
+ parts.insert(0, "blobs")
- if len(parts) != 3 or parts[0] != 'blobs':
+ if len(parts) != 3 or parts[0] != "blobs":
return None
try:
@@ -608,15 +656,15 @@ def _digest_from_download_resource_name(resource_name):
def _digest_from_upload_resource_name(resource_name):
- parts = resource_name.split('/')
+ parts = resource_name.split("/")
# Accept requests from non-conforming BuildStream 1.1.x clients
if len(parts) == 2:
- parts.insert(0, 'uploads')
+ parts.insert(0, "uploads")
parts.insert(1, str(uuid.uuid4()))
- parts.insert(2, 'blobs')
+ parts.insert(2, "blobs")
- if len(parts) < 5 or parts[0] != 'uploads' or parts[2] != 'blobs':
+ if len(parts) < 5 or parts[0] != "uploads" or parts[2] != "blobs":
return None
try:
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 879555089..17fe691d4 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -46,13 +46,12 @@ from .sandbox import SandboxRemote
# verbosity levels and basically anything pertaining to the context
# in which BuildStream was invoked.
#
-class Context():
-
+class Context:
def __init__(self, *, use_casd=True):
# Whether we are running as part of a test suite. This is only relevant
# for developing BuildStream itself.
- self.is_running_in_test_suite = 'BST_TEST_SUITE' in os.environ
+ self.is_running_in_test_suite = "BST_TEST_SUITE" in os.environ
# Filename indicating which configuration file was used, or None for the defaults
self.config_origin = None
@@ -216,8 +215,9 @@ class Context():
# a $XDG_CONFIG_HOME/buildstream.conf file
#
if not config:
- default_config = os.path.join(os.environ['XDG_CONFIG_HOME'],
- 'buildstream.conf')
+ default_config = os.path.join(
+ os.environ["XDG_CONFIG_HOME"], "buildstream.conf"
+ )
if os.path.exists(default_config):
config = default_config
@@ -231,19 +231,34 @@ class Context():
user_config._composite(defaults)
# Give obsoletion warnings
- if 'builddir' in defaults:
- raise LoadError("builddir is obsolete, use cachedir", LoadErrorReason.INVALID_DATA)
+ if "builddir" in defaults:
+ raise LoadError(
+ "builddir is obsolete, use cachedir", LoadErrorReason.INVALID_DATA
+ )
- if 'artifactdir' in defaults:
+ if "artifactdir" in defaults:
raise LoadError("artifactdir is obsolete", LoadErrorReason.INVALID_DATA)
- defaults.validate_keys([
- 'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler', 'build',
- 'artifacts', 'source-caches', 'logging', 'projects', 'cache', 'prompt',
- 'workspacedir', 'remote-execution',
- ])
-
- for directory in ['cachedir', 'sourcedir', 'logdir', 'workspacedir']:
+ defaults.validate_keys(
+ [
+ "cachedir",
+ "sourcedir",
+ "builddir",
+ "logdir",
+ "scheduler",
+ "build",
+ "artifacts",
+ "source-caches",
+ "logging",
+ "projects",
+ "cache",
+ "prompt",
+ "workspacedir",
+ "remote-execution",
+ ]
+ )
+
+ for directory in ["cachedir", "sourcedir", "logdir", "workspacedir"]:
# Allow the ~ tilde expansion and any environment variables in
# path specification in the config files.
#
@@ -256,25 +271,35 @@ class Context():
# Relative paths don't make sense in user configuration. The exception is
# workspacedir where `.` is useful as it will be combined with the name
# specified on the command line.
- if not os.path.isabs(path) and not (directory == 'workspacedir' and path == '.'):
- raise LoadError("{} must be an absolute path".format(directory), LoadErrorReason.INVALID_DATA)
+ if not os.path.isabs(path) and not (
+ directory == "workspacedir" and path == "."
+ ):
+ raise LoadError(
+ "{} must be an absolute path".format(directory),
+ LoadErrorReason.INVALID_DATA,
+ )
# add directories not set by users
- self.tmpdir = os.path.join(self.cachedir, 'tmp')
- self.casdir = os.path.join(self.cachedir, 'cas')
- self.builddir = os.path.join(self.cachedir, 'build')
- self.artifactdir = os.path.join(self.cachedir, 'artifacts', 'refs')
+ self.tmpdir = os.path.join(self.cachedir, "tmp")
+ self.casdir = os.path.join(self.cachedir, "cas")
+ self.builddir = os.path.join(self.cachedir, "build")
+ self.artifactdir = os.path.join(self.cachedir, "artifacts", "refs")
# Move old artifact cas to cas if it exists and create symlink
- old_casdir = os.path.join(self.cachedir, 'artifacts', 'cas')
- if (os.path.exists(old_casdir) and not os.path.islink(old_casdir) and
- not os.path.exists(self.casdir)):
+ old_casdir = os.path.join(self.cachedir, "artifacts", "cas")
+ if (
+ os.path.exists(old_casdir)
+ and not os.path.islink(old_casdir)
+ and not os.path.exists(self.casdir)
+ ):
os.rename(old_casdir, self.casdir)
os.symlink(self.casdir, old_casdir)
# Cleanup old extract directories
- old_extractdirs = [os.path.join(self.cachedir, 'artifacts', 'extract'),
- os.path.join(self.cachedir, 'extract')]
+ old_extractdirs = [
+ os.path.join(self.cachedir, "artifacts", "extract"),
+ os.path.join(self.cachedir, "extract"),
+ ]
for old_extractdir in old_extractdirs:
if os.path.isdir(old_extractdir):
shutil.rmtree(old_extractdir, ignore_errors=True)
@@ -282,21 +307,24 @@ class Context():
# Load quota configuration
# We need to find the first existing directory in the path of our
# casdir - the casdir may not have been created yet.
- cache = defaults.get_mapping('cache')
- cache.validate_keys(['quota', 'pull-buildtrees', 'cache-buildtrees'])
+ cache = defaults.get_mapping("cache")
+ cache.validate_keys(["quota", "pull-buildtrees", "cache-buildtrees"])
cas_volume = self.casdir
while not os.path.exists(cas_volume):
cas_volume = os.path.dirname(cas_volume)
- self.config_cache_quota_string = cache.get_str('quota')
+ self.config_cache_quota_string = cache.get_str("quota")
try:
- self.config_cache_quota = utils._parse_size(self.config_cache_quota_string,
- cas_volume)
+ self.config_cache_quota = utils._parse_size(
+ self.config_cache_quota_string, cas_volume
+ )
except utils.UtilError as e:
- raise LoadError("{}\nPlease specify the value in bytes or as a % of full disk space.\n"
- "\nValid values are, for example: 800M 10G 1T 50%\n"
- .format(str(e)), LoadErrorReason.INVALID_DATA) from e
+ raise LoadError(
+ "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
+ "\nValid values are, for example: 800M 10G 1T 50%\n".format(str(e)),
+ LoadErrorReason.INVALID_DATA,
+ ) from e
# Load artifact share configuration
self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
@@ -305,72 +333,90 @@ class Context():
self.source_cache_specs = SourceCache.specs_from_config_node(defaults)
# Load remote execution config getting pull-artifact-files from it
- remote_execution = defaults.get_mapping('remote-execution', default=None)
+ remote_execution = defaults.get_mapping("remote-execution", default=None)
if remote_execution:
- self.pull_artifact_files = remote_execution.get_bool('pull-artifact-files', default=True)
+ self.pull_artifact_files = remote_execution.get_bool(
+ "pull-artifact-files", default=True
+ )
# This stops it being used in the remote service set up
- remote_execution.safe_del('pull-artifact-files')
+ remote_execution.safe_del("pull-artifact-files")
# Don't pass the remote execution settings if that was the only option
if remote_execution.keys() == []:
- del defaults['remote-execution']
+ del defaults["remote-execution"]
else:
self.pull_artifact_files = True
self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
# Load pull build trees configuration
- self.pull_buildtrees = cache.get_bool('pull-buildtrees')
+ self.pull_buildtrees = cache.get_bool("pull-buildtrees")
# Load cache build trees configuration
- self.cache_buildtrees = cache.get_enum('cache-buildtrees', _CacheBuildTrees)
+ self.cache_buildtrees = cache.get_enum("cache-buildtrees", _CacheBuildTrees)
# Load logging config
- logging = defaults.get_mapping('logging')
- logging.validate_keys([
- 'key-length', 'verbose',
- 'error-lines', 'message-lines',
- 'debug', 'element-format', 'message-format'
- ])
- self.log_key_length = logging.get_int('key-length')
- self.log_debug = logging.get_bool('debug')
- self.log_verbose = logging.get_bool('verbose')
- self.log_error_lines = logging.get_int('error-lines')
- self.log_message_lines = logging.get_int('message-lines')
- self.log_element_format = logging.get_str('element-format')
- self.log_message_format = logging.get_str('message-format')
+ logging = defaults.get_mapping("logging")
+ logging.validate_keys(
+ [
+ "key-length",
+ "verbose",
+ "error-lines",
+ "message-lines",
+ "debug",
+ "element-format",
+ "message-format",
+ ]
+ )
+ self.log_key_length = logging.get_int("key-length")
+ self.log_debug = logging.get_bool("debug")
+ self.log_verbose = logging.get_bool("verbose")
+ self.log_error_lines = logging.get_int("error-lines")
+ self.log_message_lines = logging.get_int("message-lines")
+ self.log_element_format = logging.get_str("element-format")
+ self.log_message_format = logging.get_str("message-format")
# Load scheduler config
- scheduler = defaults.get_mapping('scheduler')
- scheduler.validate_keys([
- 'on-error', 'fetchers', 'builders',
- 'pushers', 'network-retries'
- ])
- self.sched_error_action = scheduler.get_enum('on-error', _SchedulerErrorAction)
- self.sched_fetchers = scheduler.get_int('fetchers')
- self.sched_builders = scheduler.get_int('builders')
- self.sched_pushers = scheduler.get_int('pushers')
- self.sched_network_retries = scheduler.get_int('network-retries')
+ scheduler = defaults.get_mapping("scheduler")
+ scheduler.validate_keys(
+ ["on-error", "fetchers", "builders", "pushers", "network-retries"]
+ )
+ self.sched_error_action = scheduler.get_enum("on-error", _SchedulerErrorAction)
+ self.sched_fetchers = scheduler.get_int("fetchers")
+ self.sched_builders = scheduler.get_int("builders")
+ self.sched_pushers = scheduler.get_int("pushers")
+ self.sched_network_retries = scheduler.get_int("network-retries")
# Load build config
- build = defaults.get_mapping('build')
- build.validate_keys(['max-jobs', 'dependencies'])
- self.build_max_jobs = build.get_int('max-jobs')
-
- self.build_dependencies = build.get_str('dependencies')
- if self.build_dependencies not in ['plan', 'all']:
- provenance = build.get_scalar('dependencies').get_provenance()
- raise LoadError("{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'."
- .format(provenance), LoadErrorReason.INVALID_DATA)
+ build = defaults.get_mapping("build")
+ build.validate_keys(["max-jobs", "dependencies"])
+ self.build_max_jobs = build.get_int("max-jobs")
+
+ self.build_dependencies = build.get_str("dependencies")
+ if self.build_dependencies not in ["plan", "all"]:
+ provenance = build.get_scalar("dependencies").get_provenance()
+ raise LoadError(
+ "{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'.".format(
+ provenance
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
# Load per-projects overrides
- self._project_overrides = defaults.get_mapping('projects', default={})
+ self._project_overrides = defaults.get_mapping("projects", default={})
# Shallow validation of overrides, parts of buildstream which rely
# on the overrides are expected to validate elsewhere.
for overrides in self._project_overrides.values():
- overrides.validate_keys(['artifacts', 'source-caches', 'options',
- 'strict', 'default-mirror',
- 'remote-execution'])
+ overrides.validate_keys(
+ [
+ "artifacts",
+ "source-caches",
+ "options",
+ "strict",
+ "default-mirror",
+ "remote-execution",
+ ]
+ )
@property
def platform(self):
@@ -473,7 +519,7 @@ class Context():
# so work out if we should be strict, and then cache the result
toplevel = self.get_toplevel_project()
overrides = self.get_overrides(toplevel.name)
- self._strict_build_plan = overrides.get_bool('strict', default=True)
+ self._strict_build_plan = overrides.get_bool("strict", default=True)
# If it was set by the CLI, it overrides any config
# Ditto if we've already computed this, then we return the computed
@@ -504,12 +550,12 @@ class Context():
# preferred locations of things from user configuration
# files.
def _init_xdg(self):
- if not os.environ.get('XDG_CACHE_HOME'):
- os.environ['XDG_CACHE_HOME'] = os.path.expanduser('~/.cache')
- if not os.environ.get('XDG_CONFIG_HOME'):
- os.environ['XDG_CONFIG_HOME'] = os.path.expanduser('~/.config')
- if not os.environ.get('XDG_DATA_HOME'):
- os.environ['XDG_DATA_HOME'] = os.path.expanduser('~/.local/share')
+ if not os.environ.get("XDG_CACHE_HOME"):
+ os.environ["XDG_CACHE_HOME"] = os.path.expanduser("~/.cache")
+ if not os.environ.get("XDG_CONFIG_HOME"):
+ os.environ["XDG_CONFIG_HOME"] = os.path.expanduser("~/.config")
+ if not os.environ.get("XDG_DATA_HOME"):
+ os.environ["XDG_DATA_HOME"] = os.path.expanduser("~/.local/share")
def get_cascache(self):
if self._cascache is None:
@@ -520,10 +566,12 @@ class Context():
else:
log_level = CASLogLevel.WARNING
- self._cascache = CASCache(self.cachedir,
- casd=self.use_casd,
- cache_quota=self.config_cache_quota,
- log_level=log_level)
+ self._cascache = CASCache(
+ self.cachedir,
+ casd=self.use_casd,
+ cache_quota=self.config_cache_quota,
+ log_level=log_level,
+ )
return self._cascache
# prepare_fork():
diff --git a/src/buildstream/_elementfactory.py b/src/buildstream/_elementfactory.py
index d6591bf4c..5d219c627 100644
--- a/src/buildstream/_elementfactory.py
+++ b/src/buildstream/_elementfactory.py
@@ -30,14 +30,15 @@ from .element import Element
# plugin_origins (list): Data used to search for external Element plugins
#
class ElementFactory(PluginContext):
+ def __init__(self, plugin_base, *, format_versions={}, plugin_origins=None):
- def __init__(self, plugin_base, *,
- format_versions={},
- plugin_origins=None):
-
- super().__init__(plugin_base, Element, [_site.element_plugins],
- plugin_origins=plugin_origins,
- format_versions=format_versions)
+ super().__init__(
+ plugin_base,
+ Element,
+ [_site.element_plugins],
+ plugin_origins=plugin_origins,
+ format_versions=format_versions,
+ )
# create():
#
diff --git a/src/buildstream/_exceptions.py b/src/buildstream/_exceptions.py
index 947b83149..f05e38162 100644
--- a/src/buildstream/_exceptions.py
+++ b/src/buildstream/_exceptions.py
@@ -51,8 +51,10 @@ def get_last_exception():
# Used by regression tests
#
def get_last_task_error():
- if 'BST_TEST_SUITE' not in os.environ:
- raise BstError("Getting the last task error is only supported when running tests")
+ if "BST_TEST_SUITE" not in os.environ:
+ raise BstError(
+ "Getting the last task error is only supported when running tests"
+ )
global _last_task_error_domain
global _last_task_error_reason
@@ -71,7 +73,7 @@ def get_last_task_error():
# tests about how things failed in a machine readable way
#
def set_last_task_error(domain, reason):
- if 'BST_TEST_SUITE' in os.environ:
+ if "BST_TEST_SUITE" in os.environ:
global _last_task_error_domain
global _last_task_error_reason
@@ -107,8 +109,9 @@ class ErrorDomain(Enum):
# context can then be communicated back to the main process.
#
class BstError(Exception):
-
- def __init__(self, message, *, detail=None, domain=None, reason=None, temporary=False):
+ def __init__(
+ self, message, *, detail=None, domain=None, reason=None, temporary=False
+ ):
global _last_exception
super().__init__(message)
@@ -132,7 +135,7 @@ class BstError(Exception):
self.reason = reason
# Hold on to the last raised exception for testing purposes
- if 'BST_TEST_SUITE' in os.environ:
+ if "BST_TEST_SUITE" in os.environ:
_last_exception = self
@@ -145,7 +148,9 @@ class BstError(Exception):
#
class PluginError(BstError):
def __init__(self, message, reason=None, temporary=False):
- super().__init__(message, domain=ErrorDomain.PLUGIN, reason=reason, temporary=False)
+ super().__init__(
+ message, domain=ErrorDomain.PLUGIN, reason=reason, temporary=False
+ )
# LoadErrorReason
@@ -261,7 +266,9 @@ class ImplError(BstError):
# Raised if the current platform is not supported.
class PlatformError(BstError):
def __init__(self, message, reason=None, detail=None):
- super().__init__(message, domain=ErrorDomain.PLATFORM, reason=reason, detail=detail)
+ super().__init__(
+ message, domain=ErrorDomain.PLATFORM, reason=reason, detail=detail
+ )
# SandboxError
@@ -270,7 +277,9 @@ class PlatformError(BstError):
#
class SandboxError(BstError):
def __init__(self, message, detail=None, reason=None):
- super().__init__(message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason)
+ super().__init__(
+ message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason
+ )
# SourceCacheError
@@ -279,7 +288,9 @@ class SandboxError(BstError):
#
class SourceCacheError(BstError):
def __init__(self, message, detail=None, reason=None):
- super().__init__(message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason)
+ super().__init__(
+ message, detail=detail, domain=ErrorDomain.SANDBOX, reason=reason
+ )
# ArtifactError
@@ -288,7 +299,13 @@ class SourceCacheError(BstError):
#
class ArtifactError(BstError):
def __init__(self, message, *, detail=None, reason=None, temporary=False):
- super().__init__(message, detail=detail, domain=ErrorDomain.ARTIFACT, reason=reason, temporary=True)
+ super().__init__(
+ message,
+ detail=detail,
+ domain=ErrorDomain.ARTIFACT,
+ reason=reason,
+ temporary=True,
+ )
# RemoteError
@@ -297,7 +314,9 @@ class ArtifactError(BstError):
#
class RemoteError(BstError):
def __init__(self, message, *, detail=None, reason=None):
- super().__init__(message, detail=detail, domain=ErrorDomain.REMOTE, reason=reason)
+ super().__init__(
+ message, detail=detail, domain=ErrorDomain.REMOTE, reason=reason
+ )
# CASError
@@ -306,7 +325,13 @@ class RemoteError(BstError):
#
class CASError(BstError):
def __init__(self, message, *, detail=None, reason=None, temporary=False):
- super().__init__(message, detail=detail, domain=ErrorDomain.CAS, reason=reason, temporary=True)
+ super().__init__(
+ message,
+ detail=detail,
+ domain=ErrorDomain.CAS,
+ reason=reason,
+ temporary=True,
+ )
# CASRemoteError
@@ -329,9 +354,10 @@ class CASCacheError(CASError):
# Raised from pipeline operations
#
class PipelineError(BstError):
-
def __init__(self, message, *, detail=None, reason=None):
- super().__init__(message, detail=detail, domain=ErrorDomain.PIPELINE, reason=reason)
+ super().__init__(
+ message, detail=detail, domain=ErrorDomain.PIPELINE, reason=reason
+ )
# StreamError
@@ -339,7 +365,6 @@ class PipelineError(BstError):
# Raised when a stream operation fails
#
class StreamError(BstError):
-
def __init__(self, message=None, *, detail=None, reason=None, terminated=False):
# The empty string should never appear to a user,
@@ -348,7 +373,9 @@ class StreamError(BstError):
if message is None:
message = ""
- super().__init__(message, detail=detail, domain=ErrorDomain.STREAM, reason=reason)
+ super().__init__(
+ message, detail=detail, domain=ErrorDomain.STREAM, reason=reason
+ )
self.terminated = terminated
@@ -377,4 +404,6 @@ class SkipJob(Exception):
#
class ArtifactElementError(BstError):
def __init__(self, message, *, detail=None, reason=None):
- super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)
+ super().__init__(
+ message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason
+ )
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 45160afbc..3be035c0c 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -56,19 +56,18 @@ INDENT = 4
# main_options (dict): The main CLI options of the `bst`
# command, before any subcommand
#
-class App():
-
+class App:
def __init__(self, main_options):
#
# Public members
#
- self.context = None # The Context object
- self.stream = None # The Stream object
- self.project = None # The toplevel Project object
- self.logger = None # The LogLine object
- self.interactive = None # Whether we are running in interactive mode
- self.colors = None # Whether to use colors in logging
+ self.context = None # The Context object
+ self.stream = None # The Stream object
+ self.project = None # The toplevel Project object
+ self.logger = None # The LogLine object
+ self.interactive = None # Whether we are running in interactive mode
+ self.colors = None # Whether to use colors in logging
#
# Private members
@@ -76,18 +75,18 @@ class App():
self._session_start = datetime.datetime.now()
self._session_name = None
self._main_options = main_options # Main CLI options, before any command
- self._status = None # The Status object
- self._fail_messages = {} # Failure messages by unique plugin id
+ self._status = None # The Status object
+ self._fail_messages = {} # Failure messages by unique plugin id
self._interactive_failures = None # Whether to handle failures interactively
- self._started = False # Whether a session has started
- self._set_project_dir = False # Whether -C option was used
- self._state = None # Frontend reads this and registers callbacks
+ self._started = False # Whether a session has started
+ self._set_project_dir = False # Whether -C option was used
+ self._state = None # Frontend reads this and registers callbacks
# UI Colors Profiles
- self._content_profile = Profile(fg='yellow')
- self._format_profile = Profile(fg='cyan', dim=True)
- self._success_profile = Profile(fg='green')
- self._error_profile = Profile(fg='red', dim=True)
+ self._content_profile = Profile(fg="yellow")
+ self._format_profile = Profile(fg="cyan", dim=True)
+ self._success_profile = Profile(fg="green")
+ self._error_profile = Profile(fg="red", dim=True)
self._detail_profile = Profile(dim=True)
#
@@ -96,31 +95,31 @@ class App():
is_a_tty = sys.stdout.isatty() and sys.stderr.isatty()
# Enable interactive mode if we're attached to a tty
- if main_options['no_interactive']:
+ if main_options["no_interactive"]:
self.interactive = False
else:
self.interactive = is_a_tty
# Handle errors interactively if we're in interactive mode
# and --on-error was not specified on the command line
- if main_options.get('on_error') is not None:
+ if main_options.get("on_error") is not None:
self._interactive_failures = False
else:
self._interactive_failures = self.interactive
# Use color output if we're attached to a tty, unless
# otherwise specified on the command line
- if main_options['colors'] is None:
+ if main_options["colors"] is None:
self.colors = is_a_tty
- elif main_options['colors']:
+ elif main_options["colors"]:
self.colors = True
else:
self.colors = False
- if main_options['directory']:
+ if main_options["directory"]:
self._set_project_dir = True
else:
- main_options['directory'] = os.getcwd()
+ main_options["directory"] = os.getcwd()
# create()
#
@@ -133,9 +132,10 @@ class App():
#
@classmethod
def create(cls, *args, **kwargs):
- if sys.platform.startswith('linux'):
+ if sys.platform.startswith("linux"):
# Use an App with linux specific features
from .linuxapp import LinuxApp # pylint: disable=cyclic-import
+
return LinuxApp(*args, **kwargs)
else:
# The base App() class is default
@@ -163,8 +163,8 @@ class App():
#
@contextmanager
def initialized(self, *, session_name=None):
- directory = self._main_options['directory']
- config = self._main_options['config']
+ directory = self._main_options["directory"]
+ config = self._main_options["config"]
self._session_name = session_name
@@ -184,19 +184,19 @@ class App():
# the command line when used, trumps the config files.
#
override_map = {
- 'strict': '_strict_build_plan',
- 'debug': 'log_debug',
- 'verbose': 'log_verbose',
- 'error_lines': 'log_error_lines',
- 'message_lines': 'log_message_lines',
- 'on_error': 'sched_error_action',
- 'fetchers': 'sched_fetchers',
- 'builders': 'sched_builders',
- 'pushers': 'sched_pushers',
- 'max_jobs': 'build_max_jobs',
- 'network_retries': 'sched_network_retries',
- 'pull_buildtrees': 'pull_buildtrees',
- 'cache_buildtrees': 'cache_buildtrees'
+ "strict": "_strict_build_plan",
+ "debug": "log_debug",
+ "verbose": "log_verbose",
+ "error_lines": "log_error_lines",
+ "message_lines": "log_message_lines",
+ "on_error": "sched_error_action",
+ "fetchers": "sched_fetchers",
+ "builders": "sched_builders",
+ "pushers": "sched_pushers",
+ "max_jobs": "build_max_jobs",
+ "network_retries": "sched_network_retries",
+ "pull_buildtrees": "pull_buildtrees",
+ "cache_buildtrees": "cache_buildtrees",
}
for cli_option, context_attr in override_map.items():
option_value = self._main_options.get(cli_option)
@@ -208,10 +208,13 @@ class App():
self._error_exit(e, "Error instantiating platform")
# Create the stream right away, we'll need to pass it around.
- self.stream = Stream(self.context, self._session_start,
- session_start_callback=self.session_start_cb,
- interrupt_callback=self._interrupt_handler,
- ticker_callback=self._tick)
+ self.stream = Stream(
+ self.context,
+ self._session_start,
+ session_start_callback=self.session_start_cb,
+ interrupt_callback=self._interrupt_handler,
+ ticker_callback=self._tick,
+ )
self._state = self.stream.get_state()
@@ -219,13 +222,16 @@ class App():
self._state.register_task_failed_callback(self._job_failed)
# Create the logger right before setting the message handler
- self.logger = LogLine(self.context, self._state,
- self._content_profile,
- self._format_profile,
- self._success_profile,
- self._error_profile,
- self._detail_profile,
- indent=INDENT)
+ self.logger = LogLine(
+ self.context,
+ self._state,
+ self._content_profile,
+ self._format_profile,
+ self._success_profile,
+ self._error_profile,
+ self._detail_profile,
+ indent=INDENT,
+ )
# Propagate pipeline feedback to the user
self.context.messenger.set_message_handler(self._message_handler)
@@ -248,10 +254,16 @@ class App():
self.stream.init()
# Create our status printer, only available in interactive
- self._status = Status(self.context, self._state,
- self._content_profile, self._format_profile,
- self._success_profile, self._error_profile,
- self.stream, colors=self.colors)
+ self._status = Status(
+ self.context,
+ self._state,
+ self._content_profile,
+ self._format_profile,
+ self._success_profile,
+ self._error_profile,
+ self.stream,
+ colors=self.colors,
+ )
# Mark the beginning of the session
if session_name:
@@ -261,9 +273,13 @@ class App():
# Load the Project
#
try:
- self.project = Project(directory, self.context, cli_options=self._main_options['option'],
- default_mirror=self._main_options.get('default_mirror'),
- fetch_subprojects=self.stream.fetch_subprojects)
+ self.project = Project(
+ directory,
+ self.context,
+ cli_options=self._main_options["option"],
+ default_mirror=self._main_options.get("default_mirror"),
+ fetch_subprojects=self.stream.fetch_subprojects,
+ )
self.stream.set_project(self.project)
except LoadError as e:
@@ -272,7 +288,10 @@ class App():
# We don't want to slow down users that just made a mistake, so
# don't stop them with an offer to create a project for them.
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
- click.echo("No project found. You can create a new project like so:", err=True)
+ click.echo(
+ "No project found. You can create a new project like so:",
+ err=True,
+ )
click.echo("", err=True)
click.echo(" bst init", err=True)
@@ -290,8 +309,14 @@ class App():
if session_name:
elapsed = self.stream.elapsed_time
- if isinstance(e, StreamError) and e.terminated: # pylint: disable=no-member
- self._message(MessageType.WARN, session_name + ' Terminated', elapsed=elapsed)
+ if (
+ isinstance(e, StreamError) and e.terminated
+ ): # pylint: disable=no-member
+ self._message(
+ MessageType.WARN,
+ session_name + " Terminated",
+ elapsed=elapsed,
+ )
else:
self._message(MessageType.FAIL, session_name, elapsed=elapsed)
@@ -304,14 +329,20 @@ class App():
# Exit with the error
self._error_exit(e)
except RecursionError:
- click.echo("RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
- err=True)
+ click.echo(
+ "RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
+ err=True,
+ )
sys.exit(-1)
else:
# No exceptions occurred, print session time and summary
if session_name:
- self._message(MessageType.SUCCESS, session_name, elapsed=self.stream.elapsed_time)
+ self._message(
+ MessageType.SUCCESS,
+ session_name,
+ elapsed=self.stream.elapsed_time,
+ )
if self._started:
self._print_summary()
@@ -331,67 +362,91 @@ class App():
# force (bool): Allow overwriting an existing project.conf
# target_directory (str): The target directory the project should be initialized in
#
- def init_project(self, project_name, format_version=BST_FORMAT_VERSION, element_path='elements',
- force=False, target_directory=None):
+ def init_project(
+ self,
+ project_name,
+ format_version=BST_FORMAT_VERSION,
+ element_path="elements",
+ force=False,
+ target_directory=None,
+ ):
if target_directory:
directory = os.path.abspath(target_directory)
else:
- directory = self._main_options['directory']
+ directory = self._main_options["directory"]
directory = os.path.abspath(directory)
- project_path = os.path.join(directory, 'project.conf')
+ project_path = os.path.join(directory, "project.conf")
try:
if self._set_project_dir:
- raise AppError("Attempted to use -C or --directory with init.",
- reason='init-with-set-directory',
- detail="Please use 'bst init {}' instead.".format(directory))
+ raise AppError(
+ "Attempted to use -C or --directory with init.",
+ reason="init-with-set-directory",
+ detail="Please use 'bst init {}' instead.".format(directory),
+ )
# Abort if the project.conf already exists, unless `--force` was specified in `bst init`
if not force and os.path.exists(project_path):
- raise AppError("A project.conf already exists at: {}".format(project_path),
- reason='project-exists')
+ raise AppError(
+ "A project.conf already exists at: {}".format(project_path),
+ reason="project-exists",
+ )
if project_name:
# If project name was specified, user interaction is not desired, just
# perform some validation and write the project.conf
- node._assert_symbol_name(project_name, 'project name')
+ node._assert_symbol_name(project_name, "project name")
self._assert_format_version(format_version)
self._assert_element_path(element_path)
elif not self.interactive:
- raise AppError("Cannot initialize a new project without specifying the project name",
- reason='unspecified-project-name')
+ raise AppError(
+ "Cannot initialize a new project without specifying the project name",
+ reason="unspecified-project-name",
+ )
else:
# Collect the parameters using an interactive session
- project_name, format_version, element_path = \
- self._init_project_interactive(project_name, format_version, element_path)
+ (
+ project_name,
+ format_version,
+ element_path,
+ ) = self._init_project_interactive(
+ project_name, format_version, element_path
+ )
    # Create the directory if it doesn't exist
try:
os.makedirs(directory, exist_ok=True)
except IOError as e:
- raise AppError("Error creating project directory {}: {}".format(directory, e)) from e
+ raise AppError(
+ "Error creating project directory {}: {}".format(directory, e)
+ ) from e
    # Create the elements sub-directory if it doesn't exist
elements_path = os.path.join(directory, element_path)
try:
os.makedirs(elements_path, exist_ok=True)
except IOError as e:
- raise AppError("Error creating elements sub-directory {}: {}"
- .format(elements_path, e)) from e
+ raise AppError(
+ "Error creating elements sub-directory {}: {}".format(
+ elements_path, e
+ )
+ ) from e
    # Don't use ruamel.yaml here, because it doesn't let
    # us programmatically insert comments or whitespace at
    # the toplevel.
try:
- with open(project_path, 'w') as f:
- f.write("# Unique project name\n" +
- "name: {}\n\n".format(project_name) +
- "# Required BuildStream format version\n" +
- "format-version: {}\n\n".format(format_version) +
- "# Subdirectory where elements are stored\n" +
- "element-path: {}\n".format(element_path))
+ with open(project_path, "w") as f:
+ f.write(
+ "# Unique project name\n"
+ + "name: {}\n\n".format(project_name)
+ + "# Required BuildStream format version\n"
+ + "format-version: {}\n\n".format(format_version)
+ + "# Subdirectory where elements are stored\n"
+ + "element-path: {}\n".format(element_path)
+ )
except IOError as e:
raise AppError("Error writing {}: {}".format(project_path, e)) from e
@@ -419,15 +474,18 @@ class App():
_, key, dim = element_key
if self.colors:
- prompt = self._format_profile.fmt('[') + \
- self._content_profile.fmt(key, dim=dim) + \
- self._format_profile.fmt('@') + \
- self._content_profile.fmt(element_name) + \
- self._format_profile.fmt(':') + \
- self._content_profile.fmt('$PWD') + \
- self._format_profile.fmt(']$') + ' '
+ prompt = (
+ self._format_profile.fmt("[")
+ + self._content_profile.fmt(key, dim=dim)
+ + self._format_profile.fmt("@")
+ + self._content_profile.fmt(element_name)
+ + self._format_profile.fmt(":")
+ + self._content_profile.fmt("$PWD")
+ + self._format_profile.fmt("]$")
+ + " "
+ )
else:
- prompt = '[{}@{}:${{PWD}}]$ '.format(key, element_name)
+ prompt = "[{}@{}:${{PWD}}]$ ".format(key, element_name)
return prompt
@@ -473,8 +531,7 @@ class App():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self.context.messenger.message(
- Message(message_type, message, **args))
+ self.context.messenger.message(Message(message_type, message, **args))
# Exception handler
#
@@ -482,8 +539,7 @@ class App():
# Print the regular BUG message
formatted = "".join(traceback.format_exception(etype, value, tb))
- self._message(MessageType.BUG, str(value),
- detail=formatted)
+ self._message(MessageType.BUG, str(value), detail=formatted)
# If the scheduler has started, try to terminate all jobs gracefully,
# otherwise exit immediately.
@@ -498,8 +554,11 @@ class App():
def _maybe_render_status(self):
    # If we're suspended or terminating, then don't render the status area
- if self._status and self.stream and \
- not (self.stream.suspended or self.stream.terminated):
+ if (
+ self._status
+ and self.stream
+ and not (self.stream.suspended or self.stream.terminated)
+ ):
self._status.render()
#
@@ -518,33 +577,39 @@ class App():
# the currently ongoing tasks. We can also print something more
# intelligent, like how many tasks remain to complete overall.
with self._interrupted():
- click.echo("\nUser interrupted with ^C\n" +
- "\n"
- "Choose one of the following options:\n" +
- " (c)ontinue - Continue queueing jobs as much as possible\n" +
- " (q)uit - Exit after all ongoing jobs complete\n" +
- " (t)erminate - Terminate any ongoing jobs and exit\n" +
- "\n" +
- "Pressing ^C again will terminate jobs and exit\n",
- err=True)
+ click.echo(
+ "\nUser interrupted with ^C\n" + "\n"
+ "Choose one of the following options:\n"
+ + " (c)ontinue - Continue queueing jobs as much as possible\n"
+ + " (q)uit - Exit after all ongoing jobs complete\n"
+ + " (t)erminate - Terminate any ongoing jobs and exit\n"
+ + "\n"
+ + "Pressing ^C again will terminate jobs and exit\n",
+ err=True,
+ )
try:
- choice = click.prompt("Choice:",
- value_proc=_prefix_choice_value_proc(['continue', 'quit', 'terminate']),
- default='continue', err=True)
+ choice = click.prompt(
+ "Choice:",
+ value_proc=_prefix_choice_value_proc(
+ ["continue", "quit", "terminate"]
+ ),
+ default="continue",
+ err=True,
+ )
except click.Abort:
# Ensure a newline after automatically printed '^C'
click.echo("", err=True)
- choice = 'terminate'
+ choice = "terminate"
- if choice == 'terminate':
+ if choice == "terminate":
click.echo("\nTerminating all jobs at user request\n", err=True)
self.stream.terminate()
else:
- if choice == 'quit':
+ if choice == "quit":
click.echo("\nCompleting ongoing tasks before quitting\n", err=True)
self.stream.quit()
- elif choice == 'continue':
+ elif choice == "continue":
click.echo("\nContinuing\n", err=True)
def _tick(self):
@@ -574,9 +639,13 @@ class App():
# the failure message reaches us ??
if not failure:
self._status.clear()
- click.echo("\n\n\nBUG: Message handling out of sync, " +
- "unable to retrieve failure message for element {}\n\n\n\n\n"
- .format(full_name), err=True)
+ click.echo(
+ "\n\n\nBUG: Message handling out of sync, "
+ + "unable to retrieve failure message for element {}\n\n\n\n\n".format(
+ full_name
+ ),
+ err=True,
+ )
else:
self._handle_failure(element, action_name, failure, full_name)
@@ -601,66 +670,92 @@ class App():
# Interactive mode for element failures
with self._interrupted():
- summary = ("\n{} failure on element: {}\n".format(failure.action_name, full_name) +
- "\n" +
- "Choose one of the following options:\n" +
- " (c)ontinue - Continue queueing jobs as much as possible\n" +
- " (q)uit - Exit after all ongoing jobs complete\n" +
- " (t)erminate - Terminate any ongoing jobs and exit\n" +
- " (r)etry - Retry this job\n")
+ summary = (
+ "\n{} failure on element: {}\n".format(failure.action_name, full_name)
+ + "\n"
+ + "Choose one of the following options:\n"
+ + " (c)ontinue - Continue queueing jobs as much as possible\n"
+ + " (q)uit - Exit after all ongoing jobs complete\n"
+ + " (t)erminate - Terminate any ongoing jobs and exit\n"
+ + " (r)etry - Retry this job\n"
+ )
if failure.logfile:
summary += " (l)og - View the full log file\n"
if failure.sandbox:
- summary += " (s)hell - Drop into a shell in the failed build sandbox\n"
+ summary += (
+ " (s)hell - Drop into a shell in the failed build sandbox\n"
+ )
summary += "\nPressing ^C will terminate jobs and exit\n"
- choices = ['continue', 'quit', 'terminate', 'retry']
+ choices = ["continue", "quit", "terminate", "retry"]
if failure.logfile:
- choices += ['log']
+ choices += ["log"]
if failure.sandbox:
- choices += ['shell']
+ choices += ["shell"]
- choice = ''
- while choice not in ['continue', 'quit', 'terminate', 'retry']:
+ choice = ""
+ while choice not in ["continue", "quit", "terminate", "retry"]:
click.echo(summary, err=True)
- self._notify("BuildStream failure", "{} on element {}"
- .format(failure.action_name, full_name))
+ self._notify(
+ "BuildStream failure",
+ "{} on element {}".format(failure.action_name, full_name),
+ )
try:
- choice = click.prompt("Choice:", default='continue', err=True,
- value_proc=_prefix_choice_value_proc(choices))
+ choice = click.prompt(
+ "Choice:",
+ default="continue",
+ err=True,
+ value_proc=_prefix_choice_value_proc(choices),
+ )
except click.Abort:
# Ensure a newline after automatically printed '^C'
click.echo("", err=True)
- choice = 'terminate'
+ choice = "terminate"
# Handle choices which you can come back from
#
- if choice == 'shell':
- click.echo("\nDropping into an interactive shell in the failed build sandbox\n", err=True)
+ if choice == "shell":
+ click.echo(
+ "\nDropping into an interactive shell in the failed build sandbox\n",
+ err=True,
+ )
try:
unique_id, element_key = element
prompt = self.shell_prompt(full_name, element_key)
- self.stream.shell(None, Scope.BUILD, prompt, isolate=True,
- usebuildtree='always', unique_id=unique_id)
+ self.stream.shell(
+ None,
+ Scope.BUILD,
+ prompt,
+ isolate=True,
+ usebuildtree="always",
+ unique_id=unique_id,
+ )
except BstError as e:
- click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
- elif choice == 'log':
- with open(failure.logfile, 'r') as logfile:
+ click.echo(
+ "Error while attempting to create interactive shell: {}".format(
+ e
+ ),
+ err=True,
+ )
+ elif choice == "log":
+ with open(failure.logfile, "r") as logfile:
content = logfile.read()
click.echo_via_pager(content)
- if choice == 'terminate':
+ if choice == "terminate":
click.echo("\nTerminating all jobs\n", err=True)
self.stream.terminate()
else:
- if choice == 'quit':
+ if choice == "quit":
click.echo("\nCompleting ongoing tasks before quitting\n", err=True)
self.stream.quit()
- elif choice == 'continue':
- click.echo("\nContinuing with other non failing elements\n", err=True)
- elif choice == 'retry':
+ elif choice == "continue":
+ click.echo(
+                        "\nContinuing with other non-failing elements\n", err=True
+ )
+ elif choice == "retry":
click.echo("\nRetrying failed job\n", err=True)
unique_id = element[0]
self.stream._failure_retry(action_name, unique_id)
@@ -672,19 +767,21 @@ class App():
def session_start_cb(self):
self._started = True
if self._session_name:
- self.logger.print_heading(self.project,
- self.stream,
- log_file=self._main_options['log_file'],
- styling=self.colors)
+ self.logger.print_heading(
+ self.project,
+ self.stream,
+ log_file=self._main_options["log_file"],
+ styling=self.colors,
+ )
#
# Print a summary of the queues
#
def _print_summary(self):
click.echo("", err=True)
- self.logger.print_summary(self.stream,
- self._main_options['log_file'],
- styling=self.colors)
+ self.logger.print_summary(
+ self.stream, self._main_options["log_file"], styling=self.colors
+ )
# _error_exit()
#
@@ -700,7 +797,9 @@ class App():
def _error_exit(self, error, prefix=None):
click.echo("", err=True)
- if self.context is None or self.context.log_debug is None: # Context might not be initialized, default to cmd
+ if (
+ self.context is None or self.context.log_debug is None
+ ): # Context might not be initialized, default to cmd
debug = self._main_options["debug"]
else:
debug = self.context.log_debug
@@ -716,7 +815,7 @@ class App():
click.echo(main_error, err=True)
if error.detail:
indent = " " * INDENT
- detail = '\n' + indent + indent.join(error.detail.splitlines(True))
+ detail = "\n" + indent + indent.join(error.detail.splitlines(True))
click.echo(detail, err=True)
sys.exit(-1)
@@ -732,7 +831,10 @@ class App():
return
# Hold on to the failure messages
- if message.message_type in [MessageType.FAIL, MessageType.BUG] and message.element_name is not None:
+ if (
+ message.message_type in [MessageType.FAIL, MessageType.BUG]
+ and message.element_name is not None
+ ):
self._fail_messages[message.element_name] = message
# Send to frontend if appropriate
@@ -749,8 +851,8 @@ class App():
self._maybe_render_status()
# Additionally log to a file
- if self._main_options['log_file']:
- click.echo(text, file=self._main_options['log_file'], color=False, nl=False)
+ if self._main_options["log_file"]:
+ click.echo(text, file=self._main_options["log_file"], color=False, nl=False)
@contextmanager
def _interrupted(self):
@@ -764,25 +866,27 @@ class App():
# Some validation routines for project initialization
#
def _assert_format_version(self, format_version):
- message = "The version must be supported by this " + \
- "version of buildstream (0 - {})\n".format(BST_FORMAT_VERSION)
+ message = (
+ "The version must be supported by this "
+ + "version of buildstream (0 - {})\n".format(BST_FORMAT_VERSION)
+ )
# Validate that it is an integer
try:
number = int(format_version)
except ValueError as e:
- raise AppError(message, reason='invalid-format-version') from e
+ raise AppError(message, reason="invalid-format-version") from e
# Validate that the specified version is supported
if number < 0 or number > BST_FORMAT_VERSION:
- raise AppError(message, reason='invalid-format-version')
+ raise AppError(message, reason="invalid-format-version")
def _assert_element_path(self, element_path):
message = "The element path cannot be an absolute path or contain any '..' components\n"
# Validate the path is not absolute
if os.path.isabs(element_path):
- raise AppError(message, reason='invalid-element-path')
+ raise AppError(message, reason="invalid-element-path")
# Validate that the path does not contain any '..' components
path = element_path
@@ -790,8 +894,8 @@ class App():
split = os.path.split(path)
path = split[0]
basename = split[1]
- if basename == '..':
- raise AppError(message, reason='invalid-element-path')
+ if basename == "..":
+ raise AppError(message, reason="invalid-element-path")
# _init_project_interactive()
#
@@ -807,11 +911,12 @@ class App():
# format_version (int): The user selected format version
# element_path (str): The user selected element path
#
- def _init_project_interactive(self, project_name, format_version=BST_FORMAT_VERSION, element_path='elements'):
-
+ def _init_project_interactive(
+ self, project_name, format_version=BST_FORMAT_VERSION, element_path="elements"
+ ):
def project_name_proc(user_input):
try:
- node._assert_symbol_name(user_input, 'project name')
+ node._assert_symbol_name(user_input, "project name")
except LoadError as e:
message = "{}\n\n{}\n".format(e, e.detail)
raise UsageError(message) from e
@@ -831,63 +936,122 @@ class App():
raise UsageError(str(e)) from e
return user_input
- w = TextWrapper(initial_indent=' ', subsequent_indent=' ', width=79)
+ w = TextWrapper(initial_indent=" ", subsequent_indent=" ", width=79)
# Collect project name
click.echo("", err=True)
- click.echo(self._content_profile.fmt("Choose a unique name for your project"), err=True)
- click.echo(self._format_profile.fmt("-------------------------------------"), err=True)
+ click.echo(
+ self._content_profile.fmt("Choose a unique name for your project"), err=True
+ )
+ click.echo(
+ self._format_profile.fmt("-------------------------------------"), err=True
+ )
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The project name is a unique symbol for your project and will be used "
- "to distinguish your project from others in user preferences, namspaceing "
- "of your project's artifacts in shared artifact caches, and in any case where "
- "BuildStream needs to distinguish between multiple projects.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The project name is a unique symbol for your project and will be used "
+                    "to distinguish your project from others in user preferences, namespacing "
+ "of your project's artifacts in shared artifact caches, and in any case where "
+ "BuildStream needs to distinguish between multiple projects."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The project name must contain only alphanumeric characters, "
- "may not start with a digit, and may contain dashes or underscores.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The project name must contain only alphanumeric characters, "
+ "may not start with a digit, and may contain dashes or underscores."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- project_name = click.prompt(self._content_profile.fmt("Project name"),
- value_proc=project_name_proc, err=True)
+ project_name = click.prompt(
+ self._content_profile.fmt("Project name"),
+ value_proc=project_name_proc,
+ err=True,
+ )
click.echo("", err=True)
# Collect format version
- click.echo(self._content_profile.fmt("Select the minimum required format version for your project"), err=True)
- click.echo(self._format_profile.fmt("-----------------------------------------------------------"), err=True)
+ click.echo(
+ self._content_profile.fmt(
+ "Select the minimum required format version for your project"
+ ),
+ err=True,
+ )
+ click.echo(
+ self._format_profile.fmt(
+ "-----------------------------------------------------------"
+ ),
+ err=True,
+ )
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The format version is used to provide users who build your project "
- "with a helpful error message in the case that they do not have a recent "
- "enough version of BuildStream supporting all the features which your "
- "project might use.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The format version is used to provide users who build your project "
+ "with a helpful error message in the case that they do not have a recent "
+ "enough version of BuildStream supporting all the features which your "
+ "project might use."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The lowest version allowed is 0, the currently installed version of BuildStream "
- "supports up to format version {}.".format(BST_FORMAT_VERSION))), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The lowest version allowed is 0, the currently installed version of BuildStream "
+ "supports up to format version {}.".format(BST_FORMAT_VERSION)
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- format_version = click.prompt(self._content_profile.fmt("Format version"),
- value_proc=format_version_proc,
- default=format_version, err=True)
+ format_version = click.prompt(
+ self._content_profile.fmt("Format version"),
+ value_proc=format_version_proc,
+ default=format_version,
+ err=True,
+ )
click.echo("", err=True)
# Collect element path
click.echo(self._content_profile.fmt("Select the element path"), err=True)
click.echo(self._format_profile.fmt("-----------------------"), err=True)
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("The element path is a project subdirectory where element .bst files are stored "
- "within your project.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "The element path is a project subdirectory where element .bst files are stored "
+ "within your project."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- click.echo(self._detail_profile.fmt(
- w.fill("Elements will be displayed in logs as filenames relative to "
- "the element path, and similarly, dependencies must be expressed as filenames "
- "relative to the element path.")), err=True)
+ click.echo(
+ self._detail_profile.fmt(
+ w.fill(
+ "Elements will be displayed in logs as filenames relative to "
+ "the element path, and similarly, dependencies must be expressed as filenames "
+ "relative to the element path."
+ )
+ ),
+ err=True,
+ )
click.echo("", err=True)
- element_path = click.prompt(self._content_profile.fmt("Element path"),
- value_proc=element_path_proc,
- default=element_path, err=True)
+ element_path = click.prompt(
+ self._content_profile.fmt("Element path"),
+ value_proc=element_path_proc,
+ default=element_path,
+ err=True,
+ )
return (project_name, format_version, element_path)
@@ -905,9 +1069,10 @@ class App():
# ask for a new input.
#
def _prefix_choice_value_proc(choices):
-
def value_proc(user_input):
- remaining_candidate = [choice for choice in choices if choice.startswith(user_input)]
+ remaining_candidate = [
+ choice for choice in choices if choice.startswith(user_input)
+ ]
if not remaining_candidate:
raise UsageError("Expected one of {}, got {}".format(choices, user_input))
@@ -915,6 +1080,10 @@ def _prefix_choice_value_proc(choices):
if len(remaining_candidate) == 1:
return remaining_candidate[0]
else:
- raise UsageError("Ambiguous input. '{}' can refer to one of {}".format(user_input, remaining_candidate))
+ raise UsageError(
+ "Ambiguous input. '{}' can refer to one of {}".format(
+ user_input, remaining_candidate
+ )
+ )
return value_proc
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index 67ea02d59..9a73ab375 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -17,8 +17,8 @@ from ..utils import _get_compression, UtilError
# Helper classes and methods for Click #
##################################################################
-class FastEnumType(click.Choice):
+class FastEnumType(click.Choice):
def __init__(self, enum):
self._enum = enum
super().__init__(enum.values())
@@ -45,7 +45,7 @@ class FastEnumType(click.Choice):
#
def search_command(args, *, context=None):
if context is None:
- context = cli.make_context('bst', args, resilient_parsing=True)
+ context = cli.make_context("bst", args, resilient_parsing=True)
# Loop into the deepest command
command = cli
@@ -54,9 +54,9 @@ def search_command(args, *, context=None):
command = command_ctx.command.get_command(command_ctx, cmd)
if command is None:
return None
- command_ctx = command.make_context(command.name, [command.name],
- parent=command_ctx,
- resilient_parsing=True)
+ command_ctx = command.make_context(
+ command.name, [command.name], parent=command_ctx, resilient_parsing=True
+ )
return command_ctx
@@ -64,9 +64,16 @@ def search_command(args, *, context=None):
# Completion for completing command names as help arguments
def complete_commands(cmd, args, incomplete):
command_ctx = search_command(args[1:])
- if command_ctx and command_ctx.command and isinstance(command_ctx.command, click.MultiCommand):
- return [subcommand + " " for subcommand in command_ctx.command.list_commands(command_ctx)
- if not command_ctx.command.get_command(command_ctx, subcommand).hidden]
+ if (
+ command_ctx
+ and command_ctx.command
+ and isinstance(command_ctx.command, click.MultiCommand)
+ ):
+ return [
+ subcommand + " "
+ for subcommand in command_ctx.command.list_commands(command_ctx)
+ if not command_ctx.command.get_command(command_ctx, subcommand).hidden
+ ]
return []
@@ -80,18 +87,19 @@ def complete_target(args, incomplete):
"""
from .. import utils
- project_conf = 'project.conf'
+
+ project_conf = "project.conf"
# First resolve the directory, in case there is an
# active --directory/-C option
#
- base_directory = '.'
+ base_directory = "."
idx = -1
try:
- idx = args.index('-C')
+ idx = args.index("-C")
except ValueError:
try:
- idx = args.index('--directory')
+ idx = args.index("--directory")
except ValueError:
pass
@@ -100,7 +108,9 @@ def complete_target(args, incomplete):
else:
# Check if this directory or any of its parent directories
# contain a project config file
- base_directory, _ = utils._search_upward_for_files(base_directory, [project_conf])
+ base_directory, _ = utils._search_upward_for_files(
+ base_directory, [project_conf]
+ )
if base_directory is None:
# No project_conf was found in base_directory or its parents, no need
@@ -116,7 +126,7 @@ def complete_target(args, incomplete):
return []
# The project is not required to have an element-path
- element_directory = project.get_str('element-path', default='')
+ element_directory = project.get_str("element-path", default="")
# If a project was loaded, use its element-path to
# adjust our completion's base directory
@@ -132,19 +142,20 @@ def complete_target(args, incomplete):
def complete_artifact(orig_args, args, incomplete):
from .._context import Context
+
with Context(use_casd=False) as ctx:
config = None
if orig_args:
for i, arg in enumerate(orig_args):
- if arg in ('-c', '--config'):
+ if arg in ("-c", "--config"):
try:
config = orig_args[i + 1]
except IndexError:
pass
if args:
for i, arg in enumerate(args):
- if arg in ('-c', '--config'):
+ if arg in ("-c", "--config"):
try:
config = args[i + 1]
except IndexError:
@@ -153,7 +164,11 @@ def complete_artifact(orig_args, args, incomplete):
# element targets are valid artifact names
complete_list = complete_target(args, incomplete)
- complete_list.extend(ref for ref in ctx.artifactcache.list_artifacts() if ref.startswith(incomplete))
+ complete_list.extend(
+ ref
+ for ref in ctx.artifactcache.list_artifacts()
+ if ref.startswith(incomplete)
+ )
return complete_list
@@ -167,38 +182,45 @@ def override_completions(orig_args, cmd, cmd_param, args, incomplete):
:return: all the possible user-specified completions for the param
"""
- if cmd.name == 'help':
+ if cmd.name == "help":
return complete_commands(cmd, args, incomplete)
# We can't easily extend click's data structures without
# modifying click itself, so just do some weak special casing
# right here and select which parameters we want to handle specially.
if isinstance(cmd_param.type, click.Path):
- if (cmd_param.name == 'elements' or
- cmd_param.name == 'element' or
- cmd_param.name == 'except_' or
- cmd_param.opts == ['--track'] or
- cmd_param.opts == ['--track-except']):
+ if (
+ cmd_param.name == "elements"
+ or cmd_param.name == "element"
+ or cmd_param.name == "except_"
+ or cmd_param.opts == ["--track"]
+ or cmd_param.opts == ["--track-except"]
+ ):
return complete_target(args, incomplete)
- if cmd_param.name == 'artifacts' or cmd_param.name == 'target':
+ if cmd_param.name == "artifacts" or cmd_param.name == "target":
return complete_artifact(orig_args, args, incomplete)
raise CompleteUnhandled()
def validate_output_streams():
- if sys.platform == 'win32':
+ if sys.platform == "win32":
# Windows does not support 'fcntl', the module is unavailable there as
# of Python 3.7, therefore early-out here.
return
import fcntl
+
for stream in (sys.stdout, sys.stderr):
fileno = stream.fileno()
flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
if flags & os.O_NONBLOCK:
- click.echo("{} is currently set to O_NONBLOCK, try opening a new shell"
- .format(stream.name), err=True)
+ click.echo(
+ "{} is currently set to O_NONBLOCK, try opening a new shell".format(
+ stream.name
+ ),
+ err=True,
+ )
sys.exit(-1)
@@ -210,7 +232,8 @@ def handle_bst_force_start_method_env():
if existing_start_method is None:
multiprocessing.set_start_method(start_method)
print(
- bst_force_start_method_str + ": multiprocessing start method forced to:",
+ bst_force_start_method_str
+ + ": multiprocessing start method forced to:",
start_method,
file=sys.stderr,
flush=True,
@@ -220,14 +243,16 @@ def handle_bst_force_start_method_env():
# multiple times in the same executable, so guard against that
# here.
print(
- bst_force_start_method_str + ": multiprocessing start method already set to:",
+ bst_force_start_method_str
+ + ": multiprocessing start method already set to:",
existing_start_method,
file=sys.stderr,
flush=True,
)
else:
print(
- bst_force_start_method_str + ": cannot set multiprocessing start method to:",
+ bst_force_start_method_str
+ + ": cannot set multiprocessing start method to:",
start_method,
", already set to:",
existing_start_method,
@@ -237,8 +262,9 @@ def handle_bst_force_start_method_env():
sys.exit(-1)
-def override_main(self, args=None, prog_name=None, complete_var=None,
- standalone_mode=True, **extra):
+def override_main(
+ self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra
+):
# Hook for the Bash completion. This only activates if the Bash
# completion is actually enabled, otherwise this is quite a fast
@@ -250,7 +276,7 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
#
# The below is a quicker exit path for the sake
# of making completions respond faster.
- if 'BST_TEST_SUITE' not in os.environ:
+ if "BST_TEST_SUITE" not in os.environ:
sys.stdout.flush()
sys.stderr.flush()
os._exit(0)
@@ -269,14 +295,20 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
    # case of testing, our tests precede our entrypoint, so we do our best.
handle_bst_force_start_method_env()
- original_main(self, args=args, prog_name=prog_name, complete_var=None,
- standalone_mode=standalone_mode, **extra)
+ original_main(
+ self,
+ args=args,
+ prog_name=prog_name,
+ complete_var=None,
+ standalone_mode=standalone_mode,
+ **extra
+ )
original_main = click.BaseCommand.main
# Disable type checking since mypy doesn't support assigning to a method.
# See https://github.com/python/mypy/issues/2427.
-click.BaseCommand.main = override_main # type: ignore
+click.BaseCommand.main = override_main # type: ignore
##################################################################
@@ -287,58 +319,118 @@ def print_version(ctx, param, value):
return
from .. import __version__
+
click.echo(__version__)
ctx.exit()
-@click.group(context_settings=dict(help_option_names=['-h', '--help']))
-@click.option('--version', is_flag=True, callback=print_version,
- expose_value=False, is_eager=True)
-@click.option('--config', '-c',
- type=click.Path(exists=True, dir_okay=False, readable=True),
- help="Configuration file to use")
-@click.option('--directory', '-C', default=None, # Set to os.getcwd() later.
- type=click.Path(file_okay=False, readable=True),
- help="Project directory (default: current directory)")
-@click.option('--on-error', default=None,
- type=FastEnumType(_SchedulerErrorAction),
- help="What to do when an error is encountered")
-@click.option('--fetchers', type=click.INT, default=None,
- help="Maximum simultaneous download tasks")
-@click.option('--builders', type=click.INT, default=None,
- help="Maximum simultaneous build tasks")
-@click.option('--pushers', type=click.INT, default=None,
- help="Maximum simultaneous upload tasks")
-@click.option('--max-jobs', type=click.INT, default=None,
- help="Number of parallel jobs allowed for a given build task")
-@click.option('--network-retries', type=click.INT, default=None,
- help="Maximum retries for network tasks")
-@click.option('--no-interactive', is_flag=True,
- help="Force non interactive mode, otherwise this is automatically decided")
-@click.option('--verbose/--no-verbose', default=None,
- help="Be extra verbose")
-@click.option('--debug/--no-debug', default=None,
- help="Print debugging output")
-@click.option('--error-lines', type=click.INT, default=None,
- help="Maximum number of lines to show from a task log")
-@click.option('--message-lines', type=click.INT, default=None,
- help="Maximum number of lines to show in a detailed message")
-@click.option('--log-file',
- type=click.File(mode='w', encoding='UTF-8'),
- help="A file to store the main log (allows storing the main log while in interactive mode)")
-@click.option('--colors/--no-colors', default=None,
- help="Force enable/disable ANSI color codes in output")
-@click.option('--strict/--no-strict', default=None, is_flag=True,
- help="Elements must be rebuilt when their dependencies have changed")
-@click.option('--option', '-o', type=click.Tuple([str, str]), multiple=True, metavar='OPTION VALUE',
- help="Specify a project option")
-@click.option('--default-mirror', default=None,
- help="The mirror to fetch from first, before attempting other mirrors")
-@click.option('--pull-buildtrees', is_flag=True, default=None,
- help="Include an element's build tree when pulling remote element artifacts")
-@click.option('--cache-buildtrees', default=None,
- type=FastEnumType(_CacheBuildTrees),
- help="Cache artifact build tree content on creation")
+@click.group(context_settings=dict(help_option_names=["-h", "--help"]))
+@click.option(
+ "--version", is_flag=True, callback=print_version, expose_value=False, is_eager=True
+)
+@click.option(
+ "--config",
+ "-c",
+ type=click.Path(exists=True, dir_okay=False, readable=True),
+ help="Configuration file to use",
+)
+@click.option(
+ "--directory",
+ "-C",
+ default=None, # Set to os.getcwd() later.
+ type=click.Path(file_okay=False, readable=True),
+ help="Project directory (default: current directory)",
+)
+@click.option(
+ "--on-error",
+ default=None,
+ type=FastEnumType(_SchedulerErrorAction),
+ help="What to do when an error is encountered",
+)
+@click.option(
+ "--fetchers",
+ type=click.INT,
+ default=None,
+ help="Maximum simultaneous download tasks",
+)
+@click.option(
+ "--builders", type=click.INT, default=None, help="Maximum simultaneous build tasks"
+)
+@click.option(
+ "--pushers", type=click.INT, default=None, help="Maximum simultaneous upload tasks"
+)
+@click.option(
+ "--max-jobs",
+ type=click.INT,
+ default=None,
+ help="Number of parallel jobs allowed for a given build task",
+)
+@click.option(
+ "--network-retries",
+ type=click.INT,
+ default=None,
+ help="Maximum retries for network tasks",
+)
+@click.option(
+ "--no-interactive",
+ is_flag=True,
+ help="Force non interactive mode, otherwise this is automatically decided",
+)
+@click.option("--verbose/--no-verbose", default=None, help="Be extra verbose")
+@click.option("--debug/--no-debug", default=None, help="Print debugging output")
+@click.option(
+ "--error-lines",
+ type=click.INT,
+ default=None,
+ help="Maximum number of lines to show from a task log",
+)
+@click.option(
+ "--message-lines",
+ type=click.INT,
+ default=None,
+ help="Maximum number of lines to show in a detailed message",
+)
+@click.option(
+ "--log-file",
+ type=click.File(mode="w", encoding="UTF-8"),
+ help="A file to store the main log (allows storing the main log while in interactive mode)",
+)
+@click.option(
+ "--colors/--no-colors",
+ default=None,
+ help="Force enable/disable ANSI color codes in output",
+)
+@click.option(
+ "--strict/--no-strict",
+ default=None,
+ is_flag=True,
+ help="Elements must be rebuilt when their dependencies have changed",
+)
+@click.option(
+ "--option",
+ "-o",
+ type=click.Tuple([str, str]),
+ multiple=True,
+ metavar="OPTION VALUE",
+ help="Specify a project option",
+)
+@click.option(
+ "--default-mirror",
+ default=None,
+ help="The mirror to fetch from first, before attempting other mirrors",
+)
+@click.option(
+ "--pull-buildtrees",
+ is_flag=True,
+ default=None,
+ help="Include an element's build tree when pulling remote element artifacts",
+)
+@click.option(
+ "--cache-buildtrees",
+ default=None,
+ type=FastEnumType(_CacheBuildTrees),
+ help="Cache artifact build tree content on creation",
+)
@click.pass_context
def cli(context, **kwargs):
"""Build and manipulate BuildStream projects
@@ -357,17 +449,24 @@ def cli(context, **kwargs):
##################################################################
# Help Command #
##################################################################
-@cli.command(name="help", short_help="Print usage information",
- context_settings={"help_option_names": []})
-@click.argument("command", nargs=-1, metavar='COMMAND')
+@cli.command(
+ name="help",
+ short_help="Print usage information",
+ context_settings={"help_option_names": []},
+)
+@click.argument("command", nargs=-1, metavar="COMMAND")
@click.pass_context
def help_command(ctx, command):
"""Print usage information about a given command
"""
command_ctx = search_command(command, context=ctx.parent)
if not command_ctx:
- click.echo("Not a valid command: '{} {}'"
- .format(ctx.parent.info_name, " ".join(command)), err=True)
+ click.echo(
+ "Not a valid command: '{} {}'".format(
+ ctx.parent.info_name, " ".join(command)
+ ),
+ err=True,
+ )
sys.exit(-1)
click.echo(command_ctx.command.get_help(command_ctx), err=True)
@@ -377,24 +476,42 @@ def help_command(ctx, command):
detail = " "
if command:
detail = " {} ".format(" ".join(command))
- click.echo("\nFor usage on a specific command: {} help{}COMMAND"
- .format(ctx.parent.info_name, detail), err=True)
+ click.echo(
+ "\nFor usage on a specific command: {} help{}COMMAND".format(
+ ctx.parent.info_name, detail
+ ),
+ err=True,
+ )
##################################################################
# Init Command #
##################################################################
@cli.command(short_help="Initialize a new BuildStream project")
-@click.option('--project-name', type=click.STRING,
- help="The project name to use")
-@click.option('--format-version', type=click.INT, default=BST_FORMAT_VERSION, show_default=True,
- help="The required format version")
-@click.option('--element-path', type=click.Path(), default="elements", show_default=True,
- help="The subdirectory to store elements in")
-@click.option('--force', '-f', is_flag=True,
- help="Allow overwriting an existing project.conf")
-@click.argument('target-directory', nargs=1, required=False,
- type=click.Path(file_okay=False, writable=True))
+@click.option("--project-name", type=click.STRING, help="The project name to use")
+@click.option(
+ "--format-version",
+ type=click.INT,
+ default=BST_FORMAT_VERSION,
+ show_default=True,
+ help="The required format version",
+)
+@click.option(
+ "--element-path",
+ type=click.Path(),
+ default="elements",
+ show_default=True,
+ help="The subdirectory to store elements in",
+)
+@click.option(
+ "--force", "-f", is_flag=True, help="Allow overwriting an existing project.conf"
+)
+@click.argument(
+ "target-directory",
+ nargs=1,
+ required=False,
+ type=click.Path(file_okay=False, writable=True),
+)
@click.pass_obj
def init(app, project_name, format_version, element_path, force, target_directory):
"""Initialize a new BuildStream project
@@ -405,20 +522,29 @@ def init(app, project_name, format_version, element_path, force, target_director
Unless `--project-name` is specified, this will be an
interactive session.
"""
- app.init_project(project_name, format_version, element_path, force, target_directory)
+ app.init_project(
+ project_name, format_version, element_path, force, target_directory
+ )
##################################################################
# Build Command #
##################################################################
@cli.command(short_help="Build elements in a pipeline")
-@click.option('--deps', '-d', default=None,
- type=click.Choice(['plan', 'all']),
- help='The dependencies to build')
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--deps",
+ "-d",
+ default=None,
+ type=click.Choice(["plan", "all"]),
+ help="The dependencies to build",
+)
+@click.option(
+ "--remote",
+ "-r",
+ default=None,
+ help="The URL of the remote cache (defaults to the first configured cache)",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def build(app, elements, deps, remote):
"""Build elements in a pipeline
@@ -447,30 +573,50 @@ def build(app, elements, deps, remote):
# Junction elements cannot be built, exclude them from default targets
ignore_junction_targets = True
- app.stream.build(elements,
- selection=deps,
- ignore_junction_targets=ignore_junction_targets,
- remote=remote)
+ app.stream.build(
+ elements,
+ selection=deps,
+ ignore_junction_targets=ignore_junction_targets,
+ remote=remote,
+ )
##################################################################
# Show Command #
##################################################################
@cli.command(short_help="Show elements in the pipeline")
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies")
-@click.option('--deps', '-d', default='all', show_default=True,
- type=click.Choice(['none', 'plan', 'run', 'build', 'all']),
- help='The dependencies to show')
-@click.option('--order', default="stage", show_default=True,
- type=click.Choice(['stage', 'alpha']),
- help='Staging or alphabetic ordering of dependencies')
-@click.option('--format', '-f', 'format_', metavar='FORMAT', default=None,
- type=click.STRING,
- help='Format string for each element')
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="all",
+ show_default=True,
+ type=click.Choice(["none", "plan", "run", "build", "all"]),
+ help="The dependencies to show",
+)
+@click.option(
+ "--order",
+ default="stage",
+ show_default=True,
+ type=click.Choice(["stage", "alpha"]),
+ help="Staging or alphabetic ordering of dependencies",
+)
+@click.option(
+ "--format",
+ "-f",
+ "format_",
+ metavar="FORMAT",
+ default=None,
+ type=click.STRING,
+ help="Format string for each element",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def show(app, elements, deps, except_, order, format_):
"""Show elements in the pipeline
@@ -533,9 +679,9 @@ def show(app, elements, deps, except_, order, format_):
if not elements:
elements = app.project.get_default_targets()
- dependencies = app.stream.load_selection(elements,
- selection=deps,
- except_targets=except_)
+ dependencies = app.stream.load_selection(
+ elements, selection=deps, except_targets=except_
+ )
if order == "alpha":
dependencies = sorted(dependencies)
@@ -551,25 +697,48 @@ def show(app, elements, deps, except_, order, format_):
# Shell Command #
##################################################################
@cli.command(short_help="Shell into an element's sandbox environment")
-@click.option('--build', '-b', 'build_', is_flag=True,
- help='Stage dependencies and sources to build')
-@click.option('--sysroot', '-s', default=None,
- type=click.Path(exists=True, file_okay=False, readable=True),
- help="An existing sysroot")
-@click.option('--mount', type=click.Tuple([click.Path(exists=True), str]), multiple=True,
- metavar='HOSTPATH PATH',
- help="Mount a file or directory into the sandbox")
-@click.option('--isolate', is_flag=True,
- help='Create an isolated build sandbox')
-@click.option('--use-buildtree', '-t', 'cli_buildtree', type=click.Choice(['ask', 'try', 'always', 'never']),
- default='ask', show_default=True,
- help=('Use a buildtree. If `always` is set, will always fail to '
- 'build if a buildtree is not available.'))
-@click.option('--pull', 'pull_', is_flag=True,
- help='Attempt to pull missing or incomplete artifacts')
-@click.argument('element', required=False,
- type=click.Path(readable=False))
-@click.argument('command', type=click.STRING, nargs=-1)
+@click.option(
+ "--build",
+ "-b",
+ "build_",
+ is_flag=True,
+ help="Stage dependencies and sources to build",
+)
+@click.option(
+ "--sysroot",
+ "-s",
+ default=None,
+ type=click.Path(exists=True, file_okay=False, readable=True),
+ help="An existing sysroot",
+)
+@click.option(
+ "--mount",
+ type=click.Tuple([click.Path(exists=True), str]),
+ multiple=True,
+ metavar="HOSTPATH PATH",
+ help="Mount a file or directory into the sandbox",
+)
+@click.option("--isolate", is_flag=True, help="Create an isolated build sandbox")
+@click.option(
+ "--use-buildtree",
+ "-t",
+ "cli_buildtree",
+ type=click.Choice(["ask", "try", "always", "never"]),
+ default="ask",
+ show_default=True,
+ help=(
+ "Use a buildtree. If `always` is set, will always fail to "
+ "build if a buildtree is not available."
+ ),
+)
+@click.option(
+ "--pull",
+ "pull_",
+ is_flag=True,
+ help="Attempt to pull missing or incomplete artifacts",
+)
+@click.argument("element", required=False, type=click.Path(readable=False))
+@click.argument("command", type=click.STRING, nargs=-1)
@click.pass_obj
def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, command):
"""Run a command in the target element's sandbox environment
@@ -613,8 +782,9 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
if not element:
raise AppError('Missing argument "ELEMENT".')
- elements = app.stream.load_selection((element,), selection=selection,
- use_artifact_config=True)
+ elements = app.stream.load_selection(
+ (element,), selection=selection, use_artifact_config=True
+ )
# last one will be the element we want to stage, previous ones are
# elements to try and pull
@@ -625,10 +795,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
element_key = element._get_display_key()
prompt = app.shell_prompt(element_name, element_key)
- mounts = [
- HostMount(path, host_path)
- for host_path, path in mount
- ]
+ mounts = [HostMount(path, host_path) for host_path, path in mount]
cached = element._cached_buildtree()
buildtree_exists = element._buildtree_exists()
@@ -637,27 +804,38 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
if buildtree_exists or pull_:
use_buildtree = cli_buildtree
if not cached and use_buildtree == "always":
- click.echo("WARNING: buildtree is not cached locally, will attempt to pull from available remotes",
- err=True)
+ click.echo(
+ "WARNING: buildtree is not cached locally, will attempt to pull from available remotes",
+ err=True,
+ )
else:
if cli_buildtree == "always":
# Exit early if it won't be possible to even fetch a buildtree with always option
- raise AppError("Artifact was created without buildtree, unable to launch shell with it")
- click.echo("WARNING: Artifact created without buildtree, shell will be loaded without it",
- err=True)
+ raise AppError(
+ "Artifact was created without buildtree, unable to launch shell with it"
+ )
+ click.echo(
+ "WARNING: Artifact created without buildtree, shell will be loaded without it",
+ err=True,
+ )
else:
# If the value has defaulted to ask and in non interactive mode, don't consider the buildtree, this
# being the default behaviour of the command
if app.interactive and cli_buildtree == "ask":
- if cached and bool(click.confirm('Do you want to use the cached buildtree?')):
+ if cached and bool(
+ click.confirm("Do you want to use the cached buildtree?")
+ ):
use_buildtree = "always"
elif buildtree_exists:
try:
- choice = click.prompt("Do you want to pull & use a cached buildtree?",
- type=click.Choice(['try', 'always', 'never']),
- err=True, show_choices=True)
+ choice = click.prompt(
+ "Do you want to pull & use a cached buildtree?",
+ type=click.Choice(["try", "always", "never"]),
+ err=True,
+ show_choices=True,
+ )
except click.Abort:
- click.echo('Aborting', err=True)
+ click.echo("Aborting", err=True)
sys.exit(-1)
if choice != "never":
@@ -668,15 +846,21 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
click.echo("WARNING: using a buildtree from a failed build.", err=True)
try:
- exitcode = app.stream.shell(element, scope, prompt,
- directory=sysroot,
- mounts=mounts,
- isolate=isolate,
- command=command,
- usebuildtree=use_buildtree,
- pull_dependencies=pull_dependencies)
+ exitcode = app.stream.shell(
+ element,
+ scope,
+ prompt,
+ directory=sysroot,
+ mounts=mounts,
+ isolate=isolate,
+ command=command,
+ usebuildtree=use_buildtree,
+ pull_dependencies=pull_dependencies,
+ )
except BstError as e:
- raise AppError("Error launching shell: {}".format(e), detail=e.detail) from e
+ raise AppError(
+ "Error launching shell: {}".format(e), detail=e.detail
+ ) from e
# If there were no errors, we return the shell's exit code here.
sys.exit(exitcode)
@@ -694,20 +878,40 @@ def source():
# Source Fetch Command #
##################################################################
@source.command(name="fetch", short_help="Fetch sources in a pipeline")
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies from fetching")
-@click.option('--deps', '-d', default='plan', show_default=True,
- type=click.Choice(['none', 'plan', 'all']),
- help='The dependencies to fetch')
-@click.option('--track', 'track_', is_flag=True,
- help="Track new source references before fetching")
-@click.option('--track-cross-junctions', '-J', is_flag=True,
- help="Allow tracking to cross junction boundaries")
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote source cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies from fetching",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="plan",
+ show_default=True,
+ type=click.Choice(["none", "plan", "all"]),
+ help="The dependencies to fetch",
+)
+@click.option(
+ "--track",
+ "track_",
+ is_flag=True,
+ help="Track new source references before fetching",
+)
+@click.option(
+ "--track-cross-junctions",
+ "-J",
+ is_flag=True,
+ help="Allow tracking to cross junction boundaries",
+)
+@click.option(
+ "--remote",
+ "-r",
+ default=None,
+ help="The URL of the remote source cache (defaults to the first configured cache)",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, remote):
"""Fetch sources required to build the pipeline
@@ -734,40 +938,57 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, re
from .._pipeline import PipelineSelection
if track_cross_junctions and not track_:
- click.echo("ERROR: The --track-cross-junctions option can only be used with --track", err=True)
+ click.echo(
+ "ERROR: The --track-cross-junctions option can only be used with --track",
+ err=True,
+ )
sys.exit(-1)
if track_ and deps == PipelineSelection.PLAN:
- click.echo("WARNING: --track specified for tracking of a build plan\n\n"
- "Since tracking modifies the build plan, all elements will be tracked.", err=True)
+ click.echo(
+ "WARNING: --track specified for tracking of a build plan\n\n"
+ "Since tracking modifies the build plan, all elements will be tracked.",
+ err=True,
+ )
deps = PipelineSelection.ALL
with app.initialized(session_name="Fetch"):
if not elements:
elements = app.project.get_default_targets()
- app.stream.fetch(elements,
- selection=deps,
- except_targets=except_,
- track_targets=track_,
- track_cross_junctions=track_cross_junctions,
- remote=remote)
+ app.stream.fetch(
+ elements,
+ selection=deps,
+ except_targets=except_,
+ track_targets=track_,
+ track_cross_junctions=track_cross_junctions,
+ remote=remote,
+ )
##################################################################
# Source Track Command #
##################################################################
@source.command(name="track", short_help="Track new source references")
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies from tracking")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependencies to track')
-@click.option('--cross-junctions', '-J', is_flag=True,
- help="Allow crossing junction boundaries")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies from tracking",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependencies to track",
+)
+@click.option(
+ "--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries"
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def source_track(app, elements, deps, except_, cross_junctions):
"""Consults the specified tracking branches for new versions available
@@ -797,41 +1018,61 @@ def source_track(app, elements, deps, except_, cross_junctions):
# Substitute 'none' for 'redirect' so that element redirections
# will be done
- if deps == 'none':
- deps = 'redirect'
- app.stream.track(elements,
- selection=deps,
- except_targets=except_,
- cross_junctions=cross_junctions)
+ if deps == "none":
+ deps = "redirect"
+ app.stream.track(
+ elements,
+ selection=deps,
+ except_targets=except_,
+ cross_junctions=cross_junctions,
+ )
##################################################################
# Source Checkout Command #
##################################################################
-@source.command(name='checkout', short_help='Checkout sources of an element')
-@click.option('--force', '-f', is_flag=True,
- help="Allow files to be overwritten")
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['build', 'none', 'run', 'all']),
- help='The dependencies whose sources to checkout')
-@click.option('--tar', default=None, metavar='LOCATION',
- type=click.Path(),
- help="Create a tarball containing the sources instead "
- "of a file tree.")
-@click.option('--compression', default=None,
- type=click.Choice(['gz', 'xz', 'bz2']),
- help="The compression option of the tarball created.")
-@click.option('--include-build-scripts', 'build_scripts', is_flag=True)
-@click.option('--directory', default='source-checkout',
- type=click.Path(file_okay=False),
- help="The directory to checkout the sources to")
-@click.argument('element', required=False, type=click.Path(readable=False))
+@source.command(name="checkout", short_help="Checkout sources of an element")
+@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["build", "none", "run", "all"]),
+ help="The dependencies whose sources to checkout",
+)
+@click.option(
+ "--tar",
+ default=None,
+ metavar="LOCATION",
+ type=click.Path(),
+ help="Create a tarball containing the sources instead " "of a file tree.",
+)
+@click.option(
+ "--compression",
+ default=None,
+ type=click.Choice(["gz", "xz", "bz2"]),
+ help="The compression option of the tarball created.",
+)
+@click.option("--include-build-scripts", "build_scripts", is_flag=True)
+@click.option(
+ "--directory",
+ default="source-checkout",
+ type=click.Path(file_okay=False),
+ help="The directory to checkout the sources to",
+)
+@click.argument("element", required=False, type=click.Path(readable=False))
@click.pass_obj
-def source_checkout(app, element, directory, force, deps, except_,
- tar, compression, build_scripts):
+def source_checkout(
+ app, element, directory, force, deps, except_, tar, compression, build_scripts
+):
"""Checkout sources of an element to the specified location
When this command is executed from a workspace directory, the default
@@ -856,14 +1097,16 @@ def source_checkout(app, element, directory, force, deps, except_,
if not element:
raise AppError('Missing argument "ELEMENT".')
- app.stream.source_checkout(element,
- location=location,
- force=force,
- deps=deps,
- except_targets=except_,
- tar=bool(tar),
- compression=compression,
- include_build_scripts=build_scripts)
+ app.stream.source_checkout(
+ element,
+ location=location,
+ force=force,
+ deps=deps,
+ except_targets=except_,
+ tar=bool(tar),
+ compression=compression,
+ include_build_scripts=build_scripts,
+ )
##################################################################
@@ -877,39 +1120,57 @@ def workspace():
##################################################################
# Workspace Open Command #
##################################################################
-@workspace.command(name='open', short_help="Open a new workspace")
-@click.option('--no-checkout', is_flag=True,
- help="Do not checkout the source, only link to the given directory")
-@click.option('--force', '-f', is_flag=True,
- help="The workspace will be created even if the directory in which it will be created is not empty " +
- "or if a workspace for that element already exists")
-@click.option('--track', 'track_', is_flag=True,
- help="Track and fetch new source references before checking out the workspace")
-@click.option('--directory', type=click.Path(file_okay=False), default=None,
- help="Only for use when a single Element is given: Set the directory to use to create the workspace")
-@click.argument('elements', nargs=-1, type=click.Path(readable=False), required=True)
+@workspace.command(name="open", short_help="Open a new workspace")
+@click.option(
+ "--no-checkout",
+ is_flag=True,
+ help="Do not checkout the source, only link to the given directory",
+)
+@click.option(
+ "--force",
+ "-f",
+ is_flag=True,
+ help="The workspace will be created even if the directory in which it will be created is not empty "
+ + "or if a workspace for that element already exists",
+)
+@click.option(
+ "--track",
+ "track_",
+ is_flag=True,
+ help="Track and fetch new source references before checking out the workspace",
+)
+@click.option(
+ "--directory",
+ type=click.Path(file_okay=False),
+ default=None,
+ help="Only for use when a single Element is given: Set the directory to use to create the workspace",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False), required=True)
@click.pass_obj
def workspace_open(app, no_checkout, force, track_, directory, elements):
"""Open a workspace for manual source modification"""
with app.initialized():
- app.stream.workspace_open(elements,
- no_checkout=no_checkout,
- track_first=track_,
- force=force,
- custom_dir=directory)
+ app.stream.workspace_open(
+ elements,
+ no_checkout=no_checkout,
+ track_first=track_,
+ force=force,
+ custom_dir=directory,
+ )
##################################################################
# Workspace Close Command #
##################################################################
-@workspace.command(name='close', short_help="Close workspaces")
-@click.option('--remove-dir', is_flag=True,
- help="Remove the path that contains the closed workspace")
-@click.option('--all', '-a', 'all_', is_flag=True,
- help="Close all open workspaces")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@workspace.command(name="close", short_help="Close workspaces")
+@click.option(
+ "--remove-dir",
+ is_flag=True,
+ help="Remove the path that contains the closed workspace",
+)
+@click.option("--all", "-a", "all_", is_flag=True, help="Close all open workspaces")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def workspace_close(app, remove_dir, all_, elements):
"""Close a workspace"""
@@ -924,15 +1185,17 @@ def workspace_close(app, remove_dir, all_, elements):
if element:
elements = (element,)
else:
- raise AppError('No elements specified')
+ raise AppError("No elements specified")
# Early exit if we specified `all` and there are no workspaces
if all_ and not app.stream.workspace_exists():
- click.echo('No open workspaces to close', err=True)
+ click.echo("No open workspaces to close", err=True)
sys.exit(0)
if all_:
- elements = [element_name for element_name, _ in app.context.get_workspaces().list()]
+ elements = [
+ element_name for element_name, _ in app.context.get_workspaces().list()
+ ]
elements = app.stream.redirect_element_names(elements)
@@ -955,21 +1218,26 @@ def workspace_close(app, remove_dir, all_, elements):
if removed_required_element:
click.echo(
"Removed '{}', therefore you can no longer run BuildStream "
- "commands from the current directory.".format(element_name), err=True)
+ "commands from the current directory.".format(element_name),
+ err=True,
+ )
##################################################################
# Workspace Reset Command #
##################################################################
-@workspace.command(name='reset', short_help="Reset a workspace to its original state")
-@click.option('--soft', is_flag=True,
- help="Reset workspace state without affecting its contents")
-@click.option('--track', 'track_', is_flag=True,
- help="Track and fetch the latest source before resetting")
-@click.option('--all', '-a', 'all_', is_flag=True,
- help="Reset all open workspaces")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@workspace.command(name="reset", short_help="Reset a workspace to its original state")
+@click.option(
+ "--soft", is_flag=True, help="Reset workspace state without affecting its contents"
+)
+@click.option(
+ "--track",
+ "track_",
+ is_flag=True,
+ help="Track and fetch the latest source before resetting",
+)
+@click.option("--all", "-a", "all_", is_flag=True, help="Reset all open workspaces")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def workspace_reset(app, soft, track_, all_, elements):
"""Reset a workspace to its original state"""
@@ -982,13 +1250,15 @@ def workspace_reset(app, soft, track_, all_, elements):
if element:
elements = (element,)
else:
- raise AppError('No elements specified to reset')
+ raise AppError("No elements specified to reset")
if all_ and not app.stream.workspace_exists():
raise AppError("No open workspaces to reset")
if all_:
- elements = tuple(element_name for element_name, _ in app.context.get_workspaces().list())
+ elements = tuple(
+ element_name for element_name, _ in app.context.get_workspaces().list()
+ )
app.stream.workspace_reset(elements, soft=soft, track_first=track_)
@@ -996,7 +1266,7 @@ def workspace_reset(app, soft, track_, all_, elements):
##################################################################
# Workspace List Command #
##################################################################
-@workspace.command(name='list', short_help="List open workspaces")
+@workspace.command(name="list", short_help="List open workspaces")
@click.pass_obj
def workspace_list(app):
"""List open workspaces"""
@@ -1041,11 +1311,16 @@ def artifact():
#############################################################
# Artifact show Command #
#############################################################
-@artifact.command(name='show', short_help="Show the cached state of artifacts")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['build', 'run', 'all', 'none']),
- help='The dependencies we also want to show')
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="show", short_help="Show the cached state of artifacts")
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["build", "run", "all", "none"]),
+ help="The dependencies we also want to show",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
def artifact_show(app, deps, artifacts):
"""show the cached state of artifacts"""
@@ -1058,33 +1333,59 @@ def artifact_show(app, deps, artifacts):
#####################################################################
# Artifact Checkout Command #
#####################################################################
-@artifact.command(name='checkout', short_help="Checkout contents of an artifact")
-@click.option('--force', '-f', is_flag=True,
- help="Allow files to be overwritten")
-@click.option('--deps', '-d', default='run', show_default=True,
- type=click.Choice(['run', 'build', 'none', 'all']),
- help='The dependencies to checkout')
-@click.option('--integrate/--no-integrate', default=None, is_flag=True,
- help="Whether to run integration commands")
-@click.option('--hardlinks', is_flag=True,
- help="Checkout hardlinks instead of copying if possible")
-@click.option('--tar', default=None, metavar='LOCATION',
- type=click.Path(),
- help="Create a tarball from the artifact contents instead "
- "of a file tree. If LOCATION is '-', the tarball "
- "will be dumped to the standard output.")
-@click.option('--compression', default=None,
- type=click.Choice(['gz', 'xz', 'bz2']),
- help="The compression option of the tarball created.")
-@click.option('--pull', 'pull_', is_flag=True,
- help="Pull the artifact if it's missing or incomplete.")
-@click.option('--directory', default=None,
- type=click.Path(file_okay=False),
- help="The directory to checkout the artifact to")
-@click.argument('target', required=False,
- type=click.Path(readable=False))
+@artifact.command(name="checkout", short_help="Checkout contents of an artifact")
+@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
+@click.option(
+ "--deps",
+ "-d",
+ default="run",
+ show_default=True,
+ type=click.Choice(["run", "build", "none", "all"]),
+ help="The dependencies to checkout",
+)
+@click.option(
+ "--integrate/--no-integrate",
+ default=None,
+ is_flag=True,
+ help="Whether to run integration commands",
+)
+@click.option(
+ "--hardlinks",
+ is_flag=True,
+ help="Checkout hardlinks instead of copying if possible",
+)
+@click.option(
+ "--tar",
+ default=None,
+ metavar="LOCATION",
+ type=click.Path(),
+ help="Create a tarball from the artifact contents instead "
+ "of a file tree. If LOCATION is '-', the tarball "
+ "will be dumped to the standard output.",
+)
+@click.option(
+ "--compression",
+ default=None,
+ type=click.Choice(["gz", "xz", "bz2"]),
+ help="The compression option of the tarball created.",
+)
+@click.option(
+ "--pull",
+ "pull_",
+ is_flag=True,
+ help="Pull the artifact if it's missing or incomplete.",
+)
+@click.option(
+ "--directory",
+ default=None,
+ type=click.Path(file_okay=False),
+ help="The directory to checkout the artifact to",
+)
+@click.argument("target", required=False, type=click.Path(readable=False))
@click.pass_obj
-def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, target):
+def artifact_checkout(
+ app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, target
+):
"""Checkout contents of an artifact
When this command is executed from a workspace directory, the default
@@ -1100,14 +1401,17 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
if not tar:
if compression:
- click.echo("ERROR: --compression can only be provided if --tar is provided", err=True)
+ click.echo(
+ "ERROR: --compression can only be provided if --tar is provided",
+ err=True,
+ )
sys.exit(-1)
else:
if directory is None:
location = os.path.abspath(os.path.join(os.getcwd(), target))
else:
location = directory
- if location[-4:] == '.bst':
+ if location[-4:] == ".bst":
location = location[:-4]
tar = False
else:
@@ -1115,11 +1419,21 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
try:
inferred_compression = _get_compression(tar)
except UtilError as e:
- click.echo("ERROR: Invalid file extension given with '--tar': {}".format(e), err=True)
+ click.echo(
+ "ERROR: Invalid file extension given with '--tar': {}".format(e),
+ err=True,
+ )
sys.exit(-1)
- if compression and inferred_compression != '' and inferred_compression != compression:
- click.echo("WARNING: File extension and compression differ."
- "File extension has been overridden by --compression", err=True)
+ if (
+ compression
+ and inferred_compression != ""
+ and inferred_compression != compression
+ ):
+ click.echo(
+ "WARNING: File extension and compression differ."
+ "File extension has been overridden by --compression",
+ err=True,
+ )
if not compression:
compression = inferred_compression
@@ -1129,28 +1443,38 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
if not target:
raise AppError('Missing argument "ELEMENT".')
- app.stream.checkout(target,
- location=location,
- force=force,
- selection=deps,
- integrate=True if integrate is None else integrate,
- hardlinks=hardlinks,
- pull=pull_,
- compression=compression,
- tar=bool(tar))
+ app.stream.checkout(
+ target,
+ location=location,
+ force=force,
+ selection=deps,
+ integrate=True if integrate is None else integrate,
+ hardlinks=hardlinks,
+ pull=pull_,
+ compression=compression,
+ tar=bool(tar),
+ )
################################################################
# Artifact Pull Command #
################################################################
@artifact.command(name="pull", short_help="Pull a built artifact")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependency artifacts to pull')
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('artifacts', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependency artifacts to pull",
+)
+@click.option(
+ "--remote",
+ "-r",
+ default=None,
+ help="The URL of the remote cache (defaults to the first configured cache)",
+)
+@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def artifact_pull(app, artifacts, deps, remote):
"""Pull a built artifact from the configured remote artifact cache.
@@ -1181,21 +1505,33 @@ def artifact_pull(app, artifacts, deps, remote):
# Junction elements cannot be pulled, exclude them from default targets
ignore_junction_targets = True
- app.stream.pull(artifacts, selection=deps, remote=remote,
- ignore_junction_targets=ignore_junction_targets)
+ app.stream.pull(
+ artifacts,
+ selection=deps,
+ remote=remote,
+ ignore_junction_targets=ignore_junction_targets,
+ )
##################################################################
# Artifact Push Command #
##################################################################
@artifact.command(name="push", short_help="Push a built artifact")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependencies to push')
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('artifacts', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependencies to push",
+)
+@click.option(
+ "--remote",
+ "-r",
+ default=None,
+ help="The URL of the remote cache (defaults to the first configured cache)",
+)
+@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def artifact_push(app, artifacts, deps, remote):
"""Push a built artifact to a remote artifact cache.
@@ -1228,18 +1564,24 @@ def artifact_push(app, artifacts, deps, remote):
# Junction elements cannot be pushed, exclude them from default targets
ignore_junction_targets = True
- app.stream.push(artifacts, selection=deps, remote=remote,
- ignore_junction_targets=ignore_junction_targets)
+ app.stream.push(
+ artifacts,
+ selection=deps,
+ remote=remote,
+ ignore_junction_targets=ignore_junction_targets,
+ )
################################################################
# Artifact Log Command #
################################################################
-@artifact.command(name='log', short_help="Show logs of artifacts")
-@click.option('--out',
- type=click.Path(file_okay=True, writable=True),
- help="Output logs to individual files in the specified path. If absent, logs are written to stdout.")
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="log", short_help="Show logs of artifacts")
+@click.option(
+ "--out",
+ type=click.Path(file_okay=True, writable=True),
+ help="Output logs to individual files in the specified path. If absent, logs are written to stdout.",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
def artifact_log(app, artifacts, out):
"""Show build logs of artifacts"""
@@ -1249,7 +1591,7 @@ def artifact_log(app, artifacts, out):
if not out:
try:
for log in list(artifact_logs.values()):
- with open(log[0], 'r') as f:
+ with open(log[0], "r") as f:
data = f.read()
click.echo_via_pager(data)
except (OSError, FileNotFoundError):
@@ -1271,7 +1613,7 @@ def artifact_log(app, artifacts, out):
shutil.copy(log, dest)
# make a dir and write in log files
else:
- log_name = os.path.splitext(name)[0] + '.log'
+ log_name = os.path.splitext(name)[0] + ".log"
dest = os.path.join(out, log_name)
shutil.copy(log_files[0], dest)
# write a log file
@@ -1280,10 +1622,15 @@ def artifact_log(app, artifacts, out):
################################################################
# Artifact List-Contents Command #
################################################################
-@artifact.command(name='list-contents', short_help="List the contents of an artifact")
-@click.option('--long', '-l', 'long_', is_flag=True,
- help="Provide more information about the contents of the artifact.")
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="list-contents", short_help="List the contents of an artifact")
+@click.option(
+ "--long",
+ "-l",
+ "long_",
+ is_flag=True,
+ help="Provide more information about the contents of the artifact.",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
def artifact_list_contents(app, artifacts, long_):
"""List the contents of an artifact.
@@ -1305,11 +1652,16 @@ def artifact_list_contents(app, artifacts, long_):
###################################################################
# Artifact Delete Command #
###################################################################
-@artifact.command(name='delete', short_help="Remove artifacts from the local cache")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'run', 'build', 'all']),
- help="The dependencies to delete")
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="delete", short_help="Remove artifacts from the local cache")
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "run", "build", "all"]),
+ help="The dependencies to delete",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
@click.pass_obj
def artifact_delete(app, artifacts, deps):
"""Remove artifacts from the local cache"""
@@ -1330,21 +1682,39 @@ def artifact_delete(app, artifacts, deps):
# Fetch Command #
##################################################################
@cli.command(short_help="COMMAND OBSOLETE - Fetch sources in a pipeline", hidden=True)
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies from fetching")
-@click.option('--deps', '-d', default='plan', show_default=True,
- type=click.Choice(['none', 'plan', 'all']),
- help='The dependencies to fetch')
-@click.option('--track', 'track_', is_flag=True,
- help="Track new source references before fetching")
-@click.option('--track-cross-junctions', '-J', is_flag=True,
- help="Allow tracking to cross junction boundaries")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies from fetching",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="plan",
+ show_default=True,
+ type=click.Choice(["none", "plan", "all"]),
+ help="The dependencies to fetch",
+)
+@click.option(
+ "--track",
+ "track_",
+ is_flag=True,
+ help="Track new source references before fetching",
+)
+@click.option(
+ "--track-cross-junctions",
+ "-J",
+ is_flag=True,
+ help="Allow tracking to cross junction boundaries",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def fetch(app, elements, deps, track_, except_, track_cross_junctions):
- click.echo("This command is now obsolete. Use `bst source fetch` instead.", err=True)
+ click.echo(
+ "This command is now obsolete. Use `bst source fetch` instead.", err=True
+ )
sys.exit(1)
@@ -1352,19 +1722,30 @@ def fetch(app, elements, deps, track_, except_, track_cross_junctions):
# Track Command #
##################################################################
@cli.command(short_help="COMMAND OBSOLETE - Track new source references", hidden=True)
-@click.option('--except', 'except_', multiple=True,
- type=click.Path(readable=False),
- help="Except certain dependencies from tracking")
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependencies to track')
-@click.option('--cross-junctions', '-J', is_flag=True,
- help="Allow crossing junction boundaries")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--except",
+ "except_",
+ multiple=True,
+ type=click.Path(readable=False),
+ help="Except certain dependencies from tracking",
+)
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependencies to track",
+)
+@click.option(
+ "--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries"
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def track(app, elements, deps, except_, cross_junctions):
- click.echo("This command is now obsolete. Use `bst source track` instead.", err=True)
+ click.echo(
+ "This command is now obsolete. Use `bst source track` instead.", err=True
+ )
sys.exit(1)
@@ -1372,26 +1753,41 @@ def track(app, elements, deps, except_, cross_junctions):
# Checkout Command #
##################################################################
@cli.command(short_help="COMMAND OBSOLETE - Checkout a built artifact", hidden=True)
-@click.option('--force', '-f', is_flag=True,
- help="Allow files to be overwritten")
-@click.option('--deps', '-d', default='run', show_default=True,
- type=click.Choice(['run', 'build', 'none']),
- help='The dependencies to checkout')
-@click.option('--integrate/--no-integrate', default=True,
- help="Run integration commands (default is to run commands)")
-@click.option('--hardlinks', is_flag=True,
- help="Checkout hardlinks instead of copies (handle with care)")
-@click.option('--tar', is_flag=True,
- help="Create a tarball from the artifact contents instead "
- "of a file tree. If LOCATION is '-', the tarball "
- "will be dumped to the standard output.")
-@click.argument('element', required=False,
- type=click.Path(readable=False))
-@click.argument('location', type=click.Path(), required=False)
+@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
+@click.option(
+ "--deps",
+ "-d",
+ default="run",
+ show_default=True,
+ type=click.Choice(["run", "build", "none"]),
+ help="The dependencies to checkout",
+)
+@click.option(
+ "--integrate/--no-integrate",
+ default=True,
+ help="Run integration commands (default is to run commands)",
+)
+@click.option(
+ "--hardlinks",
+ is_flag=True,
+ help="Checkout hardlinks instead of copies (handle with care)",
+)
+@click.option(
+ "--tar",
+ is_flag=True,
+ help="Create a tarball from the artifact contents instead "
+ "of a file tree. If LOCATION is '-', the tarball "
+ "will be dumped to the standard output.",
+)
+@click.argument("element", required=False, type=click.Path(readable=False))
+@click.argument("location", type=click.Path(), required=False)
@click.pass_obj
def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
- click.echo("This command is now obsolete. Use `bst artifact checkout` instead " +
- "and use the --directory option to specify LOCATION", err=True)
+ click.echo(
+ "This command is now obsolete. Use `bst artifact checkout` instead "
+ + "and use the --directory option to specify LOCATION",
+ err=True,
+ )
sys.exit(1)
@@ -1399,16 +1795,25 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
# Pull Command #
################################################################
@cli.command(short_help="COMMAND OBSOLETE - Pull a built artifact", hidden=True)
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependency artifacts to pull')
-@click.option('--remote', '-r',
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependency artifacts to pull",
+)
+@click.option(
+ "--remote",
+ "-r",
+ help="The URL of the remote cache (defaults to the first configured cache)",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def pull(app, elements, deps, remote):
- click.echo("This command is now obsolete. Use `bst artifact pull` instead.", err=True)
+ click.echo(
+ "This command is now obsolete. Use `bst artifact pull` instead.", err=True
+ )
sys.exit(1)
@@ -1416,14 +1821,24 @@ def pull(app, elements, deps, remote):
# Push Command #
##################################################################
@cli.command(short_help="COMMAND OBSOLETE - Push a built artifact", hidden=True)
-@click.option('--deps', '-d', default='none', show_default=True,
- type=click.Choice(['none', 'all']),
- help='The dependencies to push')
-@click.option('--remote', '-r', default=None,
- help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
- type=click.Path(readable=False))
+@click.option(
+ "--deps",
+ "-d",
+ default="none",
+ show_default=True,
+ type=click.Choice(["none", "all"]),
+ help="The dependencies to push",
+)
+@click.option(
+ "--remote",
+ "-r",
+ default=None,
+ help="The URL of the remote cache (defaults to the first configured cache)",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def push(app, elements, deps, remote):
- click.echo("This command is now obsolete. Use `bst artifact push` instead.", err=True)
+ click.echo(
+ "This command is now obsolete. Use `bst artifact push` instead.", err=True
+ )
sys.exit(1)
diff --git a/src/buildstream/_frontend/complete.py b/src/buildstream/_frontend/complete.py
index 06067f6cc..35d1cb1a3 100644
--- a/src/buildstream/_frontend/complete.py
+++ b/src/buildstream/_frontend/complete.py
@@ -39,9 +39,9 @@ import click
from click.core import MultiCommand, Option, Argument
from click.parser import split_arg_string
-WORDBREAK = '='
+WORDBREAK = "="
-COMPLETION_SCRIPT = '''
+COMPLETION_SCRIPT = """
%(complete_func)s() {
local IFS=$'\n'
COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
@@ -51,7 +51,7 @@ COMPLETION_SCRIPT = '''
}
complete -F %(complete_func)s -o nospace %(script_names)s
-'''
+"""
# An exception for our custom completion handler to
@@ -62,7 +62,7 @@ class CompleteUnhandled(Exception):
pass
-def complete_path(path_type, incomplete, base_directory='.'):
+def complete_path(path_type, incomplete, base_directory="."):
"""Helper method for implementing the completions() method
for File and Path parameter types.
"""
@@ -71,7 +71,7 @@ def complete_path(path_type, incomplete, base_directory='.'):
# specified in `incomplete` minus the last path component,
# otherwise list files starting from the current working directory.
entries = []
- base_path = ''
+ base_path = ""
# This is getting a bit messy
listed_base_directory = False
@@ -128,11 +128,11 @@ def complete_path(path_type, incomplete, base_directory='.'):
return [
# Return an appropriate path for each entry
- fix_path(e) for e in sorted(entries)
-
+ fix_path(e)
+ for e in sorted(entries)
# Filter out non directory elements when searching for a directory,
# the opposite is fine, however.
- if not (path_type == 'Directory' and not entry_is_dir(e))
+ if not (path_type == "Directory" and not entry_is_dir(e))
]
@@ -170,7 +170,12 @@ def resolve_ctx(cli, prog_name, args):
cmd = ctx.command.get_command(ctx, args_remaining[0])
if cmd is None:
return None
- ctx = cmd.make_context(args_remaining[0], args_remaining[1:], parent=ctx, resilient_parsing=True)
+ ctx = cmd.make_context(
+ args_remaining[0],
+ args_remaining[1:],
+ parent=ctx,
+ resilient_parsing=True,
+ )
args_remaining = ctx.protected_args + ctx.args
else:
ctx = ctx.parent
@@ -183,7 +188,7 @@ def start_of_option(param_str):
:param param_str: param_str to check
:return: whether or not this is the start of an option declaration (i.e. starts "-" or "--")
"""
- return param_str and param_str[:1] == '-'
+ return param_str and param_str[:1] == "-"
def is_incomplete_option(all_args, cmd_param):
@@ -197,7 +202,9 @@ def is_incomplete_option(all_args, cmd_param):
if cmd_param.is_flag:
return False
last_option = None
- for index, arg_str in enumerate(reversed([arg for arg in all_args if arg != WORDBREAK])):
+ for index, arg_str in enumerate(
+ reversed([arg for arg in all_args if arg != WORDBREAK])
+ ):
if index + 1 > cmd_param.nargs:
break
if start_of_option(arg_str):
@@ -218,8 +225,11 @@ def is_incomplete_argument(current_params, cmd_param):
return True
if cmd_param.nargs == -1:
return True
- if isinstance(current_param_values, collections.abc.Iterable) \
- and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs:
+ if (
+ isinstance(current_param_values, collections.abc.Iterable)
+ and cmd_param.nargs > 1
+ and len(current_param_values) < cmd_param.nargs
+ ):
return True
return False
@@ -237,10 +247,7 @@ def get_user_autocompletions(args, incomplete, cmd, cmd_param, override):
# Use the type specific default completions unless it was overridden
try:
- return override(cmd=cmd,
- cmd_param=cmd_param,
- args=args,
- incomplete=incomplete)
+ return override(cmd=cmd, cmd_param=cmd_param, args=args, incomplete=incomplete)
except CompleteUnhandled:
return get_param_type_completion(cmd_param.type, incomplete) or []
@@ -269,7 +276,7 @@ def get_choices(cli, prog_name, args, incomplete, override):
all_args.append(partition_incomplete[0])
incomplete = partition_incomplete[2]
elif incomplete == WORDBREAK:
- incomplete = ''
+ incomplete = ""
choices = []
found_param = False
@@ -277,34 +284,63 @@ def get_choices(cli, prog_name, args, incomplete, override):
# completions for options
for param in ctx.command.params:
if isinstance(param, Option):
- choices.extend([param_opt + " " for param_opt in param.opts + param.secondary_opts
- if param_opt not in all_args or param.multiple])
+ choices.extend(
+ [
+ param_opt + " "
+ for param_opt in param.opts + param.secondary_opts
+ if param_opt not in all_args or param.multiple
+ ]
+ )
found_param = True
if not found_param:
# completion for option values by choices
for cmd_param in ctx.command.params:
- if isinstance(cmd_param, Option) and is_incomplete_option(all_args, cmd_param):
- choices.extend(get_user_autocompletions(all_args, incomplete, ctx.command, cmd_param, override))
+ if isinstance(cmd_param, Option) and is_incomplete_option(
+ all_args, cmd_param
+ ):
+ choices.extend(
+ get_user_autocompletions(
+ all_args, incomplete, ctx.command, cmd_param, override
+ )
+ )
found_param = True
break
if not found_param:
# completion for argument values by choices
for cmd_param in ctx.command.params:
- if isinstance(cmd_param, Argument) and is_incomplete_argument(ctx.params, cmd_param):
- choices.extend(get_user_autocompletions(all_args, incomplete, ctx.command, cmd_param, override))
+ if isinstance(cmd_param, Argument) and is_incomplete_argument(
+ ctx.params, cmd_param
+ ):
+ choices.extend(
+ get_user_autocompletions(
+ all_args, incomplete, ctx.command, cmd_param, override
+ )
+ )
found_param = True
break
if not found_param and isinstance(ctx.command, MultiCommand):
# completion for any subcommands
- choices.extend([cmd + " " for cmd in ctx.command.list_commands(ctx)
- if not ctx.command.get_command(ctx, cmd).hidden])
-
- if not start_of_option(incomplete) and ctx.parent is not None \
- and isinstance(ctx.parent.command, MultiCommand) and ctx.parent.command.chain:
+ choices.extend(
+ [
+ cmd + " "
+ for cmd in ctx.command.list_commands(ctx)
+ if not ctx.command.get_command(ctx, cmd).hidden
+ ]
+ )
+
+ if (
+ not start_of_option(incomplete)
+ and ctx.parent is not None
+ and isinstance(ctx.parent.command, MultiCommand)
+ and ctx.parent.command.chain
+ ):
# completion for chained commands
- visible_commands = [cmd for cmd in ctx.parent.command.list_commands(ctx.parent)
- if not ctx.parent.command.get_command(ctx.parent, cmd).hidden]
+ visible_commands = [
+ cmd
+ for cmd in ctx.parent.command.list_commands(ctx.parent)
+ if not ctx.parent.command.get_command(ctx.parent, cmd).hidden
+ ]
remaining_commands = set(visible_commands) - set(ctx.parent.protected_args)
choices.extend([cmd + " " for cmd in remaining_commands])
@@ -314,13 +350,13 @@ def get_choices(cli, prog_name, args, incomplete, override):
def do_complete(cli, prog_name, override):
- cwords = split_arg_string(os.environ['COMP_WORDS'])
- cword = int(os.environ['COMP_CWORD'])
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ cword = int(os.environ["COMP_CWORD"])
args = cwords[1:cword]
try:
incomplete = cwords[cword]
except IndexError:
- incomplete = ''
+ incomplete = ""
for item in get_choices(cli, prog_name, args, incomplete, override):
click.echo(item)
@@ -331,7 +367,7 @@ def do_complete(cli, prog_name, override):
def main_bashcomplete(cmd, prog_name, override):
"""Internal handler for the bash completion support."""
- if '_BST_COMPLETION' in os.environ:
+ if "_BST_COMPLETION" in os.environ:
do_complete(cmd, prog_name, override)
return True
diff --git a/src/buildstream/_frontend/linuxapp.py b/src/buildstream/_frontend/linuxapp.py
index 0444dc7b4..987b023bd 100644
--- a/src/buildstream/_frontend/linuxapp.py
+++ b/src/buildstream/_frontend/linuxapp.py
@@ -28,9 +28,9 @@ from .app import App
#
def _osc_777_supported():
- term = os.environ.get('TERM')
+ term = os.environ.get("TERM")
- if term and (term.startswith('xterm') or term.startswith('vte')):
+ if term and (term.startswith("xterm") or term.startswith("vte")):
# Since vte version 4600, upstream silently ignores
# the OSC 777 without printing garbage to the terminal.
@@ -39,7 +39,7 @@ def _osc_777_supported():
# will trigger a desktop notification and bring attention
# to the terminal.
#
- vte_version = os.environ.get('VTE_VERSION')
+ vte_version = os.environ.get("VTE_VERSION")
try:
vte_version_int = int(vte_version)
except (ValueError, TypeError):
@@ -54,7 +54,6 @@ def _osc_777_supported():
# A linux specific App implementation
#
class LinuxApp(App):
-
def notify(self, title, text):
# Currently we only try this notification method
diff --git a/src/buildstream/_frontend/profile.py b/src/buildstream/_frontend/profile.py
index dda0f7ffe..f49be5b0a 100644
--- a/src/buildstream/_frontend/profile.py
+++ b/src/buildstream/_frontend/profile.py
@@ -28,7 +28,7 @@ import click
# Kwargs:
# The same keyword arguments which can be used with click.style()
#
-class Profile():
+class Profile:
def __init__(self, **kwargs):
self._kwargs = dict(kwargs)
@@ -64,7 +64,6 @@ class Profile():
# arguments
#
def fmt_subst(self, text, varname, value, **kwargs):
-
def subst_callback(match):
# Extract and format the "{(varname)...}" portion of the match
inner_token = match.group(1)
diff --git a/src/buildstream/_frontend/status.py b/src/buildstream/_frontend/status.py
index 8da7df047..577fd40c5 100644
--- a/src/buildstream/_frontend/status.py
+++ b/src/buildstream/_frontend/status.py
@@ -44,19 +44,22 @@ from .widget import TimeCode
# stream (Stream): The Stream
# colors (bool): Whether to print the ANSI color codes in the output
#
-class Status():
+class Status:
# Table of the terminal capabilities we require and use
- _TERM_CAPABILITIES = {
- 'move_up': 'cuu1',
- 'move_x': 'hpa',
- 'clear_eol': 'el'
- }
-
- def __init__(self, context, state,
- content_profile, format_profile,
- success_profile, error_profile,
- stream, colors=False):
+ _TERM_CAPABILITIES = {"move_up": "cuu1", "move_x": "hpa", "clear_eol": "el"}
+
+ def __init__(
+ self,
+ context,
+ state,
+ content_profile,
+ format_profile,
+ success_profile,
+ error_profile,
+ stream,
+ colors=False,
+ ):
self._context = context
self._state = state
@@ -69,10 +72,15 @@ class Status():
self._last_lines = 0 # Number of status lines we last printed to console
self._spacing = 1
self._colors = colors
- self._header = _StatusHeader(context, state,
- content_profile, format_profile,
- success_profile, error_profile,
- stream)
+ self._header = _StatusHeader(
+ context,
+ state,
+ content_profile,
+ format_profile,
+ success_profile,
+ error_profile,
+ stream,
+ )
self._term_width, _ = click.get_terminal_size()
self._alloc_lines = 0
@@ -133,7 +141,7 @@ class Status():
# feeds for the amount of lines we intend to print first, and
# move cursor position back to the first line
for _ in range(self._alloc_lines + self._header.lines):
- click.echo('', err=True)
+ click.echo("", err=True)
for _ in range(self._alloc_lines + self._header.lines):
self._move_up()
@@ -145,14 +153,14 @@ class Status():
# alignment of each column
n_columns = len(self._alloc_columns)
for line in self._job_lines(n_columns):
- text = ''
+ text = ""
for job in line:
column = line.index(job)
text += job.render(self._alloc_columns[column] - job.size, elapsed)
# Add spacing between columns
if column < (n_columns - 1):
- text += ' ' * self._spacing
+ text += " " * self._spacing
# Print the line
click.echo(text, color=self._colors, err=True)
@@ -198,7 +206,7 @@ class Status():
            # Initialize the terminal, curses might decide it doesn't
# support this terminal
try:
- curses.setupterm(os.environ.get('TERM', 'dumb'))
+ curses.setupterm(os.environ.get("TERM", "dumb"))
except curses.error:
return None
@@ -223,7 +231,7 @@ class Status():
# as well, and should provide better compatibility with most
# terminals.
#
- term_caps[capname] = code.decode('latin1')
+ term_caps[capname] = code.decode("latin1")
return term_caps
@@ -238,19 +246,19 @@ class Status():
# Explicitly move to beginning of line, fixes things up
# when there was a ^C or ^Z printed to the terminal.
- move_x = curses.tparm(self._term_caps['move_x'].encode('latin1'), 0)
- move_x = move_x.decode('latin1')
+ move_x = curses.tparm(self._term_caps["move_x"].encode("latin1"), 0)
+ move_x = move_x.decode("latin1")
- move_up = curses.tparm(self._term_caps['move_up'].encode('latin1'))
- move_up = move_up.decode('latin1')
+ move_up = curses.tparm(self._term_caps["move_up"].encode("latin1"))
+ move_up = move_up.decode("latin1")
click.echo(move_x + move_up, nl=False, err=True)
def _clear_line(self):
assert self._term_caps is not None
- clear_eol = curses.tparm(self._term_caps['clear_eol'].encode('latin1'))
- clear_eol = clear_eol.decode('latin1')
+ clear_eol = curses.tparm(self._term_caps["clear_eol"].encode("latin1"))
+ clear_eol = clear_eol.decode("latin1")
click.echo(clear_eol, nl=False, err=True)
def _allocate(self):
@@ -279,7 +287,7 @@ class Status():
def _job_lines(self, columns):
jobs_list = list(self._jobs.values())
for i in range(0, len(self._jobs), columns):
- yield jobs_list[i:i + columns]
+ yield jobs_list[i : i + columns]
# Returns an array of integers representing the maximum
# length in characters for each column, given the current
@@ -309,9 +317,14 @@ class Status():
def _add_job(self, action_name, full_name):
task = self._state.tasks[(action_name, full_name)]
elapsed = task.elapsed_offset
- job = _StatusJob(self._context, action_name, full_name,
- self._content_profile, self._format_profile,
- elapsed)
+ job = _StatusJob(
+ self._context,
+ action_name,
+ full_name,
+ self._content_profile,
+ self._format_profile,
+ elapsed,
+ )
self._jobs[(action_name, full_name)] = job
self._need_alloc = True
@@ -340,12 +353,17 @@ class Status():
# error_profile (Profile): Formatting profile for error text
# stream (Stream): The Stream
#
-class _StatusHeader():
-
- def __init__(self, context, state,
- content_profile, format_profile,
- success_profile, error_profile,
- stream):
+class _StatusHeader:
+ def __init__(
+ self,
+ context,
+ state,
+ content_profile,
+ format_profile,
+ success_profile,
+ error_profile,
+ stream,
+ ):
#
# Public members
@@ -377,19 +395,22 @@ class _StatusHeader():
total = str(len(self._stream.total_elements))
size = 0
- text = ''
+ text = ""
size += len(total) + len(session) + 4 # Size for (N/N) with a leading space
size += 8 # Size of time code
size += len(project.name) + 1
text += self._time_code.render_time(elapsed)
- text += ' ' + self._content_profile.fmt(project.name)
- text += ' ' + self._format_profile.fmt('(') + \
- self._content_profile.fmt(session) + \
- self._format_profile.fmt('/') + \
- self._content_profile.fmt(total) + \
- self._format_profile.fmt(')')
-
- line1 = self._centered(text, size, line_length, '=')
+ text += " " + self._content_profile.fmt(project.name)
+ text += (
+ " "
+ + self._format_profile.fmt("(")
+ + self._content_profile.fmt(session)
+ + self._format_profile.fmt("/")
+ + self._content_profile.fmt(total)
+ + self._format_profile.fmt(")")
+ )
+
+ line1 = self._centered(text, size, line_length, "=")
#
# Line 2: Dynamic list of queue status reports
@@ -397,7 +418,7 @@ class _StatusHeader():
# (Sources Fetched:0 117 0)→ (Built:4 0 0)
#
size = 0
- text = ''
+ text = ""
# Format and calculate size for each queue progress
for index, task_group in enumerate(self._state.task_groups.values()):
@@ -405,13 +426,13 @@ class _StatusHeader():
# Add spacing
if index > 0:
size += 2
- text += self._format_profile.fmt('→ ')
+ text += self._format_profile.fmt("→ ")
group_text, group_size = self._render_task_group(task_group)
size += group_size
text += group_text
- line2 = self._centered(text, size, line_length, ' ')
+ line2 = self._centered(text, size, line_length, " ")
#
# Line 3: Cache usage percentage report
@@ -425,7 +446,7 @@ class _StatusHeader():
if usage.used_size is None:
# Cache usage is unknown
size = 0
- text = ''
+ text = ""
else:
size = 21
size += len(usage_string)
@@ -436,15 +457,17 @@ class _StatusHeader():
else:
formatted_usage = self._success_profile.fmt(usage_string)
- text = self._format_profile.fmt("~~~~~~ ") + \
- self._content_profile.fmt('cache') + \
- self._format_profile.fmt(': ') + \
- formatted_usage + \
- self._format_profile.fmt(' ~~~~~~')
+ text = (
+ self._format_profile.fmt("~~~~~~ ")
+ + self._content_profile.fmt("cache")
+ + self._format_profile.fmt(": ")
+ + formatted_usage
+ + self._format_profile.fmt(" ~~~~~~")
+ )
- line3 = self._centered(text, size, line_length, ' ')
+ line3 = self._centered(text, size, line_length, " ")
- return line1 + '\n' + line2 + '\n' + line3
+ return line1 + "\n" + line2 + "\n" + line3
###################################################
# Private Methods #
@@ -457,13 +480,17 @@ class _StatusHeader():
size = 5 # Space for the formatting '[', ':', ' ', ' ' and ']'
size += len(group.complete_name)
size += len(processed) + len(skipped) + len(failed)
- text = self._format_profile.fmt("(") + \
- self._content_profile.fmt(group.complete_name) + \
- self._format_profile.fmt(":") + \
- self._success_profile.fmt(processed) + ' ' + \
- self._content_profile.fmt(skipped) + ' ' + \
- self._error_profile.fmt(failed) + \
- self._format_profile.fmt(")")
+ text = (
+ self._format_profile.fmt("(")
+ + self._content_profile.fmt(group.complete_name)
+ + self._format_profile.fmt(":")
+ + self._success_profile.fmt(processed)
+ + " "
+ + self._content_profile.fmt(skipped)
+ + " "
+ + self._error_profile.fmt(failed)
+ + self._format_profile.fmt(")")
+ )
return (text, size)
@@ -471,9 +498,9 @@ class _StatusHeader():
remaining = line_length - size
remaining -= 2
- final_text = self._format_profile.fmt(fill * (remaining // 2)) + ' '
+ final_text = self._format_profile.fmt(fill * (remaining // 2)) + " "
final_text += text
- final_text += ' ' + self._format_profile.fmt(fill * (remaining // 2))
+ final_text += " " + self._format_profile.fmt(fill * (remaining // 2))
return final_text
@@ -490,14 +517,15 @@ class _StatusHeader():
# format_profile (Profile): Formatting profile for formatting text
# elapsed (datetime): The offset into the session when this job is created
#
-class _StatusJob():
-
- def __init__(self, context, action_name, full_name, content_profile, format_profile, elapsed):
+class _StatusJob:
+ def __init__(
+ self, context, action_name, full_name, content_profile, format_profile, elapsed
+ ):
#
# Public members
#
- self.action_name = action_name # The action name
- self.size = None # The number of characters required to render
+ self.action_name = action_name # The action name
+ self.size = None # The number of characters required to render
self.full_name = full_name
#
@@ -570,24 +598,30 @@ class _StatusJob():
# elapsed (datetime): The session elapsed time offset
#
def render(self, padding, elapsed):
- text = self._format_profile.fmt('[') + \
- self._time_code.render_time(elapsed - self._offset) + \
- self._format_profile.fmt(']')
-
- text += self._format_profile.fmt('[') + \
- self._content_profile.fmt(self.action_name) + \
- self._format_profile.fmt(':') + \
- self._content_profile.fmt(self.full_name)
+ text = (
+ self._format_profile.fmt("[")
+ + self._time_code.render_time(elapsed - self._offset)
+ + self._format_profile.fmt("]")
+ )
+
+ text += (
+ self._format_profile.fmt("[")
+ + self._content_profile.fmt(self.action_name)
+ + self._format_profile.fmt(":")
+ + self._content_profile.fmt(self.full_name)
+ )
if self._current_progress is not None:
- text += self._format_profile.fmt(':') + \
- self._content_profile.fmt(str(self._current_progress))
+ text += self._format_profile.fmt(":") + self._content_profile.fmt(
+ str(self._current_progress)
+ )
if self._maximum_progress is not None:
- text += self._format_profile.fmt('/') + \
- self._content_profile.fmt(str(self._maximum_progress))
+ text += self._format_profile.fmt("/") + self._content_profile.fmt(
+ str(self._maximum_progress)
+ )
# Add padding before terminating ']'
- terminator = (' ' * padding) + ']'
+ terminator = (" " * padding) + "]"
text += self._format_profile.fmt(terminator)
return text
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 181ee7d2e..8a605bb33 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -45,8 +45,7 @@ ERROR_MESSAGES = [MessageType.FAIL, MessageType.ERROR, MessageType.BUG]
#
# An abstract class for printing output columns in our text UI.
#
-class Widget():
-
+class Widget:
def __init__(self, context, content_profile, format_profile):
# The context
@@ -74,7 +73,6 @@ class Widget():
# Used to add fixed text between columns
class FixedText(Widget):
-
def __init__(self, context, text, content_profile, format_profile):
super().__init__(context, content_profile, format_profile)
self.text = text
@@ -91,27 +89,30 @@ class WallclockTime(Widget):
def render(self, message):
- fields = [self.content_profile.fmt("{:02d}".format(x)) for x in
- [message.creation_time.hour,
- message.creation_time.minute,
- message.creation_time.second,
- ]
- ]
+ fields = [
+ self.content_profile.fmt("{:02d}".format(x))
+ for x in [
+ message.creation_time.hour,
+ message.creation_time.minute,
+ message.creation_time.second,
+ ]
+ ]
text = self.format_profile.fmt(":").join(fields)
- if self._output_format == 'us':
- text += self.content_profile.fmt(".{:06d}".format(message.creation_time.microsecond))
+ if self._output_format == "us":
+ text += self.content_profile.fmt(
+ ".{:06d}".format(message.creation_time.microsecond)
+ )
return text
# A widget for rendering the debugging column
class Debug(Widget):
-
def render(self, message):
element_name = "n/a" if message.element_name is None else message.element_name
- text = self.format_profile.fmt('pid:')
+ text = self.format_profile.fmt("pid:")
text += self.content_profile.fmt("{: <5}".format(message.pid))
text += self.format_profile.fmt("element name:")
text += self.content_profile.fmt("{: <30}".format(element_name))
@@ -130,10 +131,7 @@ class TimeCode(Widget):
def render_time(self, elapsed):
if elapsed is None:
- fields = [
- self.content_profile.fmt('--')
- for i in range(3)
- ]
+ fields = [self.content_profile.fmt("--") for i in range(3)]
else:
hours, remainder = divmod(int(elapsed.total_seconds()), 60 * 60)
minutes, seconds = divmod(remainder, 60)
@@ -142,11 +140,13 @@ class TimeCode(Widget):
for field in [hours, minutes, seconds]
]
- text = self.format_profile.fmt(':').join(fields)
+ text = self.format_profile.fmt(":").join(fields)
if self._microseconds:
if elapsed is not None:
- text += self.content_profile.fmt(".{0:06d}".format(elapsed.microseconds))
+ text += self.content_profile.fmt(
+ ".{0:06d}".format(elapsed.microseconds)
+ )
else:
text += self.content_profile.fmt(".------")
return text
@@ -169,41 +169,43 @@ class TypeName(Widget):
}
def render(self, message):
- return self.content_profile.fmt("{: <7}"
- .format(message.message_type.upper()),
- bold=True, dim=True,
- fg=self._action_colors[message.message_type])
+ return self.content_profile.fmt(
+ "{: <7}".format(message.message_type.upper()),
+ bold=True,
+ dim=True,
+ fg=self._action_colors[message.message_type],
+ )
# A widget for displaying the Element name
class ElementName(Widget):
-
def render(self, message):
action_name = message.action_name
element_name = message.element_name
if element_name is not None:
- name = '{: <30}'.format(element_name)
+ name = "{: <30}".format(element_name)
else:
- name = 'core activity'
- name = '{: <30}'.format(name)
+ name = "core activity"
+ name = "{: <30}".format(name)
if not action_name:
action_name = "Main"
- return self.content_profile.fmt("{: >8}".format(action_name.lower())) + \
- self.format_profile.fmt(':') + self.content_profile.fmt(name)
+ return (
+ self.content_profile.fmt("{: >8}".format(action_name.lower()))
+ + self.format_profile.fmt(":")
+ + self.content_profile.fmt(name)
+ )
# A widget for displaying the primary message text
class MessageText(Widget):
-
def render(self, message):
return message.message
# A widget for formatting the element cache key
class CacheKey(Widget):
-
def __init__(self, context, content_profile, format_profile, err_profile):
super().__init__(context, content_profile, format_profile)
@@ -216,10 +218,10 @@ class CacheKey(Widget):
return ""
if message.element_name is None:
- return ' ' * self._key_length
+ return " " * self._key_length
missing = False
- key = ' ' * self._key_length
+ key = " " * self._key_length
if message.element_key:
_, key, missing = message.element_key
@@ -233,7 +235,6 @@ class CacheKey(Widget):
# A widget for formatting the log file
class LogFile(Widget):
-
def __init__(self, context, content_profile, format_profile, err_profile):
super().__init__(context, content_profile, format_profile)
@@ -248,7 +249,7 @@ class LogFile(Widget):
logfile = message.logfile
if abbrev and self._logdir != "" and logfile.startswith(self._logdir):
- logfile = logfile[len(self._logdir):]
+ logfile = logfile[len(self._logdir) :]
logfile = logfile.lstrip(os.sep)
if message.message_type in ERROR_MESSAGES:
@@ -256,7 +257,7 @@ class LogFile(Widget):
else:
text = self.content_profile.fmt(logfile, dim=True)
else:
- text = ''
+ text = ""
return text
@@ -269,12 +270,17 @@ class MessageOrLogFile(Widget):
def __init__(self, context, content_profile, format_profile, err_profile):
super().__init__(context, content_profile, format_profile)
self._message_widget = MessageText(context, content_profile, format_profile)
- self._logfile_widget = LogFile(context, content_profile, format_profile, err_profile)
+ self._logfile_widget = LogFile(
+ context, content_profile, format_profile, err_profile
+ )
def render(self, message):
# Show the log file only in the main start/success messages
- if message.logfile and message.scheduler and \
- message.message_type in [MessageType.START, MessageType.SUCCESS]:
+ if (
+ message.logfile
+ and message.scheduler
+ and message.message_type in [MessageType.START, MessageType.SUCCESS]
+ ):
text = self._logfile_widget.render(message)
else:
text = self._message_widget.render(message)
@@ -296,14 +302,17 @@ class MessageOrLogFile(Widget):
# indent (int): Number of spaces to use for general indentation
#
class LogLine(Widget):
-
- def __init__(self, context, state,
- content_profile,
- format_profile,
- success_profile,
- err_profile,
- detail_profile,
- indent=4):
+ def __init__(
+ self,
+ context,
+ state,
+ content_profile,
+ format_profile,
+ success_profile,
+ err_profile,
+ detail_profile,
+ indent=4,
+ ):
super().__init__(context, content_profile, format_profile)
self._columns = []
@@ -311,30 +320,40 @@ class LogLine(Widget):
self._success_profile = success_profile
self._err_profile = err_profile
self._detail_profile = detail_profile
- self._indent = ' ' * indent
+ self._indent = " " * indent
self._log_lines = context.log_error_lines
self._message_lines = context.log_message_lines
self._resolved_keys = None
self._state = state
- self._logfile_widget = LogFile(context, content_profile, format_profile, err_profile)
+ self._logfile_widget = LogFile(
+ context, content_profile, format_profile, err_profile
+ )
if context.log_debug:
- self._columns.extend([
- Debug(context, content_profile, format_profile)
- ])
+ self._columns.extend([Debug(context, content_profile, format_profile)])
self.logfile_variable_names = {
- "elapsed": TimeCode(context, content_profile, format_profile, microseconds=False),
- "elapsed-us": TimeCode(context, content_profile, format_profile, microseconds=True),
+ "elapsed": TimeCode(
+ context, content_profile, format_profile, microseconds=False
+ ),
+ "elapsed-us": TimeCode(
+ context, content_profile, format_profile, microseconds=True
+ ),
"wallclock": WallclockTime(context, content_profile, format_profile),
- "wallclock-us": WallclockTime(context, content_profile, format_profile, output_format='us'),
+ "wallclock-us": WallclockTime(
+ context, content_profile, format_profile, output_format="us"
+ ),
"key": CacheKey(context, content_profile, format_profile, err_profile),
"element": ElementName(context, content_profile, format_profile),
"action": TypeName(context, content_profile, format_profile),
- "message": MessageOrLogFile(context, content_profile, format_profile, err_profile)
+ "message": MessageOrLogFile(
+ context, content_profile, format_profile, err_profile
+ ),
}
- logfile_tokens = self._parse_logfile_format(context.log_message_format, content_profile, format_profile)
+ logfile_tokens = self._parse_logfile_format(
+ context.log_message_format, content_profile, format_profile
+ )
self._columns.extend(logfile_tokens)
# show_pipeline()
@@ -352,7 +371,7 @@ class LogLine(Widget):
# (str): The formatted list of elements
#
def show_pipeline(self, dependencies, format_):
- report = ''
+ report = ""
p = Profile()
for element in dependencies:
@@ -360,57 +379,82 @@ class LogLine(Widget):
full_key, cache_key, dim_keys = element._get_display_key()
- line = p.fmt_subst(line, 'name', element._get_full_name(), fg='blue', bold=True)
- line = p.fmt_subst(line, 'key', cache_key, fg='yellow', dim=dim_keys)
- line = p.fmt_subst(line, 'full-key', full_key, fg='yellow', dim=dim_keys)
+ line = p.fmt_subst(
+ line, "name", element._get_full_name(), fg="blue", bold=True
+ )
+ line = p.fmt_subst(line, "key", cache_key, fg="yellow", dim=dim_keys)
+ line = p.fmt_subst(line, "full-key", full_key, fg="yellow", dim=dim_keys)
consistency = element._get_consistency()
if consistency == Consistency.INCONSISTENT:
- line = p.fmt_subst(line, 'state', "no reference", fg='red')
+ line = p.fmt_subst(line, "state", "no reference", fg="red")
else:
if element._cached_failure():
- line = p.fmt_subst(line, 'state', "failed", fg='red')
+ line = p.fmt_subst(line, "state", "failed", fg="red")
elif element._cached_success():
- line = p.fmt_subst(line, 'state', "cached", fg='magenta')
- elif consistency == Consistency.RESOLVED and not element._source_cached():
- line = p.fmt_subst(line, 'state', "fetch needed", fg='red')
+ line = p.fmt_subst(line, "state", "cached", fg="magenta")
+ elif (
+ consistency == Consistency.RESOLVED and not element._source_cached()
+ ):
+ line = p.fmt_subst(line, "state", "fetch needed", fg="red")
elif element._buildable():
- line = p.fmt_subst(line, 'state', "buildable", fg='green')
+ line = p.fmt_subst(line, "state", "buildable", fg="green")
else:
- line = p.fmt_subst(line, 'state', "waiting", fg='blue')
+ line = p.fmt_subst(line, "state", "waiting", fg="blue")
# Element configuration
if "%{config" in format_:
line = p.fmt_subst(
- line, 'config',
- yaml.round_trip_dump(element._Element__config, default_flow_style=False, allow_unicode=True))
+ line,
+ "config",
+ yaml.round_trip_dump(
+ element._Element__config,
+ default_flow_style=False,
+ allow_unicode=True,
+ ),
+ )
# Variables
if "%{vars" in format_:
variables = element._Element__variables.flat
line = p.fmt_subst(
- line, 'vars',
- yaml.round_trip_dump(variables, default_flow_style=False, allow_unicode=True))
+ line,
+ "vars",
+ yaml.round_trip_dump(
+ variables, default_flow_style=False, allow_unicode=True
+ ),
+ )
# Environment
if "%{env" in format_:
environment = element._Element__environment
line = p.fmt_subst(
- line, 'env',
- yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True))
+ line,
+ "env",
+ yaml.round_trip_dump(
+ environment, default_flow_style=False, allow_unicode=True
+ ),
+ )
# Public
if "%{public" in format_:
environment = element._Element__public
line = p.fmt_subst(
- line, 'public',
- yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True))
+ line,
+ "public",
+ yaml.round_trip_dump(
+ environment, default_flow_style=False, allow_unicode=True
+ ),
+ )
# Workspaced
if "%{workspaced" in format_:
line = p.fmt_subst(
- line, 'workspaced',
- '(workspaced)' if element._get_workspace() else '', fg='yellow')
+ line,
+ "workspaced",
+ "(workspaced)" if element._get_workspace() else "",
+ fg="yellow",
+ )
# Workspace-dirs
if "%{workspace-dirs" in format_:
@@ -418,36 +462,45 @@ class LogLine(Widget):
if workspace is not None:
path = workspace.get_absolute_path()
if path.startswith("~/"):
- path = os.path.join(os.getenv('HOME', '/root'), path[2:])
- line = p.fmt_subst(line, 'workspace-dirs', "Workspace: {}".format(path))
- else:
+ path = os.path.join(os.getenv("HOME", "/root"), path[2:])
line = p.fmt_subst(
- line, 'workspace-dirs', '')
+ line, "workspace-dirs", "Workspace: {}".format(path)
+ )
+ else:
+ line = p.fmt_subst(line, "workspace-dirs", "")
# Dependencies
if "%{deps" in format_:
deps = [e.name for e in element.dependencies(Scope.ALL, recurse=False)]
line = p.fmt_subst(
- line, 'deps',
- yaml.safe_dump(deps, default_style=None).rstrip('\n'))
+ line, "deps", yaml.safe_dump(deps, default_style=None).rstrip("\n")
+ )
# Build Dependencies
if "%{build-deps" in format_:
- build_deps = [e.name for e in element.dependencies(Scope.BUILD, recurse=False)]
+ build_deps = [
+ e.name for e in element.dependencies(Scope.BUILD, recurse=False)
+ ]
line = p.fmt_subst(
- line, 'build-deps',
- yaml.safe_dump(build_deps, default_style=False).rstrip('\n'))
+ line,
+ "build-deps",
+ yaml.safe_dump(build_deps, default_style=False).rstrip("\n"),
+ )
# Runtime Dependencies
if "%{runtime-deps" in format_:
- runtime_deps = [e.name for e in element.dependencies(Scope.RUN, recurse=False)]
+ runtime_deps = [
+ e.name for e in element.dependencies(Scope.RUN, recurse=False)
+ ]
line = p.fmt_subst(
- line, 'runtime-deps',
- yaml.safe_dump(runtime_deps, default_style=False).rstrip('\n'))
+ line,
+ "runtime-deps",
+ yaml.safe_dump(runtime_deps, default_style=False).rstrip("\n"),
+ )
- report += line + '\n'
+ report += line + "\n"
- return report.rstrip('\n')
+ return report.rstrip("\n")
# print_heading()
#
@@ -464,25 +517,32 @@ class LogLine(Widget):
def print_heading(self, project, stream, *, log_file, styling=False):
context = self.context
starttime = datetime.datetime.now()
- text = ''
+ text = ""
- self._resolved_keys = {element: element._get_cache_key() for element in stream.session_elements}
+ self._resolved_keys = {
+ element: element._get_cache_key() for element in stream.session_elements
+ }
# Main invocation context
- text += '\n'
- text += self.content_profile.fmt("BuildStream Version {}\n".format(bst_version), bold=True)
+ text += "\n"
+ text += self.content_profile.fmt(
+ "BuildStream Version {}\n".format(bst_version), bold=True
+ )
values = OrderedDict()
- values["Session Start"] = starttime.strftime('%A, %d-%m-%Y at %H:%M:%S')
+ values["Session Start"] = starttime.strftime("%A, %d-%m-%Y at %H:%M:%S")
values["Project"] = "{} ({})".format(project.name, project.directory)
values["Targets"] = ", ".join([t.name for t in stream.targets])
text += self._format_values(values)
# User configurations
- text += '\n'
+ text += "\n"
text += self.content_profile.fmt("User Configuration\n", bold=True)
values = OrderedDict()
- values["Configuration File"] = \
- "Default Configuration" if not context.config_origin else context.config_origin
+ values["Configuration File"] = (
+ "Default Configuration"
+ if not context.config_origin
+ else context.config_origin
+ )
values["Cache Directory"] = context.cachedir
values["Log Files"] = context.logdir
values["Source Mirrors"] = context.sourcedir
@@ -493,7 +553,7 @@ class LogLine(Widget):
values["Maximum Push Tasks"] = context.sched_pushers
values["Maximum Network Retries"] = context.sched_network_retries
text += self._format_values(values)
- text += '\n'
+ text += "\n"
# Project Options
values = OrderedDict()
@@ -501,22 +561,26 @@ class LogLine(Widget):
if values:
text += self.content_profile.fmt("Project Options\n", bold=True)
text += self._format_values(values)
- text += '\n'
+ text += "\n"
# Plugins
- text += self._format_plugins(project.first_pass_config.element_factory.loaded_dependencies,
- project.first_pass_config.source_factory.loaded_dependencies)
+ text += self._format_plugins(
+ project.first_pass_config.element_factory.loaded_dependencies,
+ project.first_pass_config.source_factory.loaded_dependencies,
+ )
if project.config.element_factory and project.config.source_factory:
- text += self._format_plugins(project.config.element_factory.loaded_dependencies,
- project.config.source_factory.loaded_dependencies)
+ text += self._format_plugins(
+ project.config.element_factory.loaded_dependencies,
+ project.config.source_factory.loaded_dependencies,
+ )
# Pipeline state
text += self.content_profile.fmt("Pipeline\n", bold=True)
text += self.show_pipeline(stream.total_elements, context.log_element_format)
- text += '\n'
+ text += "\n"
# Separator line before following output
- text += self.format_profile.fmt("=" * 79 + '\n')
+ text += self.format_profile.fmt("=" * 79 + "\n")
click.echo(text, color=styling, nl=False, err=True)
if log_file:
@@ -539,10 +603,12 @@ class LogLine(Widget):
if not self._state.task_groups:
return
- text = ''
+ text = ""
assert self._resolved_keys is not None
- elements = sorted(e for (e, k) in self._resolved_keys.items() if k != e._get_cache_key())
+ elements = sorted(
+ e for (e, k) in self._resolved_keys.items() if k != e._get_cache_key()
+ )
if elements:
text += self.content_profile.fmt("Resolved key Summary\n", bold=True)
text += self.show_pipeline(elements, self.context.log_element_format)
@@ -556,7 +622,9 @@ class LogLine(Widget):
# Exclude the failure messages if the job didn't ultimately fail
# (e.g. succeeded on retry)
if element_name in group.failed_tasks:
- values[element_name] = ''.join(self._render(v) for v in messages)
+ values[element_name] = "".join(
+ self._render(v) for v in messages
+ )
if values:
text += self.content_profile.fmt("Failure Summary\n", bold=True)
@@ -565,8 +633,8 @@ class LogLine(Widget):
text += self.content_profile.fmt("Pipeline Summary\n", bold=True)
values = OrderedDict()
- values['Total'] = self.content_profile.fmt(str(len(stream.total_elements)))
- values['Session'] = self.content_profile.fmt(str(len(stream.session_elements)))
+ values["Total"] = self.content_profile.fmt(str(len(stream.total_elements)))
+ values["Session"] = self.content_profile.fmt(str(len(stream.session_elements)))
processed_maxlen = 1
skipped_maxlen = 1
@@ -581,20 +649,30 @@ class LogLine(Widget):
skipped = str(group.skipped_tasks)
failed = str(len(group.failed_tasks))
- processed_align = ' ' * (processed_maxlen - len(processed))
- skipped_align = ' ' * (skipped_maxlen - len(skipped))
- failed_align = ' ' * (failed_maxlen - len(failed))
-
- status_text = self.content_profile.fmt("processed ") + \
- self._success_profile.fmt(processed) + \
- self.format_profile.fmt(', ') + processed_align
-
- status_text += self.content_profile.fmt("skipped ") + \
- self.content_profile.fmt(skipped) + \
- self.format_profile.fmt(', ') + skipped_align
-
- status_text += self.content_profile.fmt("failed ") + \
- self._err_profile.fmt(failed) + ' ' + failed_align
+ processed_align = " " * (processed_maxlen - len(processed))
+ skipped_align = " " * (skipped_maxlen - len(skipped))
+ failed_align = " " * (failed_maxlen - len(failed))
+
+ status_text = (
+ self.content_profile.fmt("processed ")
+ + self._success_profile.fmt(processed)
+ + self.format_profile.fmt(", ")
+ + processed_align
+ )
+
+ status_text += (
+ self.content_profile.fmt("skipped ")
+ + self.content_profile.fmt(skipped)
+ + self.format_profile.fmt(", ")
+ + skipped_align
+ )
+
+ status_text += (
+ self.content_profile.fmt("failed ")
+ + self._err_profile.fmt(failed)
+ + " "
+ + failed_align
+ )
values["{} Queue".format(group.name)] = status_text
text += self._format_values(values, style_value=False)
@@ -623,35 +701,45 @@ class LogLine(Widget):
logfile_tokens = []
while format_string:
if format_string.startswith("%%"):
- logfile_tokens.append(FixedText(self.context, "%", content_profile, format_profile))
+ logfile_tokens.append(
+ FixedText(self.context, "%", content_profile, format_profile)
+ )
format_string = format_string[2:]
continue
m = re.search(r"^%\{([^\}]+)\}", format_string)
if m is not None:
variable = m.group(1)
- format_string = format_string[m.end(0):]
+ format_string = format_string[m.end(0) :]
if variable not in self.logfile_variable_names:
- raise Exception("'{0}' is not a valid log variable name.".format(variable))
+ raise Exception(
+ "'{0}' is not a valid log variable name.".format(variable)
+ )
logfile_tokens.append(self.logfile_variable_names[variable])
else:
m = re.search("^[^%]+", format_string)
if m is not None:
- text = FixedText(self.context, m.group(0), content_profile, format_profile)
- format_string = format_string[m.end(0):]
+ text = FixedText(
+ self.context, m.group(0), content_profile, format_profile
+ )
+ format_string = format_string[m.end(0) :]
logfile_tokens.append(text)
else:
# No idea what to do now
- raise Exception("'{0}' could not be parsed into a valid logging format.".format(format_string))
+ raise Exception(
+ "'{0}' could not be parsed into a valid logging format.".format(
+ format_string
+ )
+ )
return logfile_tokens
def _render(self, message):
# Render the column widgets first
- text = ''
+ text = ""
for widget in self._columns:
text += widget.render(message)
- text += '\n'
+ text += "\n"
extra_nl = False
@@ -666,51 +754,68 @@ class LogLine(Widget):
n_lines = len(lines)
abbrev = False
- if message.message_type not in ERROR_MESSAGES \
- and not frontend_message and n_lines > self._message_lines:
- lines = lines[0:self._message_lines]
+ if (
+ message.message_type not in ERROR_MESSAGES
+ and not frontend_message
+ and n_lines > self._message_lines
+ ):
+ lines = lines[0 : self._message_lines]
if self._message_lines > 0:
abbrev = True
else:
- lines[n_lines - 1] = lines[n_lines - 1].rstrip('\n')
+ lines[n_lines - 1] = lines[n_lines - 1].rstrip("\n")
detail = self._indent + self._indent.join(lines)
- text += '\n'
+ text += "\n"
if message.message_type in ERROR_MESSAGES:
text += self._err_profile.fmt(detail, bold=True)
else:
text += self._detail_profile.fmt(detail)
if abbrev:
- text += self._indent + \
- self.content_profile.fmt('Message contains {} additional lines'
- .format(n_lines - self._message_lines), dim=True)
- text += '\n'
+ text += self._indent + self.content_profile.fmt(
+ "Message contains {} additional lines".format(
+ n_lines - self._message_lines
+ ),
+ dim=True,
+ )
+ text += "\n"
extra_nl = True
if message.scheduler and message.message_type == MessageType.FAIL:
- text += '\n'
+ text += "\n"
if self.context is not None and not self.context.log_verbose:
text += self._indent + self._err_profile.fmt("Log file: ")
- text += self._indent + self._logfile_widget.render(message) + '\n'
+ text += self._indent + self._logfile_widget.render(message) + "\n"
elif self._log_lines > 0:
- text += self._indent + self._err_profile.fmt("Printing the last {} lines from log file:"
- .format(self._log_lines)) + '\n'
- text += self._indent + self._logfile_widget.render_abbrev(message, abbrev=False) + '\n'
- text += self._indent + self._err_profile.fmt("=" * 70) + '\n'
+ text += (
+ self._indent
+ + self._err_profile.fmt(
+ "Printing the last {} lines from log file:".format(
+ self._log_lines
+ )
+ )
+ + "\n"
+ )
+ text += (
+ self._indent
+ + self._logfile_widget.render_abbrev(message, abbrev=False)
+ + "\n"
+ )
+ text += self._indent + self._err_profile.fmt("=" * 70) + "\n"
log_content = self._read_last_lines(message.logfile)
log_content = textwrap.indent(log_content, self._indent)
text += self._detail_profile.fmt(log_content)
- text += '\n'
- text += self._indent + self._err_profile.fmt("=" * 70) + '\n'
+ text += "\n"
+ text += self._indent + self._err_profile.fmt("=" * 70) + "\n"
extra_nl = True
if extra_nl:
- text += '\n'
+ text += "\n"
return text
@@ -718,14 +823,14 @@ class LogLine(Widget):
with ExitStack() as stack:
# mmap handles low-level memory details, allowing for
# faster searches
- f = stack.enter_context(open(logfile, 'r+'))
+ f = stack.enter_context(open(logfile, "r+"))
log = stack.enter_context(mmap(f.fileno(), os.path.getsize(f.name)))
count = 0
end = log.size() - 1
while count < self._log_lines and end >= 0:
- location = log.rfind(b'\n', 0, end)
+ location = log.rfind(b"\n", 0, end)
count += 1
# If location is -1 (none found), this will print the
@@ -737,8 +842,8 @@ class LogLine(Widget):
             # then we get the first character. If end is a newline position,
# we discard it and only want to print the beginning of the next
# line.
- lines = log[(end + 1):].splitlines()
- return '\n'.join([line.decode('utf-8') for line in lines]).rstrip()
+ lines = log[(end + 1) :].splitlines()
+ return "\n".join([line.decode("utf-8") for line in lines]).rstrip()
def _format_plugins(self, element_plugins, source_plugins):
text = ""
@@ -758,7 +863,7 @@ class LogLine(Widget):
for plugin in source_plugins:
text += self.content_profile.fmt(" - {}\n".format(plugin))
- text += '\n'
+ text += "\n"
return text
@@ -775,23 +880,25 @@ class LogLine(Widget):
# (str): The formatted values
#
def _format_values(self, values, style_value=True):
- text = ''
+ text = ""
max_key_len = 0
for key, value in values.items():
max_key_len = max(len(key), max_key_len)
for key, value in values.items():
- if isinstance(value, str) and '\n' in value:
+ if isinstance(value, str) and "\n" in value:
text += self.format_profile.fmt(" {}:\n".format(key))
text += textwrap.indent(value, self._indent)
continue
- text += self.format_profile.fmt(" {}: {}".format(key, ' ' * (max_key_len - len(key))))
+ text += self.format_profile.fmt(
+ " {}: {}".format(key, " " * (max_key_len - len(key)))
+ )
if style_value:
text += self.content_profile.fmt(str(value))
else:
text += str(value)
- text += '\n'
+ text += "\n"
return text
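
The alignment logic in _format_values() pads every key to the longest key length so the values line up in a column. A plain-string sketch of the same idea, without the Profile colour formatting used above:

    import textwrap

    def format_values(values, indent="    "):
        max_key_len = max((len(key) for key in values), default=0)
        text = ""
        for key, value in values.items():
            if isinstance(value, str) and "\n" in value:
                text += "  {}:\n".format(key)
                text += textwrap.indent(value, indent)
                continue
            text += "  {}: {}{}\n".format(key, " " * (max_key_len - len(key)), value)
        return text
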
@@ -808,22 +915,26 @@ class LogLine(Widget):
# (str): The formatted values
#
def _pretty_print_dictionary(self, values, long_=False, style_value=True):
- text = ''
+ text = ""
max_key_len = 0
try:
max_key_len = max(len(key) for key in values.keys())
except ValueError:
- text = ''
+ text = ""
for key, value in values.items():
- if isinstance(value, str) and '\n' in value:
+ if isinstance(value, str) and "\n" in value:
text += self.format_profile.fmt(" {}:".format(key))
text += textwrap.indent(value, self._indent)
continue
- text += self.format_profile.fmt(" {}:{}".format(key, ' ' * (max_key_len - len(key))))
+ text += self.format_profile.fmt(
+ " {}:{}".format(key, " " * (max_key_len - len(key)))
+ )
- value_list = "\n\t" + "\n\t".join((self._get_filestats(v, list_long=long_) for v in value))
+ value_list = "\n\t" + "\n\t".join(
+ (self._get_filestats(v, list_long=long_) for v in value)
+ )
if value == []:
message = "\n\tThis element has no associated artifacts"
if style_value:
@@ -834,7 +945,7 @@ class LogLine(Widget):
text += self.content_profile.fmt(value_list)
else:
text += value_list
- text += '\n'
+ text += "\n"
return text
@@ -856,22 +967,22 @@ class LogLine(Widget):
# cached status of
#
def show_state_of_artifacts(self, targets):
- report = ''
+ report = ""
p = Profile()
for element in targets:
- line = '%{state: >12} %{name}'
- line = p.fmt_subst(line, 'name', element.name, fg='yellow')
+ line = "%{state: >12} %{name}"
+ line = p.fmt_subst(line, "name", element.name, fg="yellow")
if element._cached_success():
- line = p.fmt_subst(line, 'state', "cached", fg='magenta')
+ line = p.fmt_subst(line, "state", "cached", fg="magenta")
elif element._cached():
- line = p.fmt_subst(line, 'state', "failed", fg='red')
+ line = p.fmt_subst(line, "state", "failed", fg="red")
elif element._cached_remotely():
- line = p.fmt_subst(line, 'state', "available", fg='green')
+ line = p.fmt_subst(line, "state", "available", fg="green")
else:
- line = p.fmt_subst(line, 'state', "not cached", fg='bright_red')
+ line = p.fmt_subst(line, "state", "not cached", fg="bright_red")
- report += line + '\n'
+ report += line + "\n"
return report
@@ -892,15 +1003,27 @@ class LogLine(Widget):
# Support files up to 99G, meaning maximum characters is 11
max_v_len = 11
if entry["type"] == _FileType.DIRECTORY:
- return "drwxr-xr-x dir {}".format(entry["size"]) +\
- "{} ".format(' ' * (max_v_len - len(size))) + "{}".format(entry["name"])
+ return (
+ "drwxr-xr-x dir {}".format(entry["size"])
+ + "{} ".format(" " * (max_v_len - len(size)))
+ + "{}".format(entry["name"])
+ )
elif entry["type"] == _FileType.SYMLINK:
- return "lrwxrwxrwx link {}".format(entry["size"]) +\
- "{} ".format(' ' * (max_v_len - len(size))) + "{} -> {}".format(entry["name"], entry["target"])
+ return (
+ "lrwxrwxrwx link {}".format(entry["size"])
+ + "{} ".format(" " * (max_v_len - len(size)))
+ + "{} -> {}".format(entry["name"], entry["target"])
+ )
elif entry["executable"]:
- return "-rwxr-xr-x exe {}".format(entry["size"]) +\
- "{} ".format(' ' * (max_v_len - len(size))) + "{}".format(entry["name"])
+ return (
+ "-rwxr-xr-x exe {}".format(entry["size"])
+ + "{} ".format(" " * (max_v_len - len(size)))
+ + "{}".format(entry["name"])
+ )
else:
- return "-rw-r--r-- reg {}".format(entry["size"]) +\
- "{} ".format(' ' * (max_v_len - len(size))) + "{}".format(entry["name"])
+ return (
+ "-rw-r--r-- reg {}".format(entry["size"])
+ + "{} ".format(" " * (max_v_len - len(size)))
+ + "{}".format(entry["name"])
+ )
return entry["name"]
diff --git a/src/buildstream/_fuse/fuse.py b/src/buildstream/_fuse/fuse.py
index 41e126ef5..9bedb2d4b 100644
--- a/src/buildstream/_fuse/fuse.py
+++ b/src/buildstream/_fuse/fuse.py
@@ -49,40 +49,46 @@ except ImportError:
newfunc.keywords = keywords
return newfunc
+
try:
basestring
except NameError:
basestring = str
+
class c_timespec(Structure):
- _fields_ = [('tv_sec', c_long), ('tv_nsec', c_long)]
+ _fields_ = [("tv_sec", c_long), ("tv_nsec", c_long)]
+
class c_utimbuf(Structure):
- _fields_ = [('actime', c_timespec), ('modtime', c_timespec)]
+ _fields_ = [("actime", c_timespec), ("modtime", c_timespec)]
+
class c_stat(Structure):
- pass # Platform dependent
+ pass # Platform dependent
+
_system = system()
_machine = machine()
-if _system == 'Darwin':
- _libiconv = CDLL(find_library('iconv'), RTLD_GLOBAL) # libfuse dependency
- _libfuse_path = (find_library('fuse4x') or find_library('osxfuse') or
- find_library('fuse'))
+if _system == "Darwin":
+ _libiconv = CDLL(find_library("iconv"), RTLD_GLOBAL) # libfuse dependency
+ _libfuse_path = (
+ find_library("fuse4x") or find_library("osxfuse") or find_library("fuse")
+ )
else:
- _libfuse_path = find_library('fuse')
+ _libfuse_path = find_library("fuse")
if not _libfuse_path:
- raise EnvironmentError('Unable to find libfuse')
+ raise EnvironmentError("Unable to find libfuse")
else:
_libfuse = CDLL(_libfuse_path)
-if _system == 'Darwin' and hasattr(_libfuse, 'macfuse_version'):
- _system = 'Darwin-MacFuse'
+if _system == "Darwin" and hasattr(_libfuse, "macfuse_version"):
+ _system = "Darwin-MacFuse"
-if _system in ('Darwin', 'Darwin-MacFuse', 'FreeBSD'):
+if _system in ("Darwin", "Darwin-MacFuse", "FreeBSD"):
ENOTSUP = 45
c_dev_t = c_int32
c_fsblkcnt_t = c_ulong
@@ -92,46 +98,50 @@ if _system in ('Darwin', 'Darwin-MacFuse', 'FreeBSD'):
c_off_t = c_int64
c_pid_t = c_int32
c_uid_t = c_uint32
- setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte),
- c_size_t, c_int, c_uint32)
- getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte),
- c_size_t, c_uint32)
- if _system == 'Darwin':
+ setxattr_t = CFUNCTYPE(
+ c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_int, c_uint32
+ )
+ getxattr_t = CFUNCTYPE(
+ c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_uint32
+ )
+ if _system == "Darwin":
c_stat._fields_ = [
- ('st_dev', c_dev_t),
- ('st_mode', c_mode_t),
- ('st_nlink', c_uint16),
- ('st_ino', c_uint64),
- ('st_uid', c_uid_t),
- ('st_gid', c_gid_t),
- ('st_rdev', c_dev_t),
- ('st_atimespec', c_timespec),
- ('st_mtimespec', c_timespec),
- ('st_ctimespec', c_timespec),
- ('st_birthtimespec', c_timespec),
- ('st_size', c_off_t),
- ('st_blocks', c_int64),
- ('st_blksize', c_int32),
- ('st_flags', c_int32),
- ('st_gen', c_int32),
- ('st_lspare', c_int32),
- ('st_qspare', c_int64)]
+ ("st_dev", c_dev_t),
+ ("st_mode", c_mode_t),
+ ("st_nlink", c_uint16),
+ ("st_ino", c_uint64),
+ ("st_uid", c_uid_t),
+ ("st_gid", c_gid_t),
+ ("st_rdev", c_dev_t),
+ ("st_atimespec", c_timespec),
+ ("st_mtimespec", c_timespec),
+ ("st_ctimespec", c_timespec),
+ ("st_birthtimespec", c_timespec),
+ ("st_size", c_off_t),
+ ("st_blocks", c_int64),
+ ("st_blksize", c_int32),
+ ("st_flags", c_int32),
+ ("st_gen", c_int32),
+ ("st_lspare", c_int32),
+ ("st_qspare", c_int64),
+ ]
else:
c_stat._fields_ = [
- ('st_dev', c_dev_t),
- ('st_ino', c_uint32),
- ('st_mode', c_mode_t),
- ('st_nlink', c_uint16),
- ('st_uid', c_uid_t),
- ('st_gid', c_gid_t),
- ('st_rdev', c_dev_t),
- ('st_atimespec', c_timespec),
- ('st_mtimespec', c_timespec),
- ('st_ctimespec', c_timespec),
- ('st_size', c_off_t),
- ('st_blocks', c_int64),
- ('st_blksize', c_int32)]
-elif _system == 'Linux':
+ ("st_dev", c_dev_t),
+ ("st_ino", c_uint32),
+ ("st_mode", c_mode_t),
+ ("st_nlink", c_uint16),
+ ("st_uid", c_uid_t),
+ ("st_gid", c_gid_t),
+ ("st_rdev", c_dev_t),
+ ("st_atimespec", c_timespec),
+ ("st_mtimespec", c_timespec),
+ ("st_ctimespec", c_timespec),
+ ("st_size", c_off_t),
+ ("st_blocks", c_int64),
+ ("st_blksize", c_int32),
+ ]
+elif _system == "Linux":
ENOTSUP = 95
c_dev_t = c_ulonglong
c_fsblkcnt_t = c_ulonglong
@@ -141,273 +151,295 @@ elif _system == 'Linux':
c_off_t = c_longlong
c_pid_t = c_int
c_uid_t = c_uint
- setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte),
- c_size_t, c_int)
+ setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_int)
- getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte),
- c_size_t)
+ getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t)
- if _machine == 'x86_64':
+ if _machine == "x86_64":
c_stat._fields_ = [
- ('st_dev', c_dev_t),
- ('st_ino', c_ulong),
- ('st_nlink', c_ulong),
- ('st_mode', c_mode_t),
- ('st_uid', c_uid_t),
- ('st_gid', c_gid_t),
- ('__pad0', c_int),
- ('st_rdev', c_dev_t),
- ('st_size', c_off_t),
- ('st_blksize', c_long),
- ('st_blocks', c_long),
- ('st_atimespec', c_timespec),
- ('st_mtimespec', c_timespec),
- ('st_ctimespec', c_timespec)]
- elif _machine == 'mips':
+ ("st_dev", c_dev_t),
+ ("st_ino", c_ulong),
+ ("st_nlink", c_ulong),
+ ("st_mode", c_mode_t),
+ ("st_uid", c_uid_t),
+ ("st_gid", c_gid_t),
+ ("__pad0", c_int),
+ ("st_rdev", c_dev_t),
+ ("st_size", c_off_t),
+ ("st_blksize", c_long),
+ ("st_blocks", c_long),
+ ("st_atimespec", c_timespec),
+ ("st_mtimespec", c_timespec),
+ ("st_ctimespec", c_timespec),
+ ]
+ elif _machine == "mips":
c_stat._fields_ = [
- ('st_dev', c_dev_t),
- ('__pad1_1', c_ulong),
- ('__pad1_2', c_ulong),
- ('__pad1_3', c_ulong),
- ('st_ino', c_ulong),
- ('st_mode', c_mode_t),
- ('st_nlink', c_ulong),
- ('st_uid', c_uid_t),
- ('st_gid', c_gid_t),
- ('st_rdev', c_dev_t),
- ('__pad2_1', c_ulong),
- ('__pad2_2', c_ulong),
- ('st_size', c_off_t),
- ('__pad3', c_ulong),
- ('st_atimespec', c_timespec),
- ('__pad4', c_ulong),
- ('st_mtimespec', c_timespec),
- ('__pad5', c_ulong),
- ('st_ctimespec', c_timespec),
- ('__pad6', c_ulong),
- ('st_blksize', c_long),
- ('st_blocks', c_long),
- ('__pad7_1', c_ulong),
- ('__pad7_2', c_ulong),
- ('__pad7_3', c_ulong),
- ('__pad7_4', c_ulong),
- ('__pad7_5', c_ulong),
- ('__pad7_6', c_ulong),
- ('__pad7_7', c_ulong),
- ('__pad7_8', c_ulong),
- ('__pad7_9', c_ulong),
- ('__pad7_10', c_ulong),
- ('__pad7_11', c_ulong),
- ('__pad7_12', c_ulong),
- ('__pad7_13', c_ulong),
- ('__pad7_14', c_ulong)]
- elif _machine == 'ppc':
+ ("st_dev", c_dev_t),
+ ("__pad1_1", c_ulong),
+ ("__pad1_2", c_ulong),
+ ("__pad1_3", c_ulong),
+ ("st_ino", c_ulong),
+ ("st_mode", c_mode_t),
+ ("st_nlink", c_ulong),
+ ("st_uid", c_uid_t),
+ ("st_gid", c_gid_t),
+ ("st_rdev", c_dev_t),
+ ("__pad2_1", c_ulong),
+ ("__pad2_2", c_ulong),
+ ("st_size", c_off_t),
+ ("__pad3", c_ulong),
+ ("st_atimespec", c_timespec),
+ ("__pad4", c_ulong),
+ ("st_mtimespec", c_timespec),
+ ("__pad5", c_ulong),
+ ("st_ctimespec", c_timespec),
+ ("__pad6", c_ulong),
+ ("st_blksize", c_long),
+ ("st_blocks", c_long),
+ ("__pad7_1", c_ulong),
+ ("__pad7_2", c_ulong),
+ ("__pad7_3", c_ulong),
+ ("__pad7_4", c_ulong),
+ ("__pad7_5", c_ulong),
+ ("__pad7_6", c_ulong),
+ ("__pad7_7", c_ulong),
+ ("__pad7_8", c_ulong),
+ ("__pad7_9", c_ulong),
+ ("__pad7_10", c_ulong),
+ ("__pad7_11", c_ulong),
+ ("__pad7_12", c_ulong),
+ ("__pad7_13", c_ulong),
+ ("__pad7_14", c_ulong),
+ ]
+ elif _machine == "ppc":
c_stat._fields_ = [
- ('st_dev', c_dev_t),
- ('st_ino', c_ulonglong),
- ('st_mode', c_mode_t),
- ('st_nlink', c_uint),
- ('st_uid', c_uid_t),
- ('st_gid', c_gid_t),
- ('st_rdev', c_dev_t),
- ('__pad2', c_ushort),
- ('st_size', c_off_t),
- ('st_blksize', c_long),
- ('st_blocks', c_longlong),
- ('st_atimespec', c_timespec),
- ('st_mtimespec', c_timespec),
- ('st_ctimespec', c_timespec)]
- elif _machine == 'ppc64' or _machine == 'ppc64le':
+ ("st_dev", c_dev_t),
+ ("st_ino", c_ulonglong),
+ ("st_mode", c_mode_t),
+ ("st_nlink", c_uint),
+ ("st_uid", c_uid_t),
+ ("st_gid", c_gid_t),
+ ("st_rdev", c_dev_t),
+ ("__pad2", c_ushort),
+ ("st_size", c_off_t),
+ ("st_blksize", c_long),
+ ("st_blocks", c_longlong),
+ ("st_atimespec", c_timespec),
+ ("st_mtimespec", c_timespec),
+ ("st_ctimespec", c_timespec),
+ ]
+ elif _machine == "ppc64" or _machine == "ppc64le":
c_stat._fields_ = [
- ('st_dev', c_dev_t),
- ('st_ino', c_ulong),
- ('st_nlink', c_ulong),
- ('st_mode', c_mode_t),
- ('st_uid', c_uid_t),
- ('st_gid', c_gid_t),
- ('__pad', c_uint),
- ('st_rdev', c_dev_t),
- ('st_size', c_off_t),
- ('st_blksize', c_long),
- ('st_blocks', c_long),
- ('st_atimespec', c_timespec),
- ('st_mtimespec', c_timespec),
- ('st_ctimespec', c_timespec)]
- elif _machine == 'aarch64':
+ ("st_dev", c_dev_t),
+ ("st_ino", c_ulong),
+ ("st_nlink", c_ulong),
+ ("st_mode", c_mode_t),
+ ("st_uid", c_uid_t),
+ ("st_gid", c_gid_t),
+ ("__pad", c_uint),
+ ("st_rdev", c_dev_t),
+ ("st_size", c_off_t),
+ ("st_blksize", c_long),
+ ("st_blocks", c_long),
+ ("st_atimespec", c_timespec),
+ ("st_mtimespec", c_timespec),
+ ("st_ctimespec", c_timespec),
+ ]
+ elif _machine == "aarch64":
c_stat._fields_ = [
- ('st_dev', c_dev_t),
- ('st_ino', c_ulong),
- ('st_mode', c_mode_t),
- ('st_nlink', c_uint),
- ('st_uid', c_uid_t),
- ('st_gid', c_gid_t),
- ('st_rdev', c_dev_t),
- ('__pad1', c_ulong),
- ('st_size', c_off_t),
- ('st_blksize', c_int),
- ('__pad2', c_int),
- ('st_blocks', c_long),
- ('st_atimespec', c_timespec),
- ('st_mtimespec', c_timespec),
- ('st_ctimespec', c_timespec)]
+ ("st_dev", c_dev_t),
+ ("st_ino", c_ulong),
+ ("st_mode", c_mode_t),
+ ("st_nlink", c_uint),
+ ("st_uid", c_uid_t),
+ ("st_gid", c_gid_t),
+ ("st_rdev", c_dev_t),
+ ("__pad1", c_ulong),
+ ("st_size", c_off_t),
+ ("st_blksize", c_int),
+ ("__pad2", c_int),
+ ("st_blocks", c_long),
+ ("st_atimespec", c_timespec),
+ ("st_mtimespec", c_timespec),
+ ("st_ctimespec", c_timespec),
+ ]
else:
# i686, use as fallback for everything else
c_stat._fields_ = [
- ('st_dev', c_dev_t),
- ('__pad1', c_ushort),
- ('__st_ino', c_ulong),
- ('st_mode', c_mode_t),
- ('st_nlink', c_uint),
- ('st_uid', c_uid_t),
- ('st_gid', c_gid_t),
- ('st_rdev', c_dev_t),
- ('__pad2', c_ushort),
- ('st_size', c_off_t),
- ('st_blksize', c_long),
- ('st_blocks', c_longlong),
- ('st_atimespec', c_timespec),
- ('st_mtimespec', c_timespec),
- ('st_ctimespec', c_timespec),
- ('st_ino', c_ulonglong)]
+ ("st_dev", c_dev_t),
+ ("__pad1", c_ushort),
+ ("__st_ino", c_ulong),
+ ("st_mode", c_mode_t),
+ ("st_nlink", c_uint),
+ ("st_uid", c_uid_t),
+ ("st_gid", c_gid_t),
+ ("st_rdev", c_dev_t),
+ ("__pad2", c_ushort),
+ ("st_size", c_off_t),
+ ("st_blksize", c_long),
+ ("st_blocks", c_longlong),
+ ("st_atimespec", c_timespec),
+ ("st_mtimespec", c_timespec),
+ ("st_ctimespec", c_timespec),
+ ("st_ino", c_ulonglong),
+ ]
else:
- raise NotImplementedError('{} is not supported.'.format(_system))
+ raise NotImplementedError("{} is not supported.".format(_system))
class c_statvfs(Structure):
_fields_ = [
- ('f_bsize', c_ulong),
- ('f_frsize', c_ulong),
- ('f_blocks', c_fsblkcnt_t),
- ('f_bfree', c_fsblkcnt_t),
- ('f_bavail', c_fsblkcnt_t),
- ('f_files', c_fsfilcnt_t),
- ('f_ffree', c_fsfilcnt_t),
- ('f_favail', c_fsfilcnt_t),
- ('f_fsid', c_ulong),
- #('unused', c_int),
- ('f_flag', c_ulong),
- ('f_namemax', c_ulong)]
-
-if _system == 'FreeBSD':
+ ("f_bsize", c_ulong),
+ ("f_frsize", c_ulong),
+ ("f_blocks", c_fsblkcnt_t),
+ ("f_bfree", c_fsblkcnt_t),
+ ("f_bavail", c_fsblkcnt_t),
+ ("f_files", c_fsfilcnt_t),
+ ("f_ffree", c_fsfilcnt_t),
+ ("f_favail", c_fsfilcnt_t),
+ ("f_fsid", c_ulong),
+ # ('unused', c_int),
+ ("f_flag", c_ulong),
+ ("f_namemax", c_ulong),
+ ]
+
+
+if _system == "FreeBSD":
c_fsblkcnt_t = c_uint64
c_fsfilcnt_t = c_uint64
- setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte),
- c_size_t, c_int)
+ setxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t, c_int)
- getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte),
- c_size_t)
+ getxattr_t = CFUNCTYPE(c_int, c_char_p, c_char_p, POINTER(c_byte), c_size_t)
class c_statvfs(Structure):
_fields_ = [
- ('f_bavail', c_fsblkcnt_t),
- ('f_bfree', c_fsblkcnt_t),
- ('f_blocks', c_fsblkcnt_t),
- ('f_favail', c_fsfilcnt_t),
- ('f_ffree', c_fsfilcnt_t),
- ('f_files', c_fsfilcnt_t),
- ('f_bsize', c_ulong),
- ('f_flag', c_ulong),
- ('f_frsize', c_ulong)]
+ ("f_bavail", c_fsblkcnt_t),
+ ("f_bfree", c_fsblkcnt_t),
+ ("f_blocks", c_fsblkcnt_t),
+ ("f_favail", c_fsfilcnt_t),
+ ("f_ffree", c_fsfilcnt_t),
+ ("f_files", c_fsfilcnt_t),
+ ("f_bsize", c_ulong),
+ ("f_flag", c_ulong),
+ ("f_frsize", c_ulong),
+ ]
+
class fuse_file_info(Structure):
_fields_ = [
- ('flags', c_int),
- ('fh_old', c_ulong),
- ('writepage', c_int),
- ('direct_io', c_uint, 1),
- ('keep_cache', c_uint, 1),
- ('flush', c_uint, 1),
- ('padding', c_uint, 29),
- ('fh', c_uint64),
- ('lock_owner', c_uint64)]
+ ("flags", c_int),
+ ("fh_old", c_ulong),
+ ("writepage", c_int),
+ ("direct_io", c_uint, 1),
+ ("keep_cache", c_uint, 1),
+ ("flush", c_uint, 1),
+ ("padding", c_uint, 29),
+ ("fh", c_uint64),
+ ("lock_owner", c_uint64),
+ ]
+
class fuse_context(Structure):
_fields_ = [
- ('fuse', c_voidp),
- ('uid', c_uid_t),
- ('gid', c_gid_t),
- ('pid', c_pid_t),
- ('private_data', c_voidp)]
+ ("fuse", c_voidp),
+ ("uid", c_uid_t),
+ ("gid", c_gid_t),
+ ("pid", c_pid_t),
+ ("private_data", c_voidp),
+ ]
+
_libfuse.fuse_get_context.restype = POINTER(fuse_context)
class fuse_operations(Structure):
_fields_ = [
- ('getattr', CFUNCTYPE(c_int, c_char_p, POINTER(c_stat))),
- ('readlink', CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t)),
- ('getdir', c_voidp), # Deprecated, use readdir
- ('mknod', CFUNCTYPE(c_int, c_char_p, c_mode_t, c_dev_t)),
- ('mkdir', CFUNCTYPE(c_int, c_char_p, c_mode_t)),
- ('unlink', CFUNCTYPE(c_int, c_char_p)),
- ('rmdir', CFUNCTYPE(c_int, c_char_p)),
- ('symlink', CFUNCTYPE(c_int, c_char_p, c_char_p)),
- ('rename', CFUNCTYPE(c_int, c_char_p, c_char_p)),
- ('link', CFUNCTYPE(c_int, c_char_p, c_char_p)),
- ('chmod', CFUNCTYPE(c_int, c_char_p, c_mode_t)),
- ('chown', CFUNCTYPE(c_int, c_char_p, c_uid_t, c_gid_t)),
- ('truncate', CFUNCTYPE(c_int, c_char_p, c_off_t)),
- ('utime', c_voidp), # Deprecated, use utimens
- ('open', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
-
- ('read', CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t,
- c_off_t, POINTER(fuse_file_info))),
-
- ('write', CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t,
- c_off_t, POINTER(fuse_file_info))),
-
- ('statfs', CFUNCTYPE(c_int, c_char_p, POINTER(c_statvfs))),
- ('flush', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
- ('release', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
- ('fsync', CFUNCTYPE(c_int, c_char_p, c_int, POINTER(fuse_file_info))),
- ('setxattr', setxattr_t),
- ('getxattr', getxattr_t),
- ('listxattr', CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t)),
- ('removexattr', CFUNCTYPE(c_int, c_char_p, c_char_p)),
- ('opendir', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
-
- ('readdir', CFUNCTYPE(c_int, c_char_p, c_voidp,
- CFUNCTYPE(c_int, c_voidp, c_char_p,
- POINTER(c_stat), c_off_t),
- c_off_t, POINTER(fuse_file_info))),
-
- ('releasedir', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
-
- ('fsyncdir', CFUNCTYPE(c_int, c_char_p, c_int,
- POINTER(fuse_file_info))),
-
- ('init', CFUNCTYPE(c_voidp, c_voidp)),
- ('destroy', CFUNCTYPE(c_voidp, c_voidp)),
- ('access', CFUNCTYPE(c_int, c_char_p, c_int)),
-
- ('create', CFUNCTYPE(c_int, c_char_p, c_mode_t,
- POINTER(fuse_file_info))),
-
- ('ftruncate', CFUNCTYPE(c_int, c_char_p, c_off_t,
- POINTER(fuse_file_info))),
-
- ('fgetattr', CFUNCTYPE(c_int, c_char_p, POINTER(c_stat),
- POINTER(fuse_file_info))),
-
- ('lock', CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info),
- c_int, c_voidp)),
-
- ('utimens', CFUNCTYPE(c_int, c_char_p, POINTER(c_utimbuf))),
- ('bmap', CFUNCTYPE(c_int, c_char_p, c_size_t, POINTER(c_ulonglong))),
- ('flag_nullpath_ok', c_uint, 1),
- ('flag_nopath', c_uint, 1),
- ('flag_utime_omit_ok', c_uint, 1),
- ('flag_reserved', c_uint, 29),
+ ("getattr", CFUNCTYPE(c_int, c_char_p, POINTER(c_stat))),
+ ("readlink", CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t)),
+ ("getdir", c_voidp), # Deprecated, use readdir
+ ("mknod", CFUNCTYPE(c_int, c_char_p, c_mode_t, c_dev_t)),
+ ("mkdir", CFUNCTYPE(c_int, c_char_p, c_mode_t)),
+ ("unlink", CFUNCTYPE(c_int, c_char_p)),
+ ("rmdir", CFUNCTYPE(c_int, c_char_p)),
+ ("symlink", CFUNCTYPE(c_int, c_char_p, c_char_p)),
+ ("rename", CFUNCTYPE(c_int, c_char_p, c_char_p)),
+ ("link", CFUNCTYPE(c_int, c_char_p, c_char_p)),
+ ("chmod", CFUNCTYPE(c_int, c_char_p, c_mode_t)),
+ ("chown", CFUNCTYPE(c_int, c_char_p, c_uid_t, c_gid_t)),
+ ("truncate", CFUNCTYPE(c_int, c_char_p, c_off_t)),
+ ("utime", c_voidp), # Deprecated, use utimens
+ ("open", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
+ (
+ "read",
+ CFUNCTYPE(
+ c_int,
+ c_char_p,
+ POINTER(c_byte),
+ c_size_t,
+ c_off_t,
+ POINTER(fuse_file_info),
+ ),
+ ),
+ (
+ "write",
+ CFUNCTYPE(
+ c_int,
+ c_char_p,
+ POINTER(c_byte),
+ c_size_t,
+ c_off_t,
+ POINTER(fuse_file_info),
+ ),
+ ),
+ ("statfs", CFUNCTYPE(c_int, c_char_p, POINTER(c_statvfs))),
+ ("flush", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
+ ("release", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
+ ("fsync", CFUNCTYPE(c_int, c_char_p, c_int, POINTER(fuse_file_info))),
+ ("setxattr", setxattr_t),
+ ("getxattr", getxattr_t),
+ ("listxattr", CFUNCTYPE(c_int, c_char_p, POINTER(c_byte), c_size_t)),
+ ("removexattr", CFUNCTYPE(c_int, c_char_p, c_char_p)),
+ ("opendir", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
+ (
+ "readdir",
+ CFUNCTYPE(
+ c_int,
+ c_char_p,
+ c_voidp,
+ CFUNCTYPE(c_int, c_voidp, c_char_p, POINTER(c_stat), c_off_t),
+ c_off_t,
+ POINTER(fuse_file_info),
+ ),
+ ),
+ ("releasedir", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info))),
+ ("fsyncdir", CFUNCTYPE(c_int, c_char_p, c_int, POINTER(fuse_file_info))),
+ ("init", CFUNCTYPE(c_voidp, c_voidp)),
+ ("destroy", CFUNCTYPE(c_voidp, c_voidp)),
+ ("access", CFUNCTYPE(c_int, c_char_p, c_int)),
+ ("create", CFUNCTYPE(c_int, c_char_p, c_mode_t, POINTER(fuse_file_info))),
+ ("ftruncate", CFUNCTYPE(c_int, c_char_p, c_off_t, POINTER(fuse_file_info))),
+ (
+ "fgetattr",
+ CFUNCTYPE(c_int, c_char_p, POINTER(c_stat), POINTER(fuse_file_info)),
+ ),
+ ("lock", CFUNCTYPE(c_int, c_char_p, POINTER(fuse_file_info), c_int, c_voidp)),
+ ("utimens", CFUNCTYPE(c_int, c_char_p, POINTER(c_utimbuf))),
+ ("bmap", CFUNCTYPE(c_int, c_char_p, c_size_t, POINTER(c_ulonglong))),
+ ("flag_nullpath_ok", c_uint, 1),
+ ("flag_nopath", c_uint, 1),
+ ("flag_utime_omit_ok", c_uint, 1),
+ ("flag_reserved", c_uint, 29),
]
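
The fuse_operations structure above is a plain ctypes callback table: each slot is a CFUNCTYPE function-pointer type, and libfuse calls back into Python through it. A tiny, self-contained illustration of that mechanism (deliberately unrelated to the real libfuse ABI):

    from ctypes import CFUNCTYPE, Structure, c_char_p, c_int

    GETATTR_T = CFUNCTYPE(c_int, c_char_p)

    class DemoOperations(Structure):
        _fields_ = [("getattr", GETATTR_T)]

    def py_getattr(path):
        print("getattr called for", path)
        return 0

    callback = GETATTR_T(py_getattr)        # must stay referenced while the table is in use
    ops = DemoOperations(getattr=callback)
    ops.getattr(b"/")                       # invokes py_getattr through the function pointer
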
def time_of_timespec(ts):
return ts.tv_sec + ts.tv_nsec / 10 ** 9
+
def set_st_attrs(st, attrs):
for key, val in attrs.items():
- if key in ('st_atime', 'st_mtime', 'st_ctime', 'st_birthtime'):
- timespec = getattr(st, key + 'spec', None)
+ if key in ("st_atime", "st_mtime", "st_ctime", "st_birthtime"):
+ timespec = getattr(st, key + "spec", None)
if timespec is None:
continue
timespec.tv_sec = int(val)
@@ -417,7 +449,7 @@ def set_st_attrs(st, attrs):
def fuse_get_context():
- 'Returns a (uid, gid, pid) tuple'
+ "Returns a (uid, gid, pid) tuple"
ctxp = _libfuse.fuse_get_context()
ctx = ctxp.contents
@@ -430,28 +462,29 @@ class FuseOSError(OSError):
class FUSE(object):
- '''
+ """
This class is the lower level interface and should not be subclassed under
normal use. Its methods are called by fuse.
Assumes API version 2.6 or later.
- '''
+ """
OPTIONS = (
- ('foreground', '-f'),
- ('debug', '-d'),
- ('nothreads', '-s'),
+ ("foreground", "-f"),
+ ("debug", "-d"),
+ ("nothreads", "-s"),
)
- def __init__(self, operations, mountpoint, raw_fi=False, encoding='utf-8',
- **kwargs):
+ def __init__(
+ self, operations, mountpoint, raw_fi=False, encoding="utf-8", **kwargs
+ ):
- '''
+ """
Setting raw_fi to True will cause FUSE to pass the fuse_file_info
class as is to Operations, instead of just the fh field.
This gives you access to direct_io, keep_cache, etc.
- '''
+ """
# Note that in BuildStream we're assuming that raw_fi is always False.
assert not raw_fi, "raw_fi is not supported in BuildStream."
@@ -460,14 +493,13 @@ class FUSE(object):
self.raw_fi = raw_fi
self.encoding = encoding
- args = ['fuse']
+ args = ["fuse"]
- args.extend(flag for arg, flag in self.OPTIONS
- if kwargs.pop(arg, False))
+ args.extend(flag for arg, flag in self.OPTIONS if kwargs.pop(arg, False))
- kwargs.setdefault('fsname', operations.__class__.__name__)
- args.append('-o')
- args.append(','.join(self._normalize_fuse_options(**kwargs)))
+ kwargs.setdefault("fsname", operations.__class__.__name__)
+ args.append("-o")
+ args.append(",".join(self._normalize_fuse_options(**kwargs)))
args.append(mountpoint)
args = [arg.encode(encoding) for arg in args]
@@ -484,7 +516,7 @@ class FUSE(object):
# Function pointer members are tested for using the
# getattr(operations, name) above but are dynamically
# invoked using self.operations(name)
- if hasattr(prototype, 'argtypes'):
+ if hasattr(prototype, "argtypes"):
val = prototype(partial(self._wrapper, getattr(self, name)))
setattr(fuse_ops, name, val)
@@ -494,15 +526,16 @@ class FUSE(object):
except ValueError:
old_handler = SIG_DFL
- err = _libfuse.fuse_main_real(len(args), argv, pointer(fuse_ops),
- sizeof(fuse_ops), None)
+ err = _libfuse.fuse_main_real(
+ len(args), argv, pointer(fuse_ops), sizeof(fuse_ops), None
+ )
try:
signal(SIGINT, old_handler)
except ValueError:
pass
- del self.operations # Invoke the destructor
+ del self.operations # Invoke the destructor
if err:
raise RuntimeError(err)
@@ -510,13 +543,14 @@ class FUSE(object):
def _normalize_fuse_options(**kargs):
for key, value in kargs.items():
if isinstance(value, bool):
- if value is True: yield key
+ if value is True:
+ yield key
else:
- yield '{}={}'.format(key, value)
+ yield "{}={}".format(key, value)
@staticmethod
def _wrapper(func, *args, **kwargs):
- 'Decorator for the methods that follow'
+ "Decorator for the methods that follow"
try:
return func(*args, **kwargs) or 0
@@ -538,45 +572,49 @@ class FUSE(object):
return self.fgetattr(path, buf, None)
def readlink(self, path, buf, bufsize):
- ret = self.operations('readlink', path.decode(self.encoding)) \
- .encode(self.encoding)
+ ret = self.operations("readlink", path.decode(self.encoding)).encode(
+ self.encoding
+ )
# copies a string into the given buffer
# (null terminated and truncated if necessary)
- data = create_string_buffer(ret[:bufsize - 1])
+ data = create_string_buffer(ret[: bufsize - 1])
memmove(buf, data, len(data))
return 0
def mknod(self, path, mode, dev):
- return self.operations('mknod', path.decode(self.encoding), mode, dev)
+ return self.operations("mknod", path.decode(self.encoding), mode, dev)
def mkdir(self, path, mode):
- return self.operations('mkdir', path.decode(self.encoding), mode)
+ return self.operations("mkdir", path.decode(self.encoding), mode)
def unlink(self, path):
- return self.operations('unlink', path.decode(self.encoding))
+ return self.operations("unlink", path.decode(self.encoding))
def rmdir(self, path):
- return self.operations('rmdir', path.decode(self.encoding))
+ return self.operations("rmdir", path.decode(self.encoding))
def symlink(self, source, target):
- 'creates a symlink `target -> source` (e.g. ln -s source target)'
+ "creates a symlink `target -> source` (e.g. ln -s source target)"
- return self.operations('symlink', target.decode(self.encoding),
- source.decode(self.encoding))
+ return self.operations(
+ "symlink", target.decode(self.encoding), source.decode(self.encoding)
+ )
def rename(self, old, new):
- return self.operations('rename', old.decode(self.encoding),
- new.decode(self.encoding))
+ return self.operations(
+ "rename", old.decode(self.encoding), new.decode(self.encoding)
+ )
def link(self, source, target):
- 'creates a hard link `target -> source` (e.g. ln source target)'
+ "creates a hard link `target -> source` (e.g. ln source target)"
- return self.operations('link', target.decode(self.encoding),
- source.decode(self.encoding))
+ return self.operations(
+ "link", target.decode(self.encoding), source.decode(self.encoding)
+ )
def chmod(self, path, mode):
- return self.operations('chmod', path.decode(self.encoding), mode)
+ return self.operations("chmod", path.decode(self.encoding), mode)
def chown(self, path, uid, gid):
# Check if any of the arguments is a -1 that has overflowed
@@ -585,35 +623,37 @@ class FUSE(object):
if c_gid_t(gid + 1).value == 0:
gid = -1
- return self.operations('chown', path.decode(self.encoding), uid, gid)
+ return self.operations("chown", path.decode(self.encoding), uid, gid)
def truncate(self, path, length):
- return self.operations('truncate', path.decode(self.encoding), length)
+ return self.operations("truncate", path.decode(self.encoding), length)
def open(self, path, fip):
fi = fip.contents
if self.raw_fi:
- return self.operations('open', path.decode(self.encoding), fi)
+ return self.operations("open", path.decode(self.encoding), fi)
else:
- fi.fh = self.operations('open', path.decode(self.encoding),
- fi.flags)
+ fi.fh = self.operations("open", path.decode(self.encoding), fi.flags)
return 0
def read(self, path, buf, size, offset, fip):
if self.raw_fi:
- fh = fip.contents
+ fh = fip.contents
else:
- fh = fip.contents.fh
+ fh = fip.contents.fh
- ret = self.operations('read', self._decode_optional_path(path), size,
- offset, fh)
+ ret = self.operations(
+ "read", self._decode_optional_path(path), size, offset, fh
+ )
- if not ret: return 0
+ if not ret:
+ return 0
retsize = len(ret)
- assert retsize <= size, \
- 'actual amount read {:d} greater than expected {:d}'.format(retsize, size)
+ assert (
+ retsize <= size
+ ), "actual amount read {:d} greater than expected {:d}".format(retsize, size)
data = create_string_buffer(ret, retsize)
memmove(buf, data, retsize)
@@ -627,12 +667,13 @@ class FUSE(object):
else:
fh = fip.contents.fh
- return self.operations('write', self._decode_optional_path(path), data,
- offset, fh)
+ return self.operations(
+ "write", self._decode_optional_path(path), data, offset, fh
+ )
def statfs(self, path, buf):
stv = buf.contents
- attrs = self.operations('statfs', path.decode(self.encoding))
+ attrs = self.operations("statfs", path.decode(self.encoding))
for key, val in attrs.items():
if hasattr(stv, key):
setattr(stv, key, val)
@@ -645,15 +686,15 @@ class FUSE(object):
else:
fh = fip.contents.fh
- return self.operations('flush', self._decode_optional_path(path), fh)
+ return self.operations("flush", self._decode_optional_path(path), fh)
def release(self, path, fip):
if self.raw_fi:
- fh = fip.contents
+ fh = fip.contents
else:
- fh = fip.contents.fh
+ fh = fip.contents.fh
- return self.operations('release', self._decode_optional_path(path), fh)
+ return self.operations("release", self._decode_optional_path(path), fh)
def fsync(self, path, datasync, fip):
if self.raw_fi:
@@ -661,42 +702,51 @@ class FUSE(object):
else:
fh = fip.contents.fh
- return self.operations('fsync', self._decode_optional_path(path), datasync,
- fh)
+ return self.operations("fsync", self._decode_optional_path(path), datasync, fh)
def setxattr(self, path, name, value, size, options, *args):
- return self.operations('setxattr', path.decode(self.encoding),
- name.decode(self.encoding),
- string_at(value, size), options, *args)
+ return self.operations(
+ "setxattr",
+ path.decode(self.encoding),
+ name.decode(self.encoding),
+ string_at(value, size),
+ options,
+ *args
+ )
def getxattr(self, path, name, value, size, *args):
- ret = self.operations('getxattr', path.decode(self.encoding),
- name.decode(self.encoding), *args)
+ ret = self.operations(
+ "getxattr", path.decode(self.encoding), name.decode(self.encoding), *args
+ )
retsize = len(ret)
# allow size queries
- if not value: return retsize
+ if not value:
+ return retsize
# do not truncate
- if retsize > size: return -ERANGE
+ if retsize > size:
+ return -ERANGE
- buf = create_string_buffer(ret, retsize) # Does not add trailing 0
+ buf = create_string_buffer(ret, retsize) # Does not add trailing 0
memmove(value, buf, retsize)
return retsize
def listxattr(self, path, namebuf, size):
- attrs = self.operations('listxattr', path.decode(self.encoding)) or ''
- ret = '\x00'.join(attrs).encode(self.encoding)
+ attrs = self.operations("listxattr", path.decode(self.encoding)) or ""
+ ret = "\x00".join(attrs).encode(self.encoding)
if len(ret) > 0:
- ret += '\x00'.encode(self.encoding)
+ ret += "\x00".encode(self.encoding)
retsize = len(ret)
# allow size queries
- if not namebuf: return retsize
+ if not namebuf:
+ return retsize
# do not truncate
- if retsize > size: return -ERANGE
+ if retsize > size:
+ return -ERANGE
buf = create_string_buffer(ret, retsize)
memmove(namebuf, buf, retsize)
@@ -704,20 +754,21 @@ class FUSE(object):
return retsize
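
getxattr() and listxattr() above both follow the xattr size-query convention: with a NULL destination buffer they return the required size, with a too-small buffer they return -ERANGE, and otherwise they copy the bytes. A sketch of that shared tail (assuming dest is a ctypes buffer or raw address):

    from ctypes import create_string_buffer, memmove
    from errno import ERANGE

    def copy_reply(ret, dest, size):
        retsize = len(ret)
        if not dest:                 # size query: caller passed a NULL buffer
            return retsize
        if retsize > size:           # never truncate; let the caller retry with more space
            return -ERANGE
        buf = create_string_buffer(ret, retsize)   # exact size, no trailing NUL added
        memmove(dest, buf, retsize)
        return retsize
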
def removexattr(self, path, name):
- return self.operations('removexattr', path.decode(self.encoding),
- name.decode(self.encoding))
+ return self.operations(
+ "removexattr", path.decode(self.encoding), name.decode(self.encoding)
+ )
def opendir(self, path, fip):
# Ignore raw_fi
- fip.contents.fh = self.operations('opendir',
- path.decode(self.encoding))
+ fip.contents.fh = self.operations("opendir", path.decode(self.encoding))
return 0
def readdir(self, path, buf, filler, offset, fip):
# Ignore raw_fi
- for item in self.operations('readdir', self._decode_optional_path(path),
- fip.contents.fh):
+ for item in self.operations(
+ "readdir", self._decode_optional_path(path), fip.contents.fh
+ ):
if isinstance(item, basestring):
name, st, offset = item, None, 0
@@ -736,22 +787,24 @@ class FUSE(object):
def releasedir(self, path, fip):
# Ignore raw_fi
- return self.operations('releasedir', self._decode_optional_path(path),
- fip.contents.fh)
+ return self.operations(
+ "releasedir", self._decode_optional_path(path), fip.contents.fh
+ )
def fsyncdir(self, path, datasync, fip):
# Ignore raw_fi
- return self.operations('fsyncdir', self._decode_optional_path(path),
- datasync, fip.contents.fh)
+ return self.operations(
+ "fsyncdir", self._decode_optional_path(path), datasync, fip.contents.fh
+ )
def init(self, conn):
- return self.operations('init', '/')
+ return self.operations("init", "/")
def destroy(self, private_data):
- return self.operations('destroy', '/')
+ return self.operations("destroy", "/")
def access(self, path, amode):
- return self.operations('access', path.decode(self.encoding), amode)
+ return self.operations("access", path.decode(self.encoding), amode)
def create(self, path, mode, fip):
fi = fip.contents
@@ -762,7 +815,7 @@ class FUSE(object):
# This line is different from upstream to fix issues
             # reading a file opened with O_CREAT|O_RDWR.
# See issue #143.
- fi.fh = self.operations('create', path, mode, fi.flags)
+ fi.fh = self.operations("create", path, mode, fi.flags)
# END OF MODIFICATION
return 0
@@ -772,8 +825,7 @@ class FUSE(object):
else:
fh = fip.contents.fh
- return self.operations('truncate', self._decode_optional_path(path),
- length, fh)
+ return self.operations("truncate", self._decode_optional_path(path), length, fh)
def fgetattr(self, path, buf, fip):
memset(buf, 0, sizeof(c_stat))
@@ -786,7 +838,7 @@ class FUSE(object):
else:
fh = fip.contents.fh
- attrs = self.operations('getattr', self._decode_optional_path(path), fh)
+ attrs = self.operations("getattr", self._decode_optional_path(path), fh)
set_st_attrs(st, attrs)
return 0
@@ -796,8 +848,7 @@ class FUSE(object):
else:
fh = fip.contents.fh
- return self.operations('lock', self._decode_optional_path(path), fh, cmd,
- lock)
+ return self.operations("lock", self._decode_optional_path(path), fh, cmd, lock)
def utimens(self, path, buf):
if buf:
@@ -807,22 +858,21 @@ class FUSE(object):
else:
times = None
- return self.operations('utimens', path.decode(self.encoding), times)
+ return self.operations("utimens", path.decode(self.encoding), times)
def bmap(self, path, blocksize, idx):
- return self.operations('bmap', path.decode(self.encoding), blocksize,
- idx)
+ return self.operations("bmap", path.decode(self.encoding), blocksize, idx)
class Operations(object):
- '''
+ """
This class should be subclassed and passed as an argument to FUSE on
initialization. All operations should raise a FuseOSError exception on
error.
When in doubt of what an operation should do, check the FUSE header file
or the corresponding system call man page.
- '''
+ """
def __call__(self, op, *args):
if not hasattr(self, op):
@@ -845,7 +895,7 @@ class Operations(object):
raise FuseOSError(EROFS)
def destroy(self, path):
- 'Called on filesystem destruction. Path is always /'
+ "Called on filesystem destruction. Path is always /"
pass
@@ -859,7 +909,7 @@ class Operations(object):
return 0
def getattr(self, path, fh=None):
- '''
+ """
Returns a dictionary with keys identical to the stat C structure of
stat(2).
@@ -868,9 +918,9 @@ class Operations(object):
         NOTE: There is an incompatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
- '''
+ """
- if path != '/':
+ if path != "/":
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0o755), st_nlink=2)
@@ -878,16 +928,16 @@ class Operations(object):
raise FuseOSError(ENOTSUP)
def init(self, path):
- '''
+ """
Called on filesystem initialization. (Path is always /)
Use it instead of __init__ if you start threads on initialization.
- '''
+ """
pass
def link(self, target, source):
- 'creates a hard link `target -> source` (e.g. ln source target)'
+ "creates a hard link `target -> source` (e.g. ln source target)"
raise FuseOSError(EROFS)
@@ -903,7 +953,7 @@ class Operations(object):
raise FuseOSError(EROFS)
def open(self, path, flags):
- '''
+ """
When raw_fi is False (default case), open should return a numerical
file handle.
@@ -911,27 +961,27 @@ class Operations(object):
open(self, path, fi)
and the file handle should be set directly.
- '''
+ """
return 0
def opendir(self, path):
- 'Returns a numerical file handle.'
+ "Returns a numerical file handle."
return 0
def read(self, path, size, offset, fh):
- 'Returns a string containing the data requested.'
+ "Returns a string containing the data requested."
raise FuseOSError(EIO)
def readdir(self, path, fh):
- '''
+ """
Can return either a list of names, or a list of (name, attrs, offset)
tuples. attrs is a dict as in getattr.
- '''
+ """
- return ['.', '..']
+ return [".", ".."]
def readlink(self, path):
raise FuseOSError(ENOENT)
@@ -955,18 +1005,18 @@ class Operations(object):
raise FuseOSError(ENOTSUP)
def statfs(self, path):
- '''
+ """
Returns a dictionary with keys identical to the statvfs C structure of
statvfs(3).
On Mac OS X f_bsize and f_frsize must be a power of 2
(minimum 512).
- '''
+ """
return {}
def symlink(self, target, source):
- 'creates a symlink `target -> source` (e.g. ln -s source target)'
+ "creates a symlink `target -> source` (e.g. ln -s source target)"
raise FuseOSError(EROFS)
@@ -977,7 +1027,7 @@ class Operations(object):
raise FuseOSError(EROFS)
def utimens(self, path, times=None):
- 'Times is a (atime, mtime) tuple. If None use current time.'
+ "Times is a (atime, mtime) tuple. If None use current time."
return 0
@@ -986,11 +1036,11 @@ class Operations(object):
class LoggingMixIn:
- log = logging.getLogger('fuse.log-mixin')
+ log = logging.getLogger("fuse.log-mixin")
def __call__(self, op, path, *args):
- self.log.debug('-> %s %s %s', op, path, repr(args))
- ret = '[Unhandled Exception]'
+ self.log.debug("-> %s %s %s", op, path, repr(args))
+ ret = "[Unhandled Exception]"
try:
ret = getattr(self, op)(path, *args)
return ret
@@ -998,4 +1048,4 @@ class LoggingMixIn:
ret = str(e)
raise
finally:
- self.log.debug('<- %s %s', op, repr(ret))
+ self.log.debug("<- %s %s", op, repr(ret))
diff --git a/src/buildstream/_fuse/hardlinks.py b/src/buildstream/_fuse/hardlinks.py
index 798e1c816..8b09901a0 100644
--- a/src/buildstream/_fuse/hardlinks.py
+++ b/src/buildstream/_fuse/hardlinks.py
@@ -41,7 +41,6 @@ from .mount import Mount
# tmp (str): A directory on the same filesystem for creating temp files
#
class SafeHardlinks(Mount):
-
def __init__(self, directory, tempdir, fuse_mount_options=None):
self.directory = directory
self.tempdir = tempdir
@@ -58,7 +57,6 @@ class SafeHardlinks(Mount):
# The actual FUSE Operations implementation below.
#
class SafeHardlinkOps(Operations):
-
def __init__(self, root, tmp):
self.root = root
self.tmp = tmp
@@ -122,14 +120,25 @@ class SafeHardlinkOps(Operations):
def getattr(self, path, fh=None):
full_path = self._full_path(path)
st = os.lstat(full_path)
- return dict((key, getattr(st, key)) for key in (
- 'st_atime', 'st_ctime', 'st_gid', 'st_mode',
- 'st_mtime', 'st_nlink', 'st_size', 'st_uid', 'st_rdev'))
+ return dict(
+ (key, getattr(st, key))
+ for key in (
+ "st_atime",
+ "st_ctime",
+ "st_gid",
+ "st_mode",
+ "st_mtime",
+ "st_nlink",
+ "st_size",
+ "st_uid",
+ "st_rdev",
+ )
+ )
def readdir(self, path, fh):
full_path = self._full_path(path)
- dirents = ['.', '..']
+ dirents = [".", ".."]
if os.path.isdir(full_path):
dirents.extend(os.listdir(full_path))
for r in dirents:
@@ -156,22 +165,34 @@ class SafeHardlinkOps(Operations):
def statfs(self, path):
full_path = self._full_path(path)
stv = os.statvfs(full_path)
- return dict((key, getattr(stv, key)) for key in (
- 'f_bavail', 'f_bfree', 'f_blocks', 'f_bsize', 'f_favail',
- 'f_ffree', 'f_files', 'f_flag', 'f_frsize', 'f_namemax'))
+ return dict(
+ (key, getattr(stv, key))
+ for key in (
+ "f_bavail",
+ "f_bfree",
+ "f_blocks",
+ "f_bsize",
+ "f_favail",
+ "f_ffree",
+ "f_files",
+ "f_flag",
+ "f_frsize",
+ "f_namemax",
+ )
+ )
def unlink(self, path):
os.unlink(self._full_path(path))
def symlink(self, target, source):
- 'creates a symlink `target -> source` (e.g. ln -s source target)'
+ "creates a symlink `target -> source` (e.g. ln -s source target)"
return os.symlink(source, self._full_path(target))
def rename(self, old, new):
return os.rename(self._full_path(old), self._full_path(new))
def link(self, target, source):
- 'creates a hard link `target -> source` (e.g. ln source target)'
+ "creates a hard link `target -> source` (e.g. ln source target)"
# When creating a hard link here, should we ensure the original
         # file is not a hardlink itself first?
@@ -207,7 +228,7 @@ class SafeHardlinkOps(Operations):
def truncate(self, path, length, fh=None):
full_path = self._full_path(path)
- with open(full_path, 'r+') as f:
+ with open(full_path, "r+") as f:
f.truncate(length)
def flush(self, path, fh):
diff --git a/src/buildstream/_fuse/mount.py b/src/buildstream/_fuse/mount.py
index 92ca63179..4df2ed603 100644
--- a/src/buildstream/_fuse/mount.py
+++ b/src/buildstream/_fuse/mount.py
@@ -72,7 +72,7 @@ class FuseMountError(Exception):
# run the fuse loop in foreground, and we block the parent
# process until the volume is mounted with a busy loop with timeouts.
#
-class Mount():
+class Mount:
# These are not really class data, they are
# just here for the sake of having None setup instead
@@ -89,7 +89,9 @@ class Mount():
################################################
def __init__(self, fuse_mount_options=None):
- self._fuse_mount_options = {} if fuse_mount_options is None else fuse_mount_options
+ self._fuse_mount_options = (
+ {} if fuse_mount_options is None else fuse_mount_options
+ )
# _mount():
#
@@ -108,14 +110,18 @@ class Mount():
# Ensure the child process does not inherit our signal handlers, if the
# child wants to handle a signal then it will first set its own
# handler, and then unblock it.
- with _signals.blocked([signal.SIGTERM, signal.SIGTSTP, signal.SIGINT], ignore=False):
+ with _signals.blocked(
+ [signal.SIGTERM, signal.SIGTSTP, signal.SIGINT], ignore=False
+ ):
self.__process.start()
while not os.path.ismount(mountpoint):
if not self.__process.is_alive():
self.__logfile.seek(0)
stderr = self.__logfile.read()
- raise FuseMountError("Unable to mount {}: {}".format(mountpoint, stderr.decode().strip()))
+ raise FuseMountError(
+ "Unable to mount {}: {}".format(mountpoint, stderr.decode().strip())
+ )
time.sleep(1 / 100)
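
The loop above blocks the parent until the FUSE volume appears, polling os.path.ismount() while checking that the child is still alive. A simplified sketch of that wait (the timeout is an addition here; the real code relies on the liveness check alone):

    import os
    import time

    def wait_for_mount(mountpoint, process, timeout=10.0):
        deadline = time.monotonic() + timeout
        while not os.path.ismount(mountpoint):
            if not process.is_alive():
                raise RuntimeError("FUSE process exited before mounting")
            if time.monotonic() > deadline:
                raise TimeoutError("timed out waiting for {}".format(mountpoint))
            time.sleep(1 / 100)
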
@@ -135,8 +141,11 @@ class Mount():
self.__logfile.seek(0)
stderr = self.__logfile.read()
- raise FuseMountError("{} reported exit code {} when unmounting: {}"
- .format(type(self).__name__, self.__process.exitcode, stderr))
+ raise FuseMountError(
+ "{} reported exit code {} when unmounting: {}".format(
+ type(self).__name__, self.__process.exitcode, stderr
+ )
+ )
self.__mountpoint = None
self.__process = None
@@ -176,8 +185,11 @@ class Mount():
# Returns:
# (Operations): A FUSE Operations implementation
def create_operations(self):
- raise ImplError("Mount subclass '{}' did not implement create_operations()"
- .format(type(self).__name__))
+ raise ImplError(
+ "Mount subclass '{}' did not implement create_operations()".format(
+ type(self).__name__
+ )
+ )
################################################
# Child Process #
@@ -199,14 +211,22 @@ class Mount():
# Ask the subclass to give us an Operations object
#
- self.__operations = self.create_operations() # pylint: disable=assignment-from-no-return
+ self.__operations = (
+ self.create_operations()
+ ) # pylint: disable=assignment-from-no-return
# Run fuse in foreground in this child process, internally libfuse
# will handle SIGTERM and gracefully exit its own little main loop.
#
try:
- FUSE(self.__operations, self.__mountpoint, nothreads=True, foreground=True, nonempty=True,
- **self._fuse_mount_options)
+ FUSE(
+ self.__operations,
+ self.__mountpoint,
+ nothreads=True,
+ foreground=True,
+ nonempty=True,
+ **self._fuse_mount_options
+ )
except RuntimeError as exc:
# FUSE will throw a RuntimeError with the exit code of libfuse as args[0]
sys.exit(exc.args[0])
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index 120d8c72a..11f1d6572 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -35,7 +35,7 @@ from . import utils
from .types import FastEnum
from .utils import move_atomic, DirectoryExistsError
-GIT_MODULES = '.gitmodules'
+GIT_MODULES = ".gitmodules"
# Warnings
WARN_INCONSISTENT_SUBMODULE = "inconsistent-submodule"
@@ -53,7 +53,6 @@ class _RefFormat(FastEnum):
# might have at a given time
#
class _GitMirror(SourceFetcher):
-
def __init__(self, source, path, url, ref, *, primary=False, tags=[]):
super().__init__()
@@ -63,7 +62,9 @@ class _GitMirror(SourceFetcher):
self.ref = ref
self.tags = tags
self.primary = primary
- self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
+ self.mirror = os.path.join(
+ source.get_mirror_directory(), utils.url_directory_name(url)
+ )
self.mark_download_url(url)
# Ensures that the mirror exists
@@ -80,59 +81,74 @@ class _GitMirror(SourceFetcher):
# system configured tmpdir is not on the same partition.
#
with self.source.tempdir() as tmpdir:
- url = self.source.translate_url(self.url, alias_override=alias_override,
- primary=self.primary)
- self.source.call([self.source.host_git, 'clone', '--mirror', '-n', url, tmpdir],
- fail="Failed to clone git repository {}".format(url),
- fail_temporarily=True)
+ url = self.source.translate_url(
+ self.url, alias_override=alias_override, primary=self.primary
+ )
+ self.source.call(
+ [self.source.host_git, "clone", "--mirror", "-n", url, tmpdir],
+ fail="Failed to clone git repository {}".format(url),
+ fail_temporarily=True,
+ )
try:
move_atomic(tmpdir, self.mirror)
except DirectoryExistsError:
# Another process was quicker to download this repository.
# Let's discard our own
- self.source.status("{}: Discarding duplicate clone of {}"
- .format(self.source, url))
+ self.source.status(
+ "{}: Discarding duplicate clone of {}".format(self.source, url)
+ )
except OSError as e:
- raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
- .format(self.source, url, tmpdir, self.mirror, e)) from e
+ raise SourceError(
+ "{}: Failed to move cloned git repository {} from '{}' to '{}': {}".format(
+ self.source, url, tmpdir, self.mirror, e
+ )
+ ) from e
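
ensure() clones into a temporary directory on the same filesystem and then moves it into place atomically, so two processes racing on the same mirror cannot corrupt it. A rough sketch of that pattern using the move_atomic()/DirectoryExistsError helpers the module imports from buildstream.utils:

    import os
    import shutil
    import subprocess
    import tempfile

    from buildstream.utils import DirectoryExistsError, move_atomic

    def ensure_mirror(url, mirror_path):
        # Keep the temporary clone on the same partition as the final mirror.
        tmpdir = tempfile.mkdtemp(dir=os.path.dirname(mirror_path))
        try:
            subprocess.run(["git", "clone", "--mirror", "-n", url, tmpdir], check=True)
            try:
                move_atomic(tmpdir, mirror_path)
            except DirectoryExistsError:
                pass   # another process finished first; our copy is discarded below
        finally:
            shutil.rmtree(tmpdir, ignore_errors=True)
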
def _fetch(self, alias_override=None):
- url = self.source.translate_url(self.url,
- alias_override=alias_override,
- primary=self.primary)
+ url = self.source.translate_url(
+ self.url, alias_override=alias_override, primary=self.primary
+ )
if alias_override:
remote_name = utils.url_directory_name(alias_override)
_, remotes = self.source.check_output(
- [self.source.host_git, 'remote'],
+ [self.source.host_git, "remote"],
fail="Failed to retrieve list of remotes in {}".format(self.mirror),
- cwd=self.mirror
+ cwd=self.mirror,
)
if remote_name not in remotes:
self.source.call(
- [self.source.host_git, 'remote', 'add', remote_name, url],
+ [self.source.host_git, "remote", "add", remote_name, url],
fail="Failed to add remote {} with url {}".format(remote_name, url),
- cwd=self.mirror
+ cwd=self.mirror,
)
else:
remote_name = "origin"
- self.source.call([self.source.host_git, 'fetch', remote_name, '--prune',
- '+refs/heads/*:refs/heads/*', '+refs/tags/*:refs/tags/*'],
- fail="Failed to fetch from remote git repository: {}".format(url),
- fail_temporarily=True,
- cwd=self.mirror)
+ self.source.call(
+ [
+ self.source.host_git,
+ "fetch",
+ remote_name,
+ "--prune",
+ "+refs/heads/*:refs/heads/*",
+ "+refs/tags/*:refs/tags/*",
+ ],
+ fail="Failed to fetch from remote git repository: {}".format(url),
+ fail_temporarily=True,
+ cwd=self.mirror,
+ )
def fetch(self, alias_override=None): # pylint: disable=arguments-differ
# Resolve the URL for the message
- resolved_url = self.source.translate_url(self.url,
- alias_override=alias_override,
- primary=self.primary)
+ resolved_url = self.source.translate_url(
+ self.url, alias_override=alias_override, primary=self.primary
+ )
- with self.source.timed_activity("Fetching from {}"
- .format(resolved_url),
- silent_nested=True):
+ with self.source.timed_activity(
+ "Fetching from {}".format(resolved_url), silent_nested=True
+ ):
self.ensure(alias_override)
if not self.has_ref():
self._fetch(alias_override)
@@ -147,48 +163,71 @@ class _GitMirror(SourceFetcher):
return False
# Check if the ref is really there
- rc = self.source.call([self.source.host_git, 'cat-file', '-t', self.ref], cwd=self.mirror)
+ rc = self.source.call(
+ [self.source.host_git, "cat-file", "-t", self.ref], cwd=self.mirror
+ )
return rc == 0
def assert_ref(self):
if not self.has_ref():
- raise SourceError("{}: expected ref '{}' was not found in git repository: '{}'"
- .format(self.source, self.ref, self.url))
+ raise SourceError(
+ "{}: expected ref '{}' was not found in git repository: '{}'".format(
+ self.source, self.ref, self.url
+ )
+ )
def latest_commit_with_tags(self, tracking, track_tags=False):
_, output = self.source.check_output(
- [self.source.host_git, 'rev-parse', tracking],
- fail="Unable to find commit for specified branch name '{}'".format(tracking),
- cwd=self.mirror)
- ref = output.rstrip('\n')
+ [self.source.host_git, "rev-parse", tracking],
+ fail="Unable to find commit for specified branch name '{}'".format(
+ tracking
+ ),
+ cwd=self.mirror,
+ )
+ ref = output.rstrip("\n")
if self.source.ref_format == _RefFormat.GIT_DESCRIBE:
# Prefix the ref with the closest tag, if available,
# to make the ref human readable
exit_code, output = self.source.check_output(
- [self.source.host_git, 'describe', '--tags', '--abbrev=40', '--long', ref],
- cwd=self.mirror)
+ [
+ self.source.host_git,
+ "describe",
+ "--tags",
+ "--abbrev=40",
+ "--long",
+ ref,
+ ],
+ cwd=self.mirror,
+ )
if exit_code == 0:
- ref = output.rstrip('\n')
+ ref = output.rstrip("\n")
if not track_tags:
return ref, []
tags = set()
- for options in [[], ['--first-parent'], ['--tags'], ['--tags', '--first-parent']]:
+ for options in [
+ [],
+ ["--first-parent"],
+ ["--tags"],
+ ["--tags", "--first-parent"],
+ ]:
exit_code, output = self.source.check_output(
- [self.source.host_git, 'describe', '--abbrev=0', ref, *options],
- cwd=self.mirror)
+ [self.source.host_git, "describe", "--abbrev=0", ref, *options],
+ cwd=self.mirror,
+ )
if exit_code == 0:
tag = output.strip()
_, commit_ref = self.source.check_output(
- [self.source.host_git, 'rev-parse', tag + '^{commit}'],
+ [self.source.host_git, "rev-parse", tag + "^{commit}"],
fail="Unable to resolve tag '{}'".format(tag),
- cwd=self.mirror)
+ cwd=self.mirror,
+ )
exit_code = self.source.call(
- [self.source.host_git, 'cat-file', 'tag', tag],
- cwd=self.mirror)
- annotated = (exit_code == 0)
+ [self.source.host_git, "cat-file", "tag", tag], cwd=self.mirror
+ )
+ annotated = exit_code == 0
tags.add((tag, commit_ref.strip(), annotated))
@@ -200,13 +239,26 @@ class _GitMirror(SourceFetcher):
# Using --shared here avoids copying the objects into the checkout, in any
# case we're just checking out a specific commit and then removing the .git/
# directory.
- self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
- fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
- fail_temporarily=True)
-
- self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
- fail="Failed to checkout git ref {}".format(self.ref),
- cwd=fullpath)
+ self.source.call(
+ [
+ self.source.host_git,
+ "clone",
+ "--no-checkout",
+ "--shared",
+ self.mirror,
+ fullpath,
+ ],
+ fail="Failed to create git mirror {} in directory: {}".format(
+ self.mirror, fullpath
+ ),
+ fail_temporarily=True,
+ )
+
+ self.source.call(
+ [self.source.host_git, "checkout", "--force", self.ref],
+ fail="Failed to checkout git ref {}".format(self.ref),
+ cwd=fullpath,
+ )
# Remove .git dir
shutil.rmtree(os.path.join(fullpath, ".git"))
@@ -217,23 +269,32 @@ class _GitMirror(SourceFetcher):
fullpath = os.path.join(directory, self.path)
url = self.source.translate_url(self.url)
- self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
- fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
- fail_temporarily=True)
-
- self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', url],
- fail='Failed to add remote origin "{}"'.format(url),
- cwd=fullpath)
-
- self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
- fail="Failed to checkout git ref {}".format(self.ref),
- cwd=fullpath)
+ self.source.call(
+ [self.source.host_git, "clone", "--no-checkout", self.mirror, fullpath],
+ fail="Failed to clone git mirror {} in directory: {}".format(
+ self.mirror, fullpath
+ ),
+ fail_temporarily=True,
+ )
+
+ self.source.call(
+ [self.source.host_git, "remote", "set-url", "origin", url],
+ fail='Failed to add remote origin "{}"'.format(url),
+ cwd=fullpath,
+ )
+
+ self.source.call(
+ [self.source.host_git, "checkout", "--force", self.ref],
+ fail="Failed to checkout git ref {}".format(self.ref),
+ cwd=fullpath,
+ )
# List the submodules (path/url tuples) present at the given ref of this repo
def submodule_list(self):
modules = "{}:{}".format(self.ref, GIT_MODULES)
exit_code, output = self.source.check_output(
- [self.source.host_git, 'show', modules], cwd=self.mirror)
+ [self.source.host_git, "show", modules], cwd=self.mirror
+ )
# If git show reports error code 128 here, we take it to mean there is
# no .gitmodules file to display for the given revision.
@@ -242,9 +303,11 @@ class _GitMirror(SourceFetcher):
elif exit_code != 0:
raise SourceError(
"{plugin}: Failed to show gitmodules at ref {ref}".format(
- plugin=self, ref=self.ref))
+ plugin=self, ref=self.ref
+ )
+ )
- content = '\n'.join([l.strip() for l in output.splitlines()])
+ content = "\n".join([l.strip() for l in output.splitlines()])
io = StringIO(content)
parser = RawConfigParser()
@@ -253,8 +316,8 @@ class _GitMirror(SourceFetcher):
for section in parser.sections():
# validate section name against the 'submodule "foo"' pattern
if re.match(r'submodule "(.*)"', section):
- path = parser.get(section, 'path')
- url = parser.get(section, 'url')
+ path = parser.get(section, "path")
+ url = parser.get(section, "url")
yield (path, url)
@@ -266,31 +329,45 @@ class _GitMirror(SourceFetcher):
# list objects in the parent repo tree to find the commit
# object that corresponds to the submodule
- _, output = self.source.check_output([self.source.host_git, 'ls-tree', ref, submodule],
- fail="ls-tree failed for commit {} and submodule: {}".format(
- ref, submodule),
- cwd=self.mirror)
+ _, output = self.source.check_output(
+ [self.source.host_git, "ls-tree", ref, submodule],
+ fail="ls-tree failed for commit {} and submodule: {}".format(
+ ref, submodule
+ ),
+ cwd=self.mirror,
+ )
# read the commit hash from the output
fields = output.split()
- if len(fields) >= 2 and fields[1] == 'commit':
+ if len(fields) >= 2 and fields[1] == "commit":
submodule_commit = output.split()[2]
# fail if the commit hash is invalid
if len(submodule_commit) != 40:
- raise SourceError("{}: Error reading commit information for submodule '{}'"
- .format(self.source, submodule))
+ raise SourceError(
+ "{}: Error reading commit information for submodule '{}'".format(
+ self.source, submodule
+ )
+ )
return submodule_commit
else:
- detail = "The submodule '{}' is defined either in the BuildStream source\n".format(submodule) + \
- "definition, or in a .gitmodules file. But the submodule was never added to the\n" + \
- "underlying git repository with `git submodule add`."
+ detail = (
+ "The submodule '{}' is defined either in the BuildStream source\n".format(
+ submodule
+ )
+ + "definition, or in a .gitmodules file. But the submodule was never added to the\n"
+ + "underlying git repository with `git submodule add`."
+ )
- self.source.warn("{}: Ignoring inconsistent submodule '{}'"
- .format(self.source, submodule), detail=detail,
- warning_token=WARN_INCONSISTENT_SUBMODULE)
+ self.source.warn(
+ "{}: Ignoring inconsistent submodule '{}'".format(
+ self.source, submodule
+ ),
+ detail=detail,
+ warning_token=WARN_INCONSISTENT_SUBMODULE,
+ )
return None
@@ -307,17 +384,26 @@ class _GitMirror(SourceFetcher):
# rev-list does not work in case of same rev
shallow.add(self.ref)
else:
- _, out = self.source.check_output([self.source.host_git, 'rev-list',
- '--ancestry-path', '--boundary',
- '{}..{}'.format(commit_ref, self.ref)],
- fail="Failed to get git history {}..{} in directory: {}"
- .format(commit_ref, self.ref, fullpath),
- fail_temporarily=True,
- cwd=self.mirror)
- self.source.warn("refs {}..{}: {}".format(commit_ref, self.ref, out.splitlines()))
+ _, out = self.source.check_output(
+ [
+ self.source.host_git,
+ "rev-list",
+ "--ancestry-path",
+ "--boundary",
+ "{}..{}".format(commit_ref, self.ref),
+ ],
+ fail="Failed to get git history {}..{} in directory: {}".format(
+ commit_ref, self.ref, fullpath
+ ),
+ fail_temporarily=True,
+ cwd=self.mirror,
+ )
+ self.source.warn(
+ "refs {}..{}: {}".format(commit_ref, self.ref, out.splitlines())
+ )
for line in out.splitlines():
- rev = line.lstrip('-')
- if line[0] == '-':
+ rev = line.lstrip("-")
+ if line[0] == "-":
shallow.add(rev)
else:
included.add(rev)
@@ -325,52 +411,80 @@ class _GitMirror(SourceFetcher):
shallow -= included
included |= shallow
- self.source.call([self.source.host_git, 'init'],
- fail="Cannot initialize git repository: {}".format(fullpath),
- cwd=fullpath)
+ self.source.call(
+ [self.source.host_git, "init"],
+ fail="Cannot initialize git repository: {}".format(fullpath),
+ cwd=fullpath,
+ )
for rev in included:
with TemporaryFile(dir=tmpdir) as commit_file:
- self.source.call([self.source.host_git, 'cat-file', 'commit', rev],
- stdout=commit_file,
- fail="Failed to get commit {}".format(rev),
- cwd=self.mirror)
+ self.source.call(
+ [self.source.host_git, "cat-file", "commit", rev],
+ stdout=commit_file,
+ fail="Failed to get commit {}".format(rev),
+ cwd=self.mirror,
+ )
commit_file.seek(0, 0)
- self.source.call([self.source.host_git, 'hash-object', '-w', '-t', 'commit', '--stdin'],
- stdin=commit_file,
- fail="Failed to add commit object {}".format(rev),
- cwd=fullpath)
-
- with open(os.path.join(fullpath, '.git', 'shallow'), 'w') as shallow_file:
+ self.source.call(
+ [
+ self.source.host_git,
+ "hash-object",
+ "-w",
+ "-t",
+ "commit",
+ "--stdin",
+ ],
+ stdin=commit_file,
+ fail="Failed to add commit object {}".format(rev),
+ cwd=fullpath,
+ )
+
+ with open(os.path.join(fullpath, ".git", "shallow"), "w") as shallow_file:
for rev in shallow:
- shallow_file.write('{}\n'.format(rev))
+ shallow_file.write("{}\n".format(rev))
for tag, commit_ref, annotated in self.tags:
if annotated:
with TemporaryFile(dir=tmpdir) as tag_file:
- tag_data = 'object {}\ntype commit\ntag {}\n'.format(commit_ref, tag)
- tag_file.write(tag_data.encode('ascii'))
+ tag_data = "object {}\ntype commit\ntag {}\n".format(
+ commit_ref, tag
+ )
+ tag_file.write(tag_data.encode("ascii"))
tag_file.seek(0, 0)
_, tag_ref = self.source.check_output(
- [self.source.host_git, 'hash-object', '-w', '-t',
- 'tag', '--stdin'],
+ [
+ self.source.host_git,
+ "hash-object",
+ "-w",
+ "-t",
+ "tag",
+ "--stdin",
+ ],
stdin=tag_file,
fail="Failed to add tag object {}".format(tag),
- cwd=fullpath)
-
- self.source.call([self.source.host_git, 'tag', tag, tag_ref.strip()],
- fail="Failed to tag: {}".format(tag),
- cwd=fullpath)
+ cwd=fullpath,
+ )
+
+ self.source.call(
+ [self.source.host_git, "tag", tag, tag_ref.strip()],
+ fail="Failed to tag: {}".format(tag),
+ cwd=fullpath,
+ )
else:
- self.source.call([self.source.host_git, 'tag', tag, commit_ref],
- fail="Failed to tag: {}".format(tag),
- cwd=fullpath)
+ self.source.call(
+ [self.source.host_git, "tag", tag, commit_ref],
+ fail="Failed to tag: {}".format(tag),
+ cwd=fullpath,
+ )
- with open(os.path.join(fullpath, '.git', 'HEAD'), 'w') as head:
- self.source.call([self.source.host_git, 'rev-parse', self.ref],
- stdout=head,
- fail="Failed to parse commit {}".format(self.ref),
- cwd=self.mirror)
+ with open(os.path.join(fullpath, ".git", "HEAD"), "w") as head:
+ self.source.call(
+ [self.source.host_git, "rev-parse", self.ref],
+ stdout=head,
+ fail="Failed to parse commit {}".format(self.ref),
+ cwd=self.mirror,
+ )
class _GitSourceBase(Source):
@@ -382,58 +496,69 @@ class _GitSourceBase(Source):
BST_MIRROR_CLASS = _GitMirror
def configure(self, node):
- ref = node.get_str('ref', None)
-
- config_keys = ['url', 'track', 'ref', 'submodules',
- 'checkout-submodules', 'ref-format',
- 'track-tags', 'tags']
+ ref = node.get_str("ref", None)
+
+ config_keys = [
+ "url",
+ "track",
+ "ref",
+ "submodules",
+ "checkout-submodules",
+ "ref-format",
+ "track-tags",
+ "tags",
+ ]
node.validate_keys(config_keys + Source.COMMON_CONFIG_KEYS)
- tags_node = node.get_sequence('tags', [])
+ tags_node = node.get_sequence("tags", [])
for tag_node in tags_node:
- tag_node.validate_keys(['tag', 'commit', 'annotated'])
+ tag_node.validate_keys(["tag", "commit", "annotated"])
tags = self._load_tags(node)
- self.track_tags = node.get_bool('track-tags', default=False)
+ self.track_tags = node.get_bool("track-tags", default=False)
- self.original_url = node.get_str('url')
- self.mirror = self.BST_MIRROR_CLASS(self, '', self.original_url, ref, tags=tags, primary=True)
- self.tracking = node.get_str('track', None)
+ self.original_url = node.get_str("url")
+ self.mirror = self.BST_MIRROR_CLASS(
+ self, "", self.original_url, ref, tags=tags, primary=True
+ )
+ self.tracking = node.get_str("track", None)
- self.ref_format = node.get_enum('ref-format', _RefFormat, _RefFormat.SHA1)
+ self.ref_format = node.get_enum("ref-format", _RefFormat, _RefFormat.SHA1)
# At this point we now know if the source has a ref and/or a track.
# If it is missing both then we will be unable to track or build.
if self.mirror.ref is None and self.tracking is None:
- raise SourceError("{}: Git sources require a ref and/or track".format(self),
- reason="missing-track-and-ref")
+ raise SourceError(
+ "{}: Git sources require a ref and/or track".format(self),
+ reason="missing-track-and-ref",
+ )
- self.checkout_submodules = node.get_bool('checkout-submodules', default=True)
+ self.checkout_submodules = node.get_bool("checkout-submodules", default=True)
self.submodules = []
# Parse a dict of submodule overrides, stored in the submodule_overrides
# and submodule_checkout_overrides dictionaries.
self.submodule_overrides = {}
self.submodule_checkout_overrides = {}
- modules = node.get_mapping('submodules', {})
+ modules = node.get_mapping("submodules", {})
for path in modules.keys():
submodule = modules.get_mapping(path)
- url = submodule.get_str('url', None)
+ url = submodule.get_str("url", None)
# Make sure to mark all URLs that are specified in the configuration
if url:
self.mark_download_url(url, primary=False)
self.submodule_overrides[path] = url
- if 'checkout' in submodule:
- checkout = submodule.get_bool('checkout')
+ if "checkout" in submodule:
+ checkout = submodule.get_bool("checkout")
self.submodule_checkout_overrides[path] = checkout
self.mark_download_url(self.original_url)
def preflight(self):
# Check if git is installed, get the binary at the same time
- self.host_git = utils.get_host_tool('git')
+ self.host_git = utils.get_host_tool("git")
def get_unique_key(self):
# Here we want to encode the local name of the repository and
@@ -441,8 +566,10 @@ class _GitSourceBase(Source):
# from another location, it should not affect the cache key.
key = [self.original_url, self.mirror.ref]
if self.mirror.tags:
- tags = {tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags}
- key.append({'tags': tags})
+ tags = {
+ tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags
+ }
+ key.append({"tags": tags})
# Only modify the cache key with checkout_submodules if it's something
# other than the default behaviour.
@@ -455,7 +582,9 @@ class _GitSourceBase(Source):
key.append(self.submodule_overrides)
if self.submodule_checkout_overrides:
- key.append({"submodule_checkout_overrides": self.submodule_checkout_overrides})
+ key.append(
+ {"submodule_checkout_overrides": self.submodule_checkout_overrides}
+ )
return key
@@ -467,7 +596,7 @@ class _GitSourceBase(Source):
return Consistency.INCONSISTENT
def load_ref(self, node):
- self.mirror.ref = node.get_str('ref', None)
+ self.mirror.ref = node.get_str("ref", None)
self.mirror.tags = self._load_tags(node)
def get_ref(self):
@@ -478,25 +607,23 @@ class _GitSourceBase(Source):
def set_ref(self, ref, node):
if not ref:
self.mirror.ref = None
- if 'ref' in node:
- del node['ref']
+ if "ref" in node:
+ del node["ref"]
self.mirror.tags = []
- if 'tags' in node:
- del node['tags']
+ if "tags" in node:
+ del node["tags"]
else:
actual_ref, tags = ref
- node['ref'] = self.mirror.ref = actual_ref
+ node["ref"] = self.mirror.ref = actual_ref
self.mirror.tags = tags
if tags:
- node['tags'] = []
+ node["tags"] = []
for tag, commit_ref, annotated in tags:
- data = {'tag': tag,
- 'commit': commit_ref,
- 'annotated': annotated}
- node['tags'].append(data)
+ data = {"tag": tag, "commit": commit_ref, "annotated": annotated}
+ node["tags"].append(data)
else:
- if 'tags' in node:
- del node['tags']
+ if "tags" in node:
+ del node["tags"]
def track(self): # pylint: disable=arguments-differ
@@ -504,17 +631,23 @@ class _GitSourceBase(Source):
if not self.tracking:
# Is there a better way to check if a ref is given.
if self.mirror.ref is None:
- detail = 'Without a tracking branch the ref cannot be updated. Please ' + \
- 'provide a ref or a track.'
- raise SourceError("{}: No track or ref".format(self),
- detail=detail, reason="track-attempt-no-track")
+ detail = (
+ "Without a tracking branch ref can not be updated. Please "
+ + "provide a ref or a track."
+ )
+ raise SourceError(
+ "{}: No track or ref".format(self),
+ detail=detail,
+ reason="track-attempt-no-track",
+ )
return None
# Resolve the URL for the message
resolved_url = self.translate_url(self.mirror.url)
- with self.timed_activity("Tracking {} from {}"
- .format(self.tracking, resolved_url),
- silent_nested=True):
+ with self.timed_activity(
+ "Tracking {} from {}".format(self.tracking, resolved_url),
+ silent_nested=True,
+ ):
self.mirror.ensure()
self.mirror._fetch()
@@ -527,7 +660,9 @@ class _GitSourceBase(Source):
# XXX: may wish to refactor this as some code dupe with stage()
self._refresh_submodules()
- with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
+ with self.timed_activity(
+ 'Setting up workspace "{}"'.format(directory), silent_nested=True
+ ):
self.mirror.init_workspace(directory)
for mirror in self.submodules:
mirror.init_workspace(directory)
@@ -543,7 +678,9 @@ class _GitSourceBase(Source):
# Stage the main repo in the specified directory
#
- with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
+ with self.timed_activity(
+ "Staging {}".format(self.mirror.url), silent_nested=True
+ ):
self.mirror.stage(directory)
for mirror in self.submodules:
mirror.stage(directory)
@@ -578,11 +715,13 @@ class _GitSourceBase(Source):
for path, url in invalid_submodules:
detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
- self.warn("{}: Invalid submodules specified".format(self),
- warning_token=WARN_INVALID_SUBMODULE,
- detail="The following submodules are specified in the source "
- "description but do not exist according to the repository\n\n" +
- "\n".join(detail))
+ self.warn(
+ "{}: Invalid submodules specified".format(self),
+ warning_token=WARN_INVALID_SUBMODULE,
+ detail="The following submodules are specified in the source "
+ "description but do not exist according to the repository\n\n"
+ + "\n".join(detail),
+ )
# Warn about submodules which exist but have not been explicitly configured
if unlisted_submodules:
@@ -590,37 +729,65 @@ class _GitSourceBase(Source):
for path, url in unlisted_submodules:
detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
- self.warn("{}: Unlisted submodules exist".format(self),
- warning_token=WARN_UNLISTED_SUBMODULE,
- detail="The following submodules exist but are not specified " +
- "in the source description\n\n" +
- "\n".join(detail))
+ self.warn(
+ "{}: Unlisted submodules exist".format(self),
+ warning_token=WARN_UNLISTED_SUBMODULE,
+ detail="The following submodules exist but are not specified "
+ + "in the source description\n\n"
+ + "\n".join(detail),
+ )
# Assert that the ref exists in the track tag/branch, if track has been specified.
ref_in_track = False
if self.tracking:
- _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
- '--contains', self.mirror.ref],
- cwd=self.mirror.mirror)
+ _, branch = self.check_output(
+ [
+ self.host_git,
+ "branch",
+ "--list",
+ self.tracking,
+ "--contains",
+ self.mirror.ref,
+ ],
+ cwd=self.mirror.mirror,
+ )
if branch:
ref_in_track = True
else:
- _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
- '--contains', self.mirror.ref],
- cwd=self.mirror.mirror)
+ _, tag = self.check_output(
+ [
+ self.host_git,
+ "tag",
+ "--list",
+ self.tracking,
+ "--contains",
+ self.mirror.ref,
+ ],
+ cwd=self.mirror.mirror,
+ )
if tag:
ref_in_track = True
if not ref_in_track:
- detail = "The ref provided for the element does not exist locally " + \
- "in the provided track branch / tag '{}'.\n".format(self.tracking) + \
- "You may wish to track the element to update the ref from '{}' ".format(self.tracking) + \
- "with `bst source track`,\n" + \
- "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
+ detail = (
+ "The ref provided for the element does not exist locally "
+ + "in the provided track branch / tag '{}'.\n".format(self.tracking)
+ + "You may wish to track the element to update the ref from '{}' ".format(
+ self.tracking
+ )
+ + "with `bst source track`,\n"
+ + "or examine the upstream at '{}' for the specific ref.".format(
+ self.mirror.url
+ )
+ )
- self.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
- .format(self, self.mirror.ref, self.tracking, self.mirror.url),
- detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
+ self.warn(
+ "{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n".format(
+ self, self.mirror.ref, self.tracking, self.mirror.url
+ ),
+ detail=detail,
+ warning_token=CoreWarnings.REF_NOT_IN_TRACK,
+ )
###########################################################
# Local Functions #
@@ -668,11 +835,11 @@ class _GitSourceBase(Source):
def _load_tags(self, node):
tags = []
- tags_node = node.get_sequence('tags', [])
+ tags_node = node.get_sequence("tags", [])
for tag_node in tags_node:
- tag = tag_node.get_str('tag')
- commit_ref = tag_node.get_str('commit')
- annotated = tag_node.get_bool('annotated')
+ tag = tag_node.get_str("tag")
+ commit_ref = tag_node.get_str("commit")
+ annotated = tag_node.get_bool("annotated")
tags.append((tag, commit_ref, annotated))
return tags
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index c04601b91..860b1328f 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -14,7 +14,6 @@ from ._exceptions import LoadError, LoadErrorReason
# provenance. Should be true if intended to be
# serialized.
class Includes:
-
def __init__(self, loader, *, copy_tree=False):
self._loader = loader
self._loaded = {}
@@ -29,47 +28,57 @@ class Includes:
# included (set): Fail for recursion if trying to load any files in this set
# current_loader (Loader): Use alternative loader (for junction files)
# only_local (bool): Whether to ignore junction files
- def process(self, node, *,
- included=set(),
- current_loader=None,
- only_local=False):
+ def process(self, node, *, included=set(), current_loader=None, only_local=False):
if current_loader is None:
current_loader = self._loader
- includes_node = node.get_node('(@)', allowed_types=[ScalarNode, SequenceNode], allow_none=True)
+ includes_node = node.get_node(
+ "(@)", allowed_types=[ScalarNode, SequenceNode], allow_none=True
+ )
if includes_node:
- if type(includes_node) is ScalarNode: # pylint: disable=unidiomatic-typecheck
+ if (
+ type(includes_node) is ScalarNode
+ ): # pylint: disable=unidiomatic-typecheck
includes = [includes_node.as_str()]
else:
includes = includes_node.as_str_list()
- del node['(@)']
+ del node["(@)"]
for include in reversed(includes):
- if only_local and ':' in include:
+ if only_local and ":" in include:
continue
try:
- include_node, file_path, sub_loader = self._include_file(include,
- current_loader)
+ include_node, file_path, sub_loader = self._include_file(
+ include, current_loader
+ )
except LoadError as e:
include_provenance = includes_node.get_provenance()
if e.reason == LoadErrorReason.MISSING_FILE:
message = "{}: Include block references a file that could not be found: '{}'.".format(
- include_provenance, include)
+ include_provenance, include
+ )
raise LoadError(message, LoadErrorReason.MISSING_FILE) from e
if e.reason == LoadErrorReason.LOADING_DIRECTORY:
message = "{}: Include block references a directory instead of a file: '{}'.".format(
- include_provenance, include)
- raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY) from e
+ include_provenance, include
+ )
+ raise LoadError(
+ message, LoadErrorReason.LOADING_DIRECTORY
+ ) from e
# Otherwise, we don't know the reason, so just raise
raise
if file_path in included:
include_provenance = includes_node.get_provenance()
- raise LoadError("{}: trying to recursively include {}". format(include_provenance, file_path),
- LoadErrorReason.RECURSIVE_INCLUDE)
+ raise LoadError(
+ "{}: trying to recursively include {}".format(
+ include_provenance, file_path
+ ),
+ LoadErrorReason.RECURSIVE_INCLUDE,
+ )
# Because the included node will be modified, we need
# to copy it so that we do not modify the toplevel
# node of the provenance.
@@ -77,19 +86,24 @@ class Includes:
try:
included.add(file_path)
- self.process(include_node, included=included,
- current_loader=sub_loader,
- only_local=only_local)
+ self.process(
+ include_node,
+ included=included,
+ current_loader=sub_loader,
+ only_local=only_local,
+ )
finally:
included.remove(file_path)
include_node._composite_under(node)
for value in node.values():
- self._process_value(value,
- included=included,
- current_loader=current_loader,
- only_local=only_local)
+ self._process_value(
+ value,
+ included=included,
+ current_loader=current_loader,
+ only_local=only_local,
+ )
# _include_file()
#
@@ -101,8 +115,8 @@ class Includes:
# loader (Loader): Loader for the current project.
def _include_file(self, include, loader):
shortname = include
- if ':' in include:
- junction, include = include.split(':', 1)
+ if ":" in include:
+ junction, include = include.split(":", 1)
junction_loader = loader._get_loader(junction)
current_loader = junction_loader
else:
@@ -112,10 +126,12 @@ class Includes:
file_path = os.path.join(directory, include)
key = (current_loader, file_path)
if key not in self._loaded:
- self._loaded[key] = _yaml.load(file_path,
- shortname=shortname,
- project=project,
- copy_tree=self._copy_tree)
+ self._loaded[key] = _yaml.load(
+ file_path,
+ shortname=shortname,
+ project=project,
+ copy_tree=self._copy_tree,
+ )
return self._loaded[key], file_path, current_loader
# _process_value()
@@ -127,20 +143,23 @@ class Includes:
# included (set): Fail for recursion if trying to load any files in this set
# current_loader (Loader): Use alternative loader (for junction files)
# only_local (bool): Whether to ignore junction files
- def _process_value(self, value, *,
- included=set(),
- current_loader=None,
- only_local=False):
+ def _process_value(
+ self, value, *, included=set(), current_loader=None, only_local=False
+ ):
value_type = type(value)
if value_type is MappingNode:
- self.process(value,
- included=included,
- current_loader=current_loader,
- only_local=only_local)
+ self.process(
+ value,
+ included=included,
+ current_loader=current_loader,
+ only_local=only_local,
+ )
elif value_type is SequenceNode:
for v in value:
- self._process_value(v,
- included=included,
- current_loader=current_loader,
- only_local=only_local)
+ self._process_value(
+ v,
+ included=included,
+ current_loader=current_loader,
+ only_local=only_local,
+ )
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index cceda284c..d703bd711 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -54,8 +54,7 @@ _NO_PROGRESS = object()
# fetch_subprojects (callable): A function to fetch subprojects
# parent (Loader): A parent Loader object, in the case this is a junctioned Loader
#
-class Loader():
-
+class Loader:
def __init__(self, context, project, *, fetch_subprojects, parent=None):
# Ensure we have an absolute path for the base directory
@@ -66,22 +65,24 @@ class Loader():
#
# Public members
#
- self.project = project # The associated Project
- self.loaded = None # The number of loaded Elements
+ self.project = project # The associated Project
+ self.loaded = None # The number of loaded Elements
#
# Private members
#
self._context = context
- self._options = project.options # Project options (OptionPool)
- self._basedir = basedir # Base project directory
- self._first_pass_options = project.first_pass_config.options # Project options (OptionPool)
- self._parent = parent # The parent loader
+ self._options = project.options # Project options (OptionPool)
+ self._basedir = basedir # Base project directory
+ self._first_pass_options = (
+ project.first_pass_config.options
+ ) # Project options (OptionPool)
+ self._parent = parent # The parent loader
self._fetch_subprojects = fetch_subprojects
self._meta_elements = {} # Dict of resolved meta elements by name
- self._elements = {} # Dict of elements
- self._loaders = {} # Dict of junction loaders
+ self._elements = {} # Dict of elements
+ self._loaders = {} # Dict of junction loaders
self._includes = Includes(self, copy_tree=True)
@@ -100,15 +101,21 @@ class Loader():
# Raises: LoadError
#
# Returns: The toplevel LoadElement
- def load(self, targets, task, rewritable=False, ticker=None, ignore_workspaces=False):
+ def load(
+ self, targets, task, rewritable=False, ticker=None, ignore_workspaces=False
+ ):
for filename in targets:
if os.path.isabs(filename):
# XXX Should this just be an assertion?
# Expect that the caller gives us the right thing at least ?
- raise LoadError("Target '{}' was not specified as a relative "
- "path to the base project directory: {}"
- .format(filename, self._basedir), LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Target '{}' was not specified as a relative "
+ "path to the base project directory: {}".format(
+ filename, self._basedir
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
self._warn_invalid_elements(targets)
@@ -131,8 +138,7 @@ class Loader():
dummy_target = LoadElement(Node.from_dict({}), "", self)
# Pylint is not very happy with Cython and can't understand 'dependencies' is a list
dummy_target.dependencies.extend( # pylint: disable=no-member
- Dependency(element, Symbol.RUNTIME, False)
- for element in target_elements
+ Dependency(element, Symbol.RUNTIME, False) for element in target_elements
)
with PROFILER.profile(Topics.CIRCULAR_CHECK, "_".join(targets)):
@@ -149,7 +155,11 @@ class Loader():
# Finally, wrap what we have into LoadElements and return the target
#
- ret.append(loader._collect_element(element, task, ignore_workspaces=ignore_workspaces))
+ ret.append(
+ loader._collect_element(
+ element, task, ignore_workspaces=ignore_workspaces
+ )
+ )
self._clean_caches()
@@ -181,12 +191,12 @@ class Loader():
# too late. The only time that seems just right is here, when preparing
# the child process' copy of the Loader.
#
- del state['_fetch_subprojects']
+ del state["_fetch_subprojects"]
# Also there's no gain in pickling over the caches, and they might
# contain things which are unpleasantly large or unable to pickle.
- del state['_elements']
- del state['_meta_elements']
+ del state["_elements"]
+ del state["_meta_elements"]
return state
@@ -231,16 +241,20 @@ class Loader():
# Load the data and process any conditional statements therein
fullpath = os.path.join(self._basedir, filename)
try:
- node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable,
- project=self.project)
+ node = _yaml.load(
+ fullpath, shortname=filename, copy_tree=rewritable, project=self.project
+ )
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_FILE:
if self.project.junction:
- message = "Could not find element '{}' in project referred to by junction element '{}'" \
- .format(filename, self.project.junction.name)
+ message = "Could not find element '{}' in project referred to by junction element '{}'".format(
+ filename, self.project.junction.name
+ )
else:
- message = "Could not find element '{}' in elements directory '{}'".format(filename, self._basedir)
+ message = "Could not find element '{}' in elements directory '{}'".format(
+ filename, self._basedir
+ )
if provenance:
message = "{}: {}".format(provenance, message)
@@ -251,10 +265,14 @@ class Loader():
detail = None
elements_dir = os.path.relpath(self._basedir, self.project.directory)
element_relpath = os.path.relpath(filename, elements_dir)
- if filename.startswith(elements_dir) and os.path.exists(os.path.join(self._basedir, element_relpath)):
+ if filename.startswith(elements_dir) and os.path.exists(
+ os.path.join(self._basedir, element_relpath)
+ ):
detail = "Did you mean '{}'?".format(element_relpath)
- raise LoadError(message, LoadErrorReason.MISSING_FILE, detail=detail) from e
+ raise LoadError(
+ message, LoadErrorReason.MISSING_FILE, detail=detail
+ ) from e
if e.reason == LoadErrorReason.LOADING_DIRECTORY:
# If a <directory>.bst file exists in the element path,
@@ -263,10 +281,12 @@ class Loader():
if provenance:
message = "{}: {}".format(provenance, message)
detail = None
- if os.path.exists(os.path.join(self._basedir, filename + '.bst')):
- element_name = filename + '.bst'
+ if os.path.exists(os.path.join(self._basedir, filename + ".bst")):
+ element_name = filename + ".bst"
detail = "Did you mean '{}'?\n".format(element_name)
- raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY, detail=detail) from e
+ raise LoadError(
+ message, LoadErrorReason.LOADING_DIRECTORY, detail=detail
+ ) from e
# Otherwise, we don't know the reason, so just raise
raise
@@ -334,11 +354,15 @@ class Loader():
if dep.junction:
self._load_file(dep.junction, rewritable, ticker, dep.provenance)
- loader = self._get_loader(dep.junction,
- rewritable=rewritable,
- ticker=ticker,
- provenance=dep.provenance)
- dep_element = loader._load_file(dep.name, rewritable, ticker, dep.provenance)
+ loader = self._get_loader(
+ dep.junction,
+ rewritable=rewritable,
+ ticker=ticker,
+ provenance=dep.provenance,
+ )
+ dep_element = loader._load_file(
+ dep.name, rewritable, ticker, dep.provenance
+ )
else:
dep_element = self._elements.get(dep.name)
@@ -346,19 +370,26 @@ class Loader():
# The loader does not have this available so we need to
# either recursively cause it to be loaded, or else we
# need to push this onto the loader queue in this loader
- dep_element = self._load_file_no_deps(dep.name, rewritable, dep.provenance)
+ dep_element = self._load_file_no_deps(
+ dep.name, rewritable, dep.provenance
+ )
dep_deps = extract_depends_from_node(dep_element.node)
loader_queue.append((dep_element, list(reversed(dep_deps)), []))
# Pylint is not very happy about Cython and can't understand 'node' is a 'MappingNode'
- if dep_element.node.get_str(Symbol.KIND) == 'junction': # pylint: disable=no-member
- raise LoadError("{}: Cannot depend on junction" .format(dep.provenance),
- LoadErrorReason.INVALID_DATA)
+ if (
+ dep_element.node.get_str(Symbol.KIND) == "junction"
+ ): # pylint: disable=no-member
+ raise LoadError(
+ "{}: Cannot depend on junction".format(dep.provenance),
+ LoadErrorReason.INVALID_DATA,
+ )
# All is well, push the dependency onto the LoadElement
# Pylint is not very happy with Cython and can't understand 'dependencies' is a list
current_element[0].dependencies.append( # pylint: disable=no-member
- Dependency(dep_element, dep.dep_type, dep.strict))
+ Dependency(dep_element, dep.dep_type, dep.strict)
+ )
else:
# We do not have any more dependencies to load for this
# element on the queue, report any invalid dep names
@@ -398,12 +429,18 @@ class Loader():
# Create `chain`, the loop of element dependencies from this
# element back to itself, by trimming everything before this
# element from the sequence under consideration.
- chain = [element.full_name for element in sequence[sequence.index(element):]]
+ chain = [
+ element.full_name
+ for element in sequence[sequence.index(element) :]
+ ]
chain.append(element.full_name)
- raise LoadError(("Circular dependency detected at element: {}\n" +
- "Dependency chain: {}")
- .format(element.full_name, " -> ".join(chain)),
- LoadErrorReason.CIRCULAR_DEPENDENCY)
+ raise LoadError(
+ (
+ "Circular dependency detected at element: {}\n"
+ + "Dependency chain: {}"
+ ).format(element.full_name, " -> ".join(chain)),
+ LoadErrorReason.CIRCULAR_DEPENDENCY,
+ )
if element not in validated:
# We've not already validated this element, so let's
# descend into it to check it out
@@ -449,9 +486,11 @@ class Loader():
workspace = self._context.get_workspaces().get_workspace(element.name)
skip_workspace = True
if workspace and not ignore_workspaces:
- workspace_node = {'kind': 'workspace'}
- workspace_node['path'] = workspace.get_absolute_path()
- workspace_node['ref'] = str(workspace.to_dict().get('last_successful', 'ignored'))
+ workspace_node = {"kind": "workspace"}
+ workspace_node["path"] = workspace.get_absolute_path()
+ workspace_node["ref"] = str(
+ workspace.to_dict().get("last_successful", "ignored")
+ )
node[Symbol.SOURCES] = [workspace_node]
skip_workspace = False
@@ -459,7 +498,7 @@ class Loader():
for index, source in enumerate(sources):
kind = source.get_str(Symbol.KIND)
# the workspace source plugin cannot be used unless the element is workspaced
- if kind == 'workspace' and skip_workspace:
+ if kind == "workspace" and skip_workspace:
continue
del source[Symbol.KIND]
@@ -468,18 +507,25 @@ class Loader():
directory = source.get_str(Symbol.DIRECTORY, default=None)
if directory:
del source[Symbol.DIRECTORY]
- meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
+ meta_source = MetaSource(
+ element.name, index, element_kind, kind, source, directory
+ )
meta_sources.append(meta_source)
- meta_element = MetaElement(self.project, element.name, element_kind,
- elt_provenance, meta_sources,
- node.get_mapping(Symbol.CONFIG, default={}),
- node.get_mapping(Symbol.VARIABLES, default={}),
- node.get_mapping(Symbol.ENVIRONMENT, default={}),
- node.get_str_list(Symbol.ENV_NOCACHE, default=[]),
- node.get_mapping(Symbol.PUBLIC, default={}),
- node.get_mapping(Symbol.SANDBOX, default={}),
- element_kind == 'junction')
+ meta_element = MetaElement(
+ self.project,
+ element.name,
+ element_kind,
+ elt_provenance,
+ meta_sources,
+ node.get_mapping(Symbol.CONFIG, default={}),
+ node.get_mapping(Symbol.VARIABLES, default={}),
+ node.get_mapping(Symbol.ENVIRONMENT, default={}),
+ node.get_str_list(Symbol.ENV_NOCACHE, default=[]),
+ node.get_mapping(Symbol.PUBLIC, default={}),
+ node.get_mapping(Symbol.SANDBOX, default={}),
+ element_kind == "junction",
+ )
# Cache it now, make sure it's already there before recursing
self._meta_elements[element.name] = meta_element
@@ -502,7 +548,11 @@ class Loader():
#
def _collect_element(self, top_element, task, ignore_workspaces=False):
element_queue = [top_element]
- meta_element_queue = [self._collect_element_no_deps(top_element, task, ignore_workspaces=ignore_workspaces)]
+ meta_element_queue = [
+ self._collect_element_no_deps(
+ top_element, task, ignore_workspaces=ignore_workspaces
+ )
+ ]
while element_queue:
element = element_queue.pop()
@@ -519,15 +569,17 @@ class Loader():
name = dep.element.name
if name not in loader._meta_elements:
- meta_dep = loader._collect_element_no_deps(dep.element, task, ignore_workspaces=ignore_workspaces)
+ meta_dep = loader._collect_element_no_deps(
+ dep.element, task, ignore_workspaces=ignore_workspaces
+ )
element_queue.append(dep.element)
meta_element_queue.append(meta_dep)
else:
meta_dep = loader._meta_elements[name]
- if dep.dep_type != 'runtime':
+ if dep.dep_type != "runtime":
meta_element.build_dependencies.append(meta_dep)
- if dep.dep_type != 'build':
+ if dep.dep_type != "build":
meta_element.dependencies.append(meta_dep)
if dep.strict:
meta_element.strict_dependencies.append(meta_dep)
@@ -546,8 +598,9 @@ class Loader():
# Raises: LoadError
#
# Returns: A Loader or None if specified junction does not exist
- def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0,
- provenance=None):
+ def _get_loader(
+ self, filename, *, rewritable=False, ticker=None, level=0, provenance=None
+ ):
provenance_str = ""
if provenance is not None:
@@ -560,17 +613,25 @@ class Loader():
if loader is None:
# do not allow junctions with the same name in different
# subprojects
- raise LoadError("{}Conflicting junction {} in subprojects, define junction in {}"
- .format(provenance_str, filename, self.project.name),
- LoadErrorReason.CONFLICTING_JUNCTION)
+ raise LoadError(
+ "{}Conflicting junction {} in subprojects, define junction in {}".format(
+ provenance_str, filename, self.project.name
+ ),
+ LoadErrorReason.CONFLICTING_JUNCTION,
+ )
return loader
if self._parent:
# junctions in the parent take precedence over junctions defined
# in subprojects
- loader = self._parent._get_loader(filename, rewritable=rewritable, ticker=ticker,
- level=level + 1, provenance=provenance)
+ loader = self._parent._get_loader(
+ filename,
+ rewritable=rewritable,
+ ticker=ticker,
+ level=level + 1,
+ provenance=provenance,
+ )
if loader:
self._loaders[filename] = loader
return loader
@@ -601,11 +662,16 @@ class Loader():
#
# Any task counting *inside* the junction will be handled by
# its loader.
- meta_element = self._collect_element_no_deps(self._elements[filename], _NO_PROGRESS)
- if meta_element.kind != 'junction':
- raise LoadError("{}{}: Expected junction but element kind is {}"
- .format(provenance_str, filename, meta_element.kind),
- LoadErrorReason.INVALID_DATA)
+ meta_element = self._collect_element_no_deps(
+ self._elements[filename], _NO_PROGRESS
+ )
+ if meta_element.kind != "junction":
+ raise LoadError(
+ "{}{}: Expected junction but element kind is {}".format(
+ provenance_str, filename, meta_element.kind
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
# We check that junctions have no dependencies a little
# early. This is cheating, since we don't technically know
@@ -623,7 +689,8 @@ class Loader():
if self._elements[filename].dependencies:
raise LoadError(
"Dependencies are forbidden for 'junction' elements",
- LoadErrorReason.INVALID_JUNCTION)
+ LoadErrorReason.INVALID_JUNCTION,
+ )
element = Element._new_from_meta(meta_element)
element._update_state()
@@ -631,26 +698,46 @@ class Loader():
# If this junction element points to a sub-sub-project, we need to
# find loader for that project.
if element.target:
- subproject_loader = self._get_loader(element.target_junction, rewritable=rewritable, ticker=ticker,
- level=level, provenance=provenance)
- loader = subproject_loader._get_loader(element.target_element, rewritable=rewritable, ticker=ticker,
- level=level, provenance=provenance)
+ subproject_loader = self._get_loader(
+ element.target_junction,
+ rewritable=rewritable,
+ ticker=ticker,
+ level=level,
+ provenance=provenance,
+ )
+ loader = subproject_loader._get_loader(
+ element.target_element,
+ rewritable=rewritable,
+ ticker=ticker,
+ level=level,
+ provenance=provenance,
+ )
self._loaders[filename] = loader
return loader
# Handle the case where a subproject needs to be fetched
#
- if element._get_consistency() >= Consistency.RESOLVED and not element._source_cached():
+ if (
+ element._get_consistency() >= Consistency.RESOLVED
+ and not element._source_cached()
+ ):
if ticker:
- ticker(filename, 'Fetching subproject')
+ ticker(filename, "Fetching subproject")
self._fetch_subprojects([element])
# Handle the case where a subproject has no ref
#
elif element._get_consistency() == Consistency.INCONSISTENT:
- detail = "Try tracking the junction element with `bst source track {}`".format(filename)
- raise LoadError("{}Subproject has no ref for junction: {}".format(provenance_str, filename),
- LoadErrorReason.SUBPROJECT_INCONSISTENT, detail=detail)
+ detail = "Try tracking the junction element with `bst source track {}`".format(
+ filename
+ )
+ raise LoadError(
+ "{}Subproject has no ref for junction: {}".format(
+ provenance_str, filename
+ ),
+ LoadErrorReason.SUBPROJECT_INCONSISTENT,
+ detail=detail,
+ )
sources = list(element.sources())
if len(sources) == 1 and sources[0]._get_local_path():
@@ -659,8 +746,13 @@ class Loader():
else:
# Stage sources
element._set_required()
- basedir = os.path.join(self.project.directory, ".bst", "staged-junctions",
- filename, element._get_cache_key())
+ basedir = os.path.join(
+ self.project.directory,
+ ".bst",
+ "staged-junctions",
+ filename,
+ element._get_cache_key(),
+ )
if not os.path.exists(basedir):
os.makedirs(basedir, exist_ok=True)
element._stage_sources_at(basedir)
@@ -669,18 +761,29 @@ class Loader():
project_dir = os.path.join(basedir, element.path)
try:
from .._project import Project # pylint: disable=cyclic-import
- project = Project(project_dir, self._context, junction=element,
- parent_loader=self, search_for_project=False,
- fetch_subprojects=self._fetch_subprojects)
+
+ project = Project(
+ project_dir,
+ self._context,
+ junction=element,
+ parent_loader=self,
+ search_for_project=False,
+ fetch_subprojects=self._fetch_subprojects,
+ )
except LoadError as e:
if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
message = (
- provenance_str + "Could not find the project.conf file in the project "
+ provenance_str
+ + "Could not find the project.conf file in the project "
"referred to by junction element '{}'.".format(element.name)
)
if element.path:
- message += " Was expecting it at path '{}' in the junction's source.".format(element.path)
- raise LoadError(message=message, reason=LoadErrorReason.INVALID_JUNCTION) from e
+ message += " Was expecting it at path '{}' in the junction's source.".format(
+ element.path
+ )
+ raise LoadError(
+ message=message, reason=LoadErrorReason.INVALID_JUNCTION
+ ) from e
# Otherwise, we don't know the reason, so just raise
raise
@@ -709,12 +812,14 @@ class Loader():
# We only allow splitting once since deep junction names are forbidden.
# Users who want to refer to elements in sub-sub-projects are required
# to create junctions on the top level project.
- junction_path = name.rsplit(':', 1)
+ junction_path = name.rsplit(":", 1)
if len(junction_path) == 1:
return None, junction_path[-1], self
else:
self._load_file(junction_path[-2], rewritable, ticker)
- loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker)
+ loader = self._get_loader(
+ junction_path[-2], rewritable=rewritable, ticker=ticker
+ )
return junction_path[-2], junction_path[-1], loader
# Print a warning message, checks warning_token against project configuration
@@ -763,11 +868,17 @@ class Loader():
invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME].append(filename)
if invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]:
- self._warn("Target elements '{}' do not have expected file extension `.bst` "
- "Improperly named elements will not be discoverable by commands"
- .format(invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]),
- warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX)
+ self._warn(
+ "Target elements '{}' do not have expected file extension `.bst` "
+ "Improperly named elements will not be discoverable by commands".format(
+ invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]
+ ),
+ warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX,
+ )
if invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]:
- self._warn("Target elements '{}' have invalid characerts in their name."
- .format(invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]),
- warning_token=CoreWarnings.BAD_CHARACTERS_IN_NAME)
+ self._warn(
+ "Target elements '{}' have invalid characerts in their name.".format(
+ invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]
+ ),
+ warning_token=CoreWarnings.BAD_CHARACTERS_IN_NAME,
+ )
diff --git a/src/buildstream/_loader/metaelement.py b/src/buildstream/_loader/metaelement.py
index 00d8560f8..97b0de242 100644
--- a/src/buildstream/_loader/metaelement.py
+++ b/src/buildstream/_loader/metaelement.py
@@ -20,7 +20,7 @@
from ..node import Node
-class MetaElement():
+class MetaElement:
# MetaElement()
#
@@ -40,9 +40,21 @@ class MetaElement():
# sandbox: Configuration specific to the sandbox environment
# first_pass: The element is to be loaded with first pass configuration (junction)
#
- def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
- variables=None, environment=None, env_nocache=None, public=None,
- sandbox=None, first_pass=False):
+ def __init__(
+ self,
+ project,
+ name,
+ kind=None,
+ provenance=None,
+ sources=None,
+ config=None,
+ variables=None,
+ environment=None,
+ env_nocache=None,
+ public=None,
+ sandbox=None,
+ first_pass=False,
+ ):
self.project = project
self.name = name
self.kind = kind
diff --git a/src/buildstream/_loader/metasource.py b/src/buildstream/_loader/metasource.py
index da2c0e292..bb83a6bc8 100644
--- a/src/buildstream/_loader/metasource.py
+++ b/src/buildstream/_loader/metasource.py
@@ -18,7 +18,7 @@
# Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
-class MetaSource():
+class MetaSource:
# MetaSource()
#
@@ -32,7 +32,9 @@ class MetaSource():
# config: The configuration data for the source
# first_pass: This source will be used with first project pass configuration (used for junctions).
#
- def __init__(self, element_name, element_index, element_kind, kind, config, directory):
+ def __init__(
+ self, element_name, element_index, element_kind, kind, config, directory
+ ):
self.element_name = element_name
self.element_index = element_index
self.element_kind = element_kind
diff --git a/src/buildstream/_message.py b/src/buildstream/_message.py
index f4f342a6a..79d71441c 100644
--- a/src/buildstream/_message.py
+++ b/src/buildstream/_message.py
@@ -23,19 +23,19 @@ import os
# Types of status messages.
#
-class MessageType():
- DEBUG = "debug" # Debugging message
- STATUS = "status" # Status message, verbose details
- INFO = "info" # Informative messages
- WARN = "warning" # Warning messages
- ERROR = "error" # Error messages
- BUG = "bug" # An unhandled exception was raised in a plugin
- LOG = "log" # Messages for log files _only_, never in the frontend
+class MessageType:
+ DEBUG = "debug" # Debugging message
+ STATUS = "status" # Status message, verbose details
+ INFO = "info" # Informative messages
+ WARN = "warning" # Warning messages
+ ERROR = "error" # Error messages
+ BUG = "bug" # An unhandled exception was raised in a plugin
+ LOG = "log" # Messages for log files _only_, never in the frontend
# Timed Messages: SUCCESS and FAIL have duration timestamps
- START = "start" # Status start message
- SUCCESS = "success" # Successful status complete message
- FAIL = "failure" # Failing status complete message
+ START = "start" # Status start message
+ SUCCESS = "success" # Successful status complete message
+ FAIL = "failure" # Failing status complete message
SKIPPED = "skipped"
@@ -46,34 +46,44 @@ unconditional_messages = [
MessageType.WARN,
MessageType.FAIL,
MessageType.ERROR,
- MessageType.BUG
+ MessageType.BUG,
]
# Message object
#
-class Message():
-
- def __init__(self, message_type, message, *,
- element_name=None,
- element_key=None,
- detail=None,
- action_name=None,
- elapsed=None,
- logfile=None,
- sandbox=False,
- scheduler=False):
+class Message:
+ def __init__(
+ self,
+ message_type,
+ message,
+ *,
+ element_name=None,
+ element_key=None,
+ detail=None,
+ action_name=None,
+ elapsed=None,
+ logfile=None,
+ sandbox=False,
+ scheduler=False
+ ):
self.message_type = message_type # Message type
- self.message = message # The message string
- self.element_name = element_name # The instance element name of the issuing plugin
- self.element_key = element_key # The display key of the issuing plugin element
- self.detail = detail # An additional detail string
- self.action_name = action_name # Name of the task queue (fetch, refresh, build, etc)
- self.elapsed = elapsed # The elapsed time, in timed messages
- self.logfile = logfile # The log file path where commands took place
- self.sandbox = sandbox # Whether the error that caused this message used a sandbox
- self.pid = os.getpid() # The process pid
- self.scheduler = scheduler # Whether this is a scheduler level message
+ self.message = message # The message string
+ self.element_name = (
+ element_name # The instance element name of the issuing plugin
+ )
+ self.element_key = element_key # The display key of the issuing plugin element
+ self.detail = detail # An additional detail string
+ self.action_name = (
+ action_name # Name of the task queue (fetch, refresh, build, etc)
+ )
+ self.elapsed = elapsed # The elapsed time, in timed messages
+ self.logfile = logfile # The log file path where commands took place
+ self.sandbox = (
+ sandbox # Whether the error that caused this message used a sandbox
+ )
+ self.pid = os.getpid() # The process pid
+ self.scheduler = scheduler # Whether this is a scheduler level message
self.creation_time = datetime.datetime.now()
if message_type in (MessageType.SUCCESS, MessageType.FAIL):
assert elapsed is not None
diff --git a/src/buildstream/_messenger.py b/src/buildstream/_messenger.py
index 20c327728..687d64ebf 100644
--- a/src/buildstream/_messenger.py
+++ b/src/buildstream/_messenger.py
@@ -39,15 +39,14 @@ if "BST_TEST_SUITE" in os.environ:
# TimeData class to contain times in an object that can be passed around
# and updated from different places
-class _TimeData():
- __slots__ = ['start_time']
+class _TimeData:
+ __slots__ = ["start_time"]
def __init__(self, start_time):
self.start_time = start_time
-class Messenger():
-
+class Messenger:
def __init__(self):
self._message_handler = None
self._silence_scope_depth = 0
@@ -160,11 +159,18 @@ class Messenger():
# silent_nested (bool): If True, all but _message.unconditional_messages are silenced
#
@contextmanager
- def timed_activity(self, activity_name, *, element_name=None, detail=None, silent_nested=False):
+ def timed_activity(
+ self, activity_name, *, element_name=None, detail=None, silent_nested=False
+ ):
with self._timed_suspendable() as timedata:
try:
# Push activity depth for status messages
- message = Message(MessageType.START, activity_name, detail=detail, element_name=element_name)
+ message = Message(
+ MessageType.START,
+ activity_name,
+ detail=detail,
+ element_name=element_name,
+ )
self.message(message)
with self.silence(actually_silence=silent_nested):
yield
@@ -173,12 +179,22 @@ class Messenger():
# Note the failure in status messages and reraise, the scheduler
# expects an error when there is an error.
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(MessageType.FAIL, activity_name, elapsed=elapsed, element_name=element_name)
+ message = Message(
+ MessageType.FAIL,
+ activity_name,
+ elapsed=elapsed,
+ element_name=element_name,
+ )
self.message(message)
raise
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(MessageType.SUCCESS, activity_name, elapsed=elapsed, element_name=element_name)
+ message = Message(
+ MessageType.SUCCESS,
+ activity_name,
+ elapsed=elapsed,
+ element_name=element_name,
+ )
self.message(message)
# simple_task()
@@ -195,10 +211,14 @@ class Messenger():
# Task: A Task object that represents this activity, principally used to report progress
#
@contextmanager
- def simple_task(self, activity_name, *, element_name=None, full_name=None, silent_nested=False):
+ def simple_task(
+ self, activity_name, *, element_name=None, full_name=None, silent_nested=False
+ ):
# Bypass use of State when none exists (e.g. tests)
if not self._state:
- with self.timed_activity(activity_name, element_name=element_name, silent_nested=silent_nested):
+ with self.timed_activity(
+ activity_name, element_name=element_name, silent_nested=silent_nested
+ ):
yield
return
@@ -207,7 +227,9 @@ class Messenger():
with self._timed_suspendable() as timedata:
try:
- message = Message(MessageType.START, activity_name, element_name=element_name)
+ message = Message(
+ MessageType.START, activity_name, element_name=element_name
+ )
self.message(message)
task = self._state.add_task(activity_name, full_name)
@@ -221,7 +243,12 @@ class Messenger():
except BstError:
elapsed = datetime.datetime.now() - timedata.start_time
- message = Message(MessageType.FAIL, activity_name, elapsed=elapsed, element_name=element_name)
+ message = Message(
+ MessageType.FAIL,
+ activity_name,
+ elapsed=elapsed,
+ element_name=element_name,
+ )
self.message(message)
raise
finally:
@@ -235,11 +262,18 @@ class Messenger():
if task.current_progress is not None and elapsed > _DISPLAY_LIMIT:
if task.maximum_progress is not None:
- detail = "{} of {} subtasks processed".format(task.current_progress, task.maximum_progress)
+ detail = "{} of {} subtasks processed".format(
+ task.current_progress, task.maximum_progress
+ )
else:
detail = "{} subtasks processed".format(task.current_progress)
- message = Message(MessageType.SUCCESS, activity_name, elapsed=elapsed, detail=detail,
- element_name=element_name)
+ message = Message(
+ MessageType.SUCCESS,
+ activity_name,
+ elapsed=elapsed,
+ detail=detail,
+ element_name=element_name,
+ )
self.message(message)
# recorded_messages()
@@ -274,14 +308,15 @@ class Messenger():
# Create the fully qualified logfile in the log directory,
# appending the pid and .log extension at the end.
- self._log_filename = os.path.join(logdir,
- '{}.{}.log'.format(filename, os.getpid()))
+ self._log_filename = os.path.join(
+ logdir, "{}.{}.log".format(filename, os.getpid())
+ )
# Ensure the directory exists first
directory = os.path.dirname(self._log_filename)
os.makedirs(directory, exist_ok=True)
- with open(self._log_filename, 'a') as logfile:
+ with open(self._log_filename, "a") as logfile:
# Write one last line to the log and flush it to disk
def flush_log():
@@ -291,7 +326,7 @@ class Messenger():
#
# So just try to flush as well as we can at SIGTERM time
try:
- logfile.write('\n\nForcefully terminated\n')
+ logfile.write("\n\nForcefully terminated\n")
logfile.flush()
except RuntimeError:
os.fsync(logfile.fileno())
@@ -352,26 +387,28 @@ class Messenger():
template += ": {message}"
- detail = ''
+ detail = ""
if message.detail is not None:
template += "\n\n{detail}"
- detail = message.detail.rstrip('\n')
+ detail = message.detail.rstrip("\n")
detail = INDENT + INDENT.join(detail.splitlines(True))
timecode = EMPTYTIME
if message.message_type in (MessageType.SUCCESS, MessageType.FAIL):
- hours, remainder = divmod(int(message.elapsed.total_seconds()), 60**2)
+ hours, remainder = divmod(int(message.elapsed.total_seconds()), 60 ** 2)
minutes, seconds = divmod(remainder, 60)
timecode = "{0:02d}:{1:02d}:{2:02d}".format(hours, minutes, seconds)
- text = template.format(timecode=timecode,
- element_name=element_name,
- type=message.message_type.upper(),
- message=message.message,
- detail=detail)
+ text = template.format(
+ timecode=timecode,
+ element_name=element_name,
+ type=message.message_type.upper(),
+ message=message.message,
+ detail=detail,
+ )
# Write to the open log file
- self._log_handle.write('{}\n'.format(text))
+ self._log_handle.write("{}\n".format(text))
self._log_handle.flush()
# get_state_for_child_job_pickling(self)
@@ -399,21 +436,21 @@ class Messenger():
# access to private details of Messenger, but it would open up a window
# where messages wouldn't be handled as expected.
#
- del state['_message_handler']
+ del state["_message_handler"]
# The render status callback is only used in the main process
#
- del state['_render_status_cb']
+ del state["_render_status_cb"]
# The "simple_task" context manager is not needed outside the main
# process. During testing we override it to something that cannot
# pickle, so just drop it when pickling to a child job. Note that it
# will only appear in 'state' if it has been overridden.
#
- state.pop('simple_task', None)
+ state.pop("simple_task", None)
# The State object is not needed outside the main process
- del state['_state']
+ del state["_state"]
return state
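
The __getstate__ hunk above drops members that only make sense in the main process before the Messenger is pickled for a child job. A minimal, standalone sketch of that pattern (class and member names here are hypothetical, not BuildStream API):

class MainProcessOnly:
    def __init__(self, render_cb):
        self._render_status_cb = render_cb  # only meaningful in the main process
        self.log_lines = []                 # plain data, safe to pickle

    def __getstate__(self):
        # Copy the instance dict and drop anything a child job must not inherit,
        # mirroring the del state["..."] calls in Messenger.__getstate__ above.
        state = self.__dict__.copy()
        del state["_render_status_cb"]
        return state
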
diff --git a/src/buildstream/_options/option.py b/src/buildstream/_options/option.py
index 51017be22..f039ca28a 100644
--- a/src/buildstream/_options/option.py
+++ b/src/buildstream/_options/option.py
@@ -27,11 +27,7 @@ if TYPE_CHECKING:
# Shared symbols for validation purposes
#
-OPTION_SYMBOLS = [
- 'type',
- 'description',
- 'variable'
-]
+OPTION_SYMBOLS = ["type", "description", "variable"]
# Option()
@@ -42,7 +38,7 @@ OPTION_SYMBOLS = [
# the loaded project options is a collection of typed Option
# instances.
#
-class Option():
+class Option:
# Subclasses use this to specify the type name used
# for the yaml format and error messages
@@ -66,12 +62,14 @@ class Option():
def load(self, node):
# We don't use the description, but we do require that options have a
# description.
- node.get_str('description')
- self.variable = node.get_str('variable', default=None)
+ node.get_str("description")
+ self.variable = node.get_str("variable", default=None)
# Assert valid symbol name for variable name
if self.variable is not None:
- _assert_symbol_name(self.variable, 'variable name', ref_node=node.get_node('variable'))
+ _assert_symbol_name(
+ self.variable, "variable name", ref_node=node.get_node("variable")
+ )
# load_value()
#
diff --git a/src/buildstream/_options/optionarch.py b/src/buildstream/_options/optionarch.py
index cbe360f9e..ed7656ea3 100644
--- a/src/buildstream/_options/optionarch.py
+++ b/src/buildstream/_options/optionarch.py
@@ -36,7 +36,7 @@ from .optionenum import OptionEnum
#
class OptionArch(OptionEnum):
- OPTION_TYPE = 'arch'
+ OPTION_TYPE = "arch"
def load(self, node):
super().load_special(node, allow_default_definition=False)
@@ -54,12 +54,18 @@ class OptionArch(OptionEnum):
# Do not terminate the loop early to ensure we validate
# all values in the list.
except PlatformError as e:
- provenance = node.get_sequence('values').scalar_at(index).get_provenance()
+ provenance = (
+ node.get_sequence("values").scalar_at(index).get_provenance()
+ )
prefix = ""
if provenance:
prefix = "{}: ".format(provenance)
- raise LoadError("{}Invalid value for {} option '{}': {}"
- .format(prefix, self.OPTION_TYPE, self.name, e), LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}Invalid value for {} option '{}': {}".format(
+ prefix, self.OPTION_TYPE, self.name, e
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
if default_value is None:
# Host architecture is not supported by the project.
diff --git a/src/buildstream/_options/optionbool.py b/src/buildstream/_options/optionbool.py
index f91cb257d..d8201de51 100644
--- a/src/buildstream/_options/optionbool.py
+++ b/src/buildstream/_options/optionbool.py
@@ -27,13 +27,13 @@ from .option import Option, OPTION_SYMBOLS
#
class OptionBool(Option):
- OPTION_TYPE = 'bool'
+ OPTION_TYPE = "bool"
def load(self, node):
super().load(node)
- node.validate_keys(OPTION_SYMBOLS + ['default'])
- self.value = node.get_bool('default')
+ node.validate_keys(OPTION_SYMBOLS + ["default"])
+ self.value = node.get_bool("default")
def load_value(self, node, *, transform=None):
if transform:
@@ -42,13 +42,15 @@ class OptionBool(Option):
self.value = node.get_bool(self.name)
def set_value(self, value):
- if value in ('True', 'true'):
+ if value in ("True", "true"):
self.value = True
- elif value in ('False', 'false'):
+ elif value in ("False", "false"):
self.value = False
else:
- raise LoadError("Invalid value for boolean option {}: {}".format(self.name, value),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Invalid value for boolean option {}: {}".format(self.name, value),
+ LoadErrorReason.INVALID_DATA,
+ )
def get_value(self):
if self.value:
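
For reference, the set_value rule shown above accepts exactly four spellings. A standalone sketch of the same parsing (the helper name is invented for illustration, not part of the patch):

def parse_bool_option(name, value):
    # Mirrors OptionBool.set_value above: only these literal spellings are valid.
    if value in ("True", "true"):
        return True
    if value in ("False", "false"):
        return False
    raise ValueError("Invalid value for boolean option {}: {}".format(name, value))

# parse_bool_option("strip-debug", "true") -> True
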
diff --git a/src/buildstream/_options/optioneltmask.py b/src/buildstream/_options/optioneltmask.py
index 178999fa1..5a0d15f8e 100644
--- a/src/buildstream/_options/optioneltmask.py
+++ b/src/buildstream/_options/optioneltmask.py
@@ -28,7 +28,7 @@ from .optionflags import OptionFlags
#
class OptionEltMask(OptionFlags):
- OPTION_TYPE = 'element-mask'
+ OPTION_TYPE = "element-mask"
def load(self, node):
# Ask the parent constructor to disallow value definitions,
@@ -41,6 +41,6 @@ class OptionEltMask(OptionFlags):
def load_valid_values(self, node):
values = []
for filename in utils.list_relative_paths(self.pool.element_path):
- if filename.endswith('.bst'):
+ if filename.endswith(".bst"):
values.append(filename)
return values
diff --git a/src/buildstream/_options/optionenum.py b/src/buildstream/_options/optionenum.py
index 4a0941369..80d0fa156 100644
--- a/src/buildstream/_options/optionenum.py
+++ b/src/buildstream/_options/optionenum.py
@@ -27,7 +27,7 @@ from .option import Option, OPTION_SYMBOLS
#
class OptionEnum(Option):
- OPTION_TYPE = 'enum'
+ OPTION_TYPE = "enum"
def __init__(self, name, definition, pool):
self.values = None
@@ -39,17 +39,20 @@ class OptionEnum(Option):
def load_special(self, node, allow_default_definition=True):
super().load(node)
- valid_symbols = OPTION_SYMBOLS + ['values']
+ valid_symbols = OPTION_SYMBOLS + ["values"]
if allow_default_definition:
- valid_symbols += ['default']
+ valid_symbols += ["default"]
node.validate_keys(valid_symbols)
- self.values = node.get_str_list('values', default=[])
+ self.values = node.get_str_list("values", default=[])
if not self.values:
- raise LoadError("{}: No values specified for {} option '{}'"
- .format(node.get_provenance(), self.OPTION_TYPE, self.name),
- LoadErrorReason.INVALID_DATA,)
+ raise LoadError(
+ "{}: No values specified for {} option '{}'".format(
+ node.get_provenance(), self.OPTION_TYPE, self.name
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
# Allow subclass to define the default value
self.value = self.load_default_value(node)
@@ -77,13 +80,16 @@ class OptionEnum(Option):
prefix = "{}: ".format(provenance)
else:
prefix = ""
- raise LoadError("{}Invalid value for {} option '{}': {}\n"
- .format(prefix, self.OPTION_TYPE, self.name, value) +
- "Valid values: {}".format(", ".join(self.values)),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}Invalid value for {} option '{}': {}\n".format(
+ prefix, self.OPTION_TYPE, self.name, value
+ )
+ + "Valid values: {}".format(", ".join(self.values)),
+ LoadErrorReason.INVALID_DATA,
+ )
def load_default_value(self, node):
- value_node = node.get_scalar('default')
+ value_node = node.get_scalar("default")
value = value_node.as_str()
self.validate(value, value_node)
return value
diff --git a/src/buildstream/_options/optionflags.py b/src/buildstream/_options/optionflags.py
index e5217a718..5977930d4 100644
--- a/src/buildstream/_options/optionflags.py
+++ b/src/buildstream/_options/optionflags.py
@@ -27,7 +27,7 @@ from .option import Option, OPTION_SYMBOLS
#
class OptionFlags(Option):
- OPTION_TYPE = 'flags'
+ OPTION_TYPE = "flags"
def __init__(self, name, definition, pool):
self.values = None
@@ -39,20 +39,23 @@ class OptionFlags(Option):
def load_special(self, node, allow_value_definitions=True):
super().load(node)
- valid_symbols = OPTION_SYMBOLS + ['default']
+ valid_symbols = OPTION_SYMBOLS + ["default"]
if allow_value_definitions:
- valid_symbols += ['values']
+ valid_symbols += ["values"]
node.validate_keys(valid_symbols)
# Allow subclass to define the valid values
self.values = self.load_valid_values(node)
if not self.values:
- raise LoadError("{}: No values specified for {} option '{}'"
- .format(node.get_provenance(), self.OPTION_TYPE, self.name),
- LoadErrorReason.INVALID_DATA)
-
- value_node = node.get_sequence('default', default=[])
+ raise LoadError(
+ "{}: No values specified for {} option '{}'".format(
+ node.get_provenance(), self.OPTION_TYPE, self.name
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
+
+ value_node = node.get_sequence("default", default=[])
self.value = value_node.as_str_list()
self.validate(self.value, value_node)
@@ -70,7 +73,7 @@ class OptionFlags(Option):
stripped = "".join(value.split())
# Get the comma separated values
- list_value = stripped.split(',')
+ list_value = stripped.split(",")
self.validate(list_value)
self.value = sorted(list_value)
@@ -86,12 +89,15 @@ class OptionFlags(Option):
prefix = "{}: ".format(provenance)
else:
prefix = ""
- raise LoadError("{}Invalid value for flags option '{}': {}\n"
- .format(prefix, self.name, value) +
- "Valid values: {}".format(", ".join(self.values)),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}Invalid value for flags option '{}': {}\n".format(
+ prefix, self.name, value
+ )
+ + "Valid values: {}".format(", ".join(self.values)),
+ LoadErrorReason.INVALID_DATA,
+ )
def load_valid_values(self, node):
# Allow the more descriptive error to raise when no values
# exist rather than bailing out here (by specifying default_value)
- return node.get_str_list('values', default=[])
+ return node.get_str_list("values", default=[])
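
The set_value path above strips whitespace before splitting on commas. A small illustrative equivalent (function name hypothetical):

def parse_flags(value):
    # Remove all whitespace, then split the comma-separated values and sort them,
    # as OptionFlags.set_value does above.
    stripped = "".join(value.split())
    return sorted(stripped.split(","))

# parse_flags("debug, doc ,test") -> ["debug", "doc", "test"]
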
diff --git a/src/buildstream/_options/optionos.py b/src/buildstream/_options/optionos.py
index fcf4552f5..3f4e902c9 100644
--- a/src/buildstream/_options/optionos.py
+++ b/src/buildstream/_options/optionos.py
@@ -1,4 +1,3 @@
-
#
# Copyright (C) 2017 Codethink Limited
#
@@ -26,7 +25,7 @@ from .optionenum import OptionEnum
#
class OptionOS(OptionEnum):
- OPTION_TYPE = 'os'
+ OPTION_TYPE = "os"
def load(self, node):
super().load_special(node, allow_default_definition=False)
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index a0730c617..3b58a5904 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -50,8 +50,7 @@ class OptionTypes(FastEnum):
OS = OptionOS.OPTION_TYPE
-class OptionPool():
-
+class OptionPool:
def __init__(self, element_path):
# We hold on to the element path for the sake of OptionEltMask
self.element_path = element_path
@@ -59,7 +58,7 @@ class OptionPool():
#
# Private members
#
- self._options = {} # The Options
+ self._options = {} # The Options
self._variables = None # The Options resolved into typed variables
self._environment = None
@@ -69,7 +68,7 @@ class OptionPool():
state = self.__dict__.copy()
# Jinja2 Environments don't appear to be serializable. It is easy
# enough for us to reconstruct this one anyway, so no need to pickle it.
- del state['_environment']
+ del state["_environment"]
return state
def __setstate__(self, state):
@@ -88,9 +87,14 @@ class OptionPool():
for option_name, option_definition in options.items():
# Assert that the option name is a valid symbol
- _assert_symbol_name(option_name, "option name", ref_node=option_definition, allow_dashes=False)
-
- opt_type_name = option_definition.get_enum('type', OptionTypes)
+ _assert_symbol_name(
+ option_name,
+ "option name",
+ ref_node=option_definition,
+ allow_dashes=False,
+ )
+
+ opt_type_name = option_definition.get_enum("type", OptionTypes)
opt_type = _OPTION_TYPES[opt_type_name.value]
option = opt_type(option_name, option_definition, self)
@@ -110,8 +114,10 @@ class OptionPool():
option = self._options[option_name]
except KeyError as e:
p = option_value.get_provenance()
- raise LoadError("{}: Unknown option '{}' specified"
- .format(p, option_name), LoadErrorReason.INVALID_DATA) from e
+ raise LoadError(
+ "{}: Unknown option '{}' specified".format(p, option_name),
+ LoadErrorReason.INVALID_DATA,
+ ) from e
option.load_value(node, transform=transform)
# load_cli_values()
@@ -129,8 +135,12 @@ class OptionPool():
option = self._options[option_name]
except KeyError as e:
if not ignore_unknown:
- raise LoadError("Unknown option '{}' specified on the command line"
- .format(option_name), LoadErrorReason.INVALID_DATA) from e
+ raise LoadError(
+ "Unknown option '{}' specified on the command line".format(
+ option_name
+ ),
+ LoadErrorReason.INVALID_DATA,
+ ) from e
else:
option.set_value(option_value)
@@ -227,7 +237,9 @@ class OptionPool():
# Variables must be resolved at this point.
#
try:
- template_string = "{{% if {} %}} True {{% else %}} False {{% endif %}}".format(expression)
+ template_string = "{{% if {} %}} True {{% else %}} False {{% endif %}}".format(
+ expression
+ )
template = self._environment.from_string(template_string)
context = template.new_context(self._variables, shared=True)
result = template.root_render_func(context)
@@ -239,11 +251,15 @@ class OptionPool():
elif val == "False":
return False
else: # pragma: nocover
- raise LoadError("Failed to evaluate expression: {}".format(expression),
- LoadErrorReason.EXPRESSION_FAILED)
+ raise LoadError(
+ "Failed to evaluate expression: {}".format(expression),
+ LoadErrorReason.EXPRESSION_FAILED,
+ )
except jinja2.exceptions.TemplateError as e:
- raise LoadError("Failed to evaluate expression ({}): {}".format(expression, e),
- LoadErrorReason.EXPRESSION_FAILED)
+ raise LoadError(
+ "Failed to evaluate expression ({}): {}".format(expression, e),
+ LoadErrorReason.EXPRESSION_FAILED,
+ )
# Recursion assistant for lists, in case there
# are lists of lists.
@@ -262,25 +278,31 @@ class OptionPool():
# Return true if a conditional was processed.
#
def _process_one_node(self, node):
- conditions = node.get_sequence('(?)', default=None)
- assertion = node.get_str('(!)', default=None)
+ conditions = node.get_sequence("(?)", default=None)
+ assertion = node.get_str("(!)", default=None)
# Process assertions first; we want to abort on the first encountered
# assertion in a given dictionary, and not lose an assertion due to
# it being overwritten by a later assertion which might also trigger.
if assertion is not None:
- p = node.get_scalar('(!)').get_provenance()
- raise LoadError("{}: {}".format(p, assertion.strip()), LoadErrorReason.USER_ASSERTION)
+ p = node.get_scalar("(!)").get_provenance()
+ raise LoadError(
+ "{}: {}".format(p, assertion.strip()), LoadErrorReason.USER_ASSERTION
+ )
if conditions is not None:
- del node['(?)']
+ del node["(?)"]
for condition in conditions:
tuples = list(condition.items())
if len(tuples) > 1:
provenance = condition.get_provenance()
- raise LoadError("{}: Conditional statement has more than one key".format(provenance),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}: Conditional statement has more than one key".format(
+ provenance
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
expression, value = tuples[0]
try:
@@ -290,10 +312,16 @@ class OptionPool():
provenance = condition.get_provenance()
raise LoadError("{}: {}".format(provenance, e), e.reason) from e
- if type(value) is not MappingNode: # pylint: disable=unidiomatic-typecheck
+ if (
+ type(value) is not MappingNode
+ ): # pylint: disable=unidiomatic-typecheck
provenance = condition.get_provenance()
- raise LoadError("{}: Only values of type 'dict' can be composed.".format(provenance),
- LoadErrorReason.ILLEGAL_COMPOSITE)
+ raise LoadError(
+ "{}: Only values of type 'dict' can be composed.".format(
+ provenance
+ ),
+ LoadErrorReason.ILLEGAL_COMPOSITE,
+ )
# Apply the yaml fragment if its condition evaluates to true
if apply_fragment:
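
The reformatted template_string above is the heart of how OptionPool evaluates '(?)' conditionals: the expression is wrapped in a tiny Jinja2 if/else template whose rendered output is read back as a boolean. A self-contained sketch of that trick (assuming jinja2 is installed; the function and argument names are illustrative only):

import jinja2

def evaluate_condition(expression, variables):
    # Render "{% if <expression> %} True {% else %} False {% endif %}" against the
    # resolved option variables and interpret the rendered text as a boolean.
    template_string = "{{% if {} %}} True {{% else %}} False {{% endif %}}".format(expression)
    template = jinja2.Environment().from_string(template_string)
    return template.render(variables).strip() == "True"

# evaluate_condition("arch == 'x86-64'", {"arch": "x86-64"}) -> True
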
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index 943d65e44..ace93acef 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -40,27 +40,27 @@ from ._project import ProjectRefStorage
#
# These values correspond to the CLI `--deps` arguments for convenience.
#
-class PipelineSelection():
+class PipelineSelection:
# Select only the target elements in the associated targets
- NONE = 'none'
+ NONE = "none"
# As NONE, but redirect elements that are capable of it
- REDIRECT = 'redirect'
+ REDIRECT = "redirect"
# Select elements which must be built for the associated targets to be built
- PLAN = 'plan'
+ PLAN = "plan"
# All dependencies of all targets, including the targets
- ALL = 'all'
+ ALL = "all"
# All direct build dependencies and their recursive runtime dependencies,
# excluding the targets
- BUILD = 'build'
+ BUILD = "build"
# All direct runtime dependencies and their recursive runtime dependencies,
# including the targets
- RUN = 'run'
+ RUN = "run"
# Pipeline()
@@ -70,12 +70,11 @@ class PipelineSelection():
# context (Context): The Context object
# artifacts (Context): The ArtifactCache object
#
-class Pipeline():
-
+class Pipeline:
def __init__(self, context, project, artifacts):
- self._context = context # The Context
- self._project = project # The toplevel project
+ self._context = context # The Context
+ self._project = project # The toplevel project
#
# Private members
@@ -104,14 +103,17 @@ class Pipeline():
# First concatenate all the lists for the loader's sake
targets = list(itertools.chain(*target_groups))
- with PROFILER.profile(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)):
- elements = self._project.load_elements(targets, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
+ with PROFILER.profile(
+ Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, "-") for t in targets)
+ ):
+ elements = self._project.load_elements(
+ targets, rewritable=rewritable, ignore_workspaces=ignore_workspaces
+ )
# Now create element groups to match the input target groups
elt_iter = iter(elements)
element_groups = [
- [next(elt_iter) for i in range(len(group))]
- for group in target_groups
+ [next(elt_iter) for i in range(len(group))] for group in target_groups
]
return tuple(element_groups)
@@ -139,7 +141,9 @@ class Pipeline():
# targets (list of Element): The list of toplevel element targets
#
def resolve_elements(self, targets):
- with self._context.messenger.simple_task("Resolving cached state", silent_nested=True) as task:
+ with self._context.messenger.simple_task(
+ "Resolving cached state", silent_nested=True
+ ) as task:
# We need to go through the project to access the loader
if task:
task.set_maximum_progress(self._project.loader.loaded)
@@ -170,7 +174,9 @@ class Pipeline():
# targets (list [Element]): The list of element targets
#
def check_remotes(self, targets):
- with self._context.messenger.simple_task("Querying remotes for cached status", silent_nested=True) as task:
+ with self._context.messenger.simple_task(
+ "Querying remotes for cached status", silent_nested=True
+ ) as task:
task.set_maximum_progress(len(targets))
for element in targets:
@@ -213,7 +219,9 @@ class Pipeline():
def plan(self, elements):
# Keep locally cached elements in the plan if remote artifact cache is used
# to allow pulling artifact with strict cache key, if available.
- plan_cached = not self._context.get_strict() and self._artifacts.has_fetch_remotes()
+ plan_cached = (
+ not self._context.get_strict() and self._artifacts.has_fetch_remotes()
+ )
return _Planner().plan(elements, plan_cached)
@@ -241,8 +249,10 @@ class Pipeline():
for t in targets:
new_elm = t._get_source_element()
if new_elm != t and not silent:
- self._message(MessageType.INFO, "Element '{}' redirected to '{}'"
- .format(t.name, new_elm.name))
+ self._message(
+ MessageType.INFO,
+ "Element '{}' redirected to '{}'".format(t.name, new_elm.name),
+ )
if new_elm not in elements:
elements.append(new_elm)
elif mode == PipelineSelection.PLAN:
@@ -297,9 +307,11 @@ class Pipeline():
# Build a list of 'intersection' elements, i.e. the set of
# elements that lie on the border closest to excepted elements
# between excepted and target elements.
- intersection = list(itertools.chain.from_iterable(
- find_intersection(element) for element in except_targets
- ))
+ intersection = list(
+ itertools.chain.from_iterable(
+ find_intersection(element) for element in except_targets
+ )
+ )
# Now use this set of elements to traverse the targeted
# elements, except 'intersection' elements and their unique
@@ -355,10 +367,7 @@ class Pipeline():
#
def subtract_elements(self, elements, subtract):
subtract_set = set(subtract)
- return [
- e for e in elements
- if e not in subtract_set
- ]
+ return [e for e in elements if e not in subtract_set]
# add_elements()
#
@@ -423,21 +432,31 @@ class Pipeline():
if inconsistent:
detail = "Exact versions are missing for the following elements:\n\n"
for element in inconsistent:
- detail += " Element: {} is inconsistent\n".format(element._get_full_name())
+ detail += " Element: {} is inconsistent\n".format(
+ element._get_full_name()
+ )
for source in element.sources():
if source._get_consistency() == Consistency.INCONSISTENT:
detail += " {} is missing ref\n".format(source)
- detail += '\n'
+ detail += "\n"
detail += "Try tracking these elements first with `bst source track`\n"
- raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
+ raise PipelineError(
+ "Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline"
+ )
if inconsistent_workspaced:
- detail = "Some workspaces exist but are not closed\n" + \
- "Try closing them with `bst workspace close`\n\n"
+ detail = (
+ "Some workspaces exist but are not closed\n"
+ + "Try closing them with `bst workspace close`\n\n"
+ )
for element in inconsistent_workspaced:
detail += " " + element._get_full_name() + "\n"
- raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
+ raise PipelineError(
+ "Inconsistent pipeline",
+ detail=detail,
+ reason="inconsistent-pipeline-workspaced",
+ )
# assert_sources_cached()
#
@@ -450,22 +469,30 @@ class Pipeline():
uncached = []
with self._context.messenger.timed_activity("Checking sources"):
for element in elements:
- if element._get_consistency() < Consistency.CACHED and \
- not element._source_cached():
+ if (
+ element._get_consistency() < Consistency.CACHED
+ and not element._source_cached()
+ ):
uncached.append(element)
if uncached:
detail = "Sources are not cached for the following elements:\n\n"
for element in uncached:
- detail += " Following sources for element: {} are not cached:\n".format(element._get_full_name())
+ detail += " Following sources for element: {} are not cached:\n".format(
+ element._get_full_name()
+ )
for source in element.sources():
if source._get_consistency() < Consistency.CACHED:
detail += " {}\n".format(source)
- detail += '\n'
- detail += "Try fetching these elements first with `bst source fetch`,\n" + \
- "or run this command with `--fetch` option\n"
+ detail += "\n"
+ detail += (
+ "Try fetching these elements first with `bst source fetch`,\n"
+ + "or run this command with `--fetch` option\n"
+ )
- raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
+ raise PipelineError(
+ "Uncached sources", detail=detail, reason="uncached-sources"
+ )
#############################################################
# Private Methods #
@@ -484,10 +511,7 @@ class Pipeline():
# not contain any cross junction elements.
#
def _filter_cross_junctions(self, project, elements):
- return [
- element for element in elements
- if element._get_project() is project
- ]
+ return [element for element in elements if element._get_project() is project]
# _assert_junction_tracking()
#
@@ -512,10 +536,14 @@ class Pipeline():
for element in elements:
element_project = element._get_project()
if element_project is not self._project:
- detail = "Requested to track sources across junction boundaries\n" + \
- "in a project which does not use project.refs ref-storage."
+ detail = (
+ "Requested to track sources across junction boundaries\n"
+ + "in a project which does not use project.refs ref-storage."
+ )
- raise PipelineError("Untrackable sources", detail=detail, reason="untrackable-sources")
+ raise PipelineError(
+ "Untrackable sources", detail=detail, reason="untrackable-sources"
+ )
# _message()
#
@@ -523,8 +551,7 @@ class Pipeline():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self._context.messenger.message(
- Message(message_type, message, **args))
+ self._context.messenger.message(Message(message_type, message, **args))
# _Planner()
@@ -534,7 +561,7 @@ class Pipeline():
# parts need to be built depending on build only dependencies
# being cached, and depth sorting for more efficient processing.
#
-class _Planner():
+class _Planner:
def __init__(self):
self.depth_map = OrderedDict()
self.visiting_elements = set()
@@ -574,4 +601,8 @@ class _Planner():
for index, item in enumerate(depth_sorted):
item[0]._set_depth(index)
- return [item[0] for item in depth_sorted if plan_cached or not item[0]._cached_success()]
+ return [
+ item[0]
+ for item in depth_sorted
+ if plan_cached or not item[0]._cached_success()
+ ]
diff --git a/src/buildstream/_platform/darwin.py b/src/buildstream/_platform/darwin.py
index f23535373..adc858842 100644
--- a/src/buildstream/_platform/darwin.py
+++ b/src/buildstream/_platform/darwin.py
@@ -59,9 +59,10 @@ class Darwin(Platform):
@staticmethod
def _create_dummy_sandbox(*args, **kwargs):
- kwargs['dummy_reason'] = \
- "OSXFUSE is not supported and there are no supported sandbox " + \
- "technologies for MacOS at this time"
+ kwargs["dummy_reason"] = (
+ "OSXFUSE is not supported and there are no supported sandbox "
+ + "technologies for MacOS at this time"
+ )
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_platform/fallback.py b/src/buildstream/_platform/fallback.py
index 4f7ff8086..b9e9f520d 100644
--- a/src/buildstream/_platform/fallback.py
+++ b/src/buildstream/_platform/fallback.py
@@ -20,15 +20,15 @@ from .platform import Platform
class Fallback(Platform):
-
def _check_dummy_sandbox_config(self, config):
return True
def _create_dummy_sandbox(self, *args, **kwargs):
- kwargs['dummy_reason'] = \
- ("FallBack platform only implements dummy sandbox, "
- "Buildstream may be having issues correctly detecting your platform, "
- "platform can be forced with BST_FORCE_BACKEND")
+ kwargs["dummy_reason"] = (
+ "FallBack platform only implements dummy sandbox, "
+ "Buildstream may be having issues correctly detecting your platform, "
+ "platform can be forced with BST_FORCE_BACKEND"
+ )
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_platform/linux.py b/src/buildstream/_platform/linux.py
index b400bfaac..bdc2e0df1 100644
--- a/src/buildstream/_platform/linux.py
+++ b/src/buildstream/_platform/linux.py
@@ -28,17 +28,16 @@ from .._exceptions import PlatformError
class Linux(Platform):
-
def _setup_sandbox(self, force_sandbox):
sandbox_setups = {
- 'bwrap': self._setup_bwrap_sandbox,
- 'buildbox': self._setup_buildbox_sandbox,
- 'chroot': self._setup_chroot_sandbox,
- 'dummy': self._setup_dummy_sandbox,
+ "bwrap": self._setup_bwrap_sandbox,
+ "buildbox": self._setup_buildbox_sandbox,
+ "chroot": self._setup_chroot_sandbox,
+ "dummy": self._setup_dummy_sandbox,
}
preferred_sandboxes = [
- 'bwrap',
+ "bwrap",
]
self._try_sandboxes(force_sandbox, sandbox_setups, preferred_sandboxes)
@@ -54,11 +53,12 @@ class Linux(Platform):
def can_crossbuild(self, config):
host_arch = self.get_host_arch()
- if ((config.build_arch == "x86-32" and host_arch == "x86-64") or
- (config.build_arch == "aarch32" and host_arch == "aarch64")):
+ if (config.build_arch == "x86-32" and host_arch == "x86-64") or (
+ config.build_arch == "aarch32" and host_arch == "aarch64"
+ ):
if self.linux32 is None:
try:
- utils.get_host_tool('linux32')
+ utils.get_host_tool("linux32")
self.linux32 = True
except utils.ProgramNotFoundError:
self.linux32 = False
@@ -76,7 +76,7 @@ class Linux(Platform):
def _create_dummy_sandbox(self, *args, **kwargs):
dummy_reasons = " and ".join(self.dummy_reasons)
- kwargs['dummy_reason'] = dummy_reasons
+ kwargs["dummy_reason"] = dummy_reasons
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
@@ -87,11 +87,13 @@ class Linux(Platform):
# Bubble-wrap sandbox methods
def _check_sandbox_config_bwrap(self, config):
from ..sandbox._sandboxbwrap import SandboxBwrap
+
return SandboxBwrap.check_sandbox_config(self, config)
def _create_bwrap_sandbox(self, *args, **kwargs):
from ..sandbox._sandboxbwrap import SandboxBwrap
- kwargs['linux32'] = self.linux32
+
+ kwargs["linux32"] = self.linux32
return SandboxBwrap(*args, **kwargs)
def _setup_bwrap_sandbox(self):
@@ -110,15 +112,18 @@ class Linux(Platform):
# Chroot sandbox methods
def _check_sandbox_config_chroot(self, config):
from ..sandbox._sandboxchroot import SandboxChroot
+
return SandboxChroot.check_sandbox_config(self, config)
@staticmethod
def _create_chroot_sandbox(*args, **kwargs):
from ..sandbox._sandboxchroot import SandboxChroot
+
return SandboxChroot(*args, **kwargs)
def _setup_chroot_sandbox(self):
from ..sandbox._sandboxchroot import SandboxChroot
+
self._check_sandbox(SandboxChroot)
self.check_sandbox_config = self._check_sandbox_config_chroot
self.create_sandbox = Linux._create_chroot_sandbox
@@ -127,18 +132,23 @@ class Linux(Platform):
# Buildbox sandbox methods
def _check_sandbox_config_buildbox(self, config):
from ..sandbox._sandboxbuildbox import SandboxBuildBox
+
return SandboxBuildBox.check_sandbox_config(self, config)
@staticmethod
def _create_buildbox_sandbox(*args, **kwargs):
from ..sandbox._sandboxbuildbox import SandboxBuildBox
- if kwargs.get('allow_real_directory'):
- raise PlatformError("The BuildBox Sandbox does not support real directories.",
- reason="You are using BuildBox sandbox because BST_FORCE_SANBOX=buildbox")
+
+ if kwargs.get("allow_real_directory"):
+ raise PlatformError(
+ "The BuildBox Sandbox does not support real directories.",
+ reason="You are using BuildBox sandbox because BST_FORCE_SANBOX=buildbox",
+ )
return SandboxBuildBox(*args, **kwargs)
def _setup_buildbox_sandbox(self):
from ..sandbox._sandboxbuildbox import SandboxBuildBox
+
self._check_sandbox(SandboxBuildBox)
self.check_sandbox_config = self._check_sandbox_config_buildbox
self.create_sandbox = self._create_buildbox_sandbox
diff --git a/src/buildstream/_platform/platform.py b/src/buildstream/_platform/platform.py
index af49b9e82..ebac66843 100644
--- a/src/buildstream/_platform/platform.py
+++ b/src/buildstream/_platform/platform.py
@@ -29,7 +29,7 @@ from .._exceptions import PlatformError, ImplError, SandboxError
from .. import utils
-class Platform():
+class Platform:
# Platform()
#
# A class to manage platform-specific details. Currently holds the
@@ -45,7 +45,7 @@ class Platform():
self._setup_sandbox(force_sandbox)
def _setup_sandbox(self, force_sandbox):
- sandbox_setups = {'dummy': self._setup_dummy_sandbox}
+ sandbox_setups = {"dummy": self._setup_dummy_sandbox}
preferred_sandboxes = []
self._try_sandboxes(force_sandbox, sandbox_setups, preferred_sandboxes)
@@ -58,12 +58,16 @@ class Platform():
try:
sandbox_setups[force_sandbox]()
except KeyError:
- raise PlatformError("Forced Sandbox is unavailable on this platform: BST_FORCE_SANDBOX"
- " is set to {} but it is not available".format(force_sandbox))
+ raise PlatformError(
+ "Forced Sandbox is unavailable on this platform: BST_FORCE_SANDBOX"
+ " is set to {} but it is not available".format(force_sandbox)
+ )
except SandboxError as Error:
- raise PlatformError("Forced Sandbox Error: BST_FORCE_SANDBOX"
- " is set to {} but cannot be setup".format(force_sandbox),
- detail=" and ".join(self.dummy_reasons)) from Error
+ raise PlatformError(
+ "Forced Sandbox Error: BST_FORCE_SANDBOX"
+ " is set to {} but cannot be setup".format(force_sandbox),
+ detail=" and ".join(self.dummy_reasons),
+ ) from Error
else:
for good_sandbox in preferred_sandboxes:
try:
@@ -73,7 +77,7 @@ class Platform():
continue
except utils.ProgramNotFoundError:
continue
- sandbox_setups['dummy']()
+ sandbox_setups["dummy"]()
def _check_sandbox(self, Sandbox):
try:
@@ -87,30 +91,32 @@ class Platform():
# Meant for testing purposes and therefore hidden in the
# deepest corners of the source code. Try not to abuse this,
# please?
- if os.getenv('BST_FORCE_SANDBOX'):
- force_sandbox = os.getenv('BST_FORCE_SANDBOX')
+ if os.getenv("BST_FORCE_SANDBOX"):
+ force_sandbox = os.getenv("BST_FORCE_SANDBOX")
else:
force_sandbox = None
- if os.getenv('BST_FORCE_BACKEND'):
- backend = os.getenv('BST_FORCE_BACKEND')
- elif sys.platform.startswith('darwin'):
- backend = 'darwin'
- elif sys.platform.startswith('linux'):
- backend = 'linux'
- elif sys.platform == 'win32':
- backend = 'win32'
+ if os.getenv("BST_FORCE_BACKEND"):
+ backend = os.getenv("BST_FORCE_BACKEND")
+ elif sys.platform.startswith("darwin"):
+ backend = "darwin"
+ elif sys.platform.startswith("linux"):
+ backend = "linux"
+ elif sys.platform == "win32":
+ backend = "win32"
else:
- backend = 'fallback'
+ backend = "fallback"
- if backend == 'linux':
+ if backend == "linux":
from .linux import Linux as PlatformImpl # pylint: disable=cyclic-import
- elif backend == 'darwin':
+ elif backend == "darwin":
from .darwin import Darwin as PlatformImpl # pylint: disable=cyclic-import
- elif backend == 'win32':
+ elif backend == "win32":
from .win32 import Win32 as PlatformImpl # pylint: disable=cyclic-import
- elif backend == 'fallback':
- from .fallback import Fallback as PlatformImpl # pylint: disable=cyclic-import
+ elif backend == "fallback":
+ from .fallback import (
+ Fallback as PlatformImpl,
+ ) # pylint: disable=cyclic-import
else:
raise PlatformError("No such platform: '{}'".format(backend))
@@ -156,11 +162,11 @@ class Platform():
"sparc64": "sparc-v9",
"sparc-v9": "sparc-v9",
"x86-32": "x86-32",
- "x86-64": "x86-64"
+ "x86-64": "x86-64",
}
try:
- return aliases[arch.replace('_', '-').lower()]
+ return aliases[arch.replace("_", "-").lower()]
except KeyError:
raise PlatformError("Unknown architecture: {}".format(arch))
@@ -188,7 +194,7 @@ class Platform():
def does_multiprocessing_start_require_pickling(self):
# Note that if the start method has not been set before now, it will be
# set to the platform default by `get_start_method`.
- return multiprocessing.get_start_method() != 'fork'
+ return multiprocessing.get_start_method() != "fork"
##################################################################
# Sandbox functions #
@@ -206,12 +212,18 @@ class Platform():
# (Sandbox) A sandbox
#
def create_sandbox(self, *args, **kwargs):
- raise ImplError("Platform {platform} does not implement create_sandbox()"
- .format(platform=type(self).__name__))
+ raise ImplError(
+ "Platform {platform} does not implement create_sandbox()".format(
+ platform=type(self).__name__
+ )
+ )
def check_sandbox_config(self, config):
- raise ImplError("Platform {platform} does not implement check_sandbox_config()"
- .format(platform=type(self).__name__))
+ raise ImplError(
+ "Platform {platform} does not implement check_sandbox_config()".format(
+ platform=type(self).__name__
+ )
+ )
def maximize_open_file_limit(self):
# Need to set resources for _frontend/app.py as this is dependent on the platform
@@ -230,5 +242,8 @@ class Platform():
resource.setrlimit(resource.RLIMIT_NOFILE, (hard_limit, hard_limit))
def _setup_dummy_sandbox(self):
- raise ImplError("Platform {platform} does not implement _setup_dummy_sandbox()"
- .format(platform=type(self).__name__))
+ raise ImplError(
+ "Platform {platform} does not implement _setup_dummy_sandbox()".format(
+ platform=type(self).__name__
+ )
+ )
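
The backend-selection hunks above boil down to: an explicit BST_FORCE_BACKEND wins, otherwise sys.platform picks the implementation, with 'fallback' as the catch-all. A condensed, standalone sketch of that decision (not the actual Platform API):

import os
import sys

def select_backend():
    forced = os.getenv("BST_FORCE_BACKEND")
    if forced:
        return forced
    if sys.platform.startswith("darwin"):
        return "darwin"
    if sys.platform.startswith("linux"):
        return "linux"
    if sys.platform == "win32":
        return "win32"
    return "fallback"
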
diff --git a/src/buildstream/_platform/win32.py b/src/buildstream/_platform/win32.py
index 36680019d..a6aaf1662 100644
--- a/src/buildstream/_platform/win32.py
+++ b/src/buildstream/_platform/win32.py
@@ -20,7 +20,6 @@ from .platform import Platform
class Win32(Platform):
-
def maximize_open_file_limit(self):
# Note that on Windows, we don't have the 'resource' module to help us
# configure open file limits.
@@ -50,7 +49,9 @@ class Win32(Platform):
@staticmethod
def _create_dummy_sandbox(*args, **kwargs):
- kwargs['dummy_reason'] = "There are no supported sandbox technologies for Win32 at this time."
+ kwargs[
+ "dummy_reason"
+ ] = "There are no supported sandbox technologies for Win32 at this time."
return SandboxDummy(*args, **kwargs)
def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_plugincontext.py b/src/buildstream/_plugincontext.py
index b07c2b31a..f542f6fd4 100644
--- a/src/buildstream/_plugincontext.py
+++ b/src/buildstream/_plugincontext.py
@@ -41,10 +41,16 @@ from . import utils
# a given BuildStream project are isolated to their respective
# Pipelines.
#
-class PluginContext():
-
- def __init__(self, plugin_base, base_type, site_plugin_path, *,
- plugin_origins=None, format_versions={}):
+class PluginContext:
+ def __init__(
+ self,
+ plugin_base,
+ base_type,
+ site_plugin_path,
+ *,
+ plugin_origins=None,
+ format_versions={}
+ ):
# For pickling across processes, make sure this context has a unique
# identifier, which we prepend to the identifier of each PluginSource.
@@ -59,7 +65,7 @@ class PluginContext():
# Private members
#
self._base_type = base_type # The base class plugins derive from
- self._types = {} # Plugin type lookup table by kind
+ self._types = {} # Plugin type lookup table by kind
self._plugin_origins = plugin_origins or []
# The PluginSource object
@@ -72,8 +78,7 @@ class PluginContext():
def _init_site_source(self):
self._site_source = self._plugin_base.make_plugin_source(
- searchpath=self._site_plugin_path,
- identifier=self._identifier + 'site',
+ searchpath=self._site_plugin_path, identifier=self._identifier + "site",
)
def __getstate__(self):
@@ -93,11 +98,11 @@ class PluginContext():
# this by making sure we are not creating new members, only clearing
# existing ones.
#
- del state['_site_source']
- assert '_types' in state
- state['_types'] = {}
- assert '_alternate_sources' in state
- state['_alternate_sources'] = {}
+ del state["_site_source"]
+ assert "_types" in state
+ state["_types"] = {}
+ assert "_alternate_sources" in state
+ state["_alternate_sources"] = {}
return state
@@ -133,39 +138,44 @@ class PluginContext():
return self._types.values()
def _get_local_plugin_source(self, path):
- if ('local', path) not in self._alternate_sources:
+ if ("local", path) not in self._alternate_sources:
# key by a tuple to avoid collision
source = self._plugin_base.make_plugin_source(
- searchpath=[path],
- identifier=self._identifier + path,
+ searchpath=[path], identifier=self._identifier + path,
)
# Ensure that sources never get garbage collected,
# as they'll take the plugins with them.
- self._alternate_sources[('local', path)] = source
+ self._alternate_sources[("local", path)] = source
else:
- source = self._alternate_sources[('local', path)]
+ source = self._alternate_sources[("local", path)]
return source
def _get_pip_plugin_source(self, package_name, kind):
defaults = None
- if ('pip', package_name) not in self._alternate_sources:
+ if ("pip", package_name) not in self._alternate_sources:
import pkg_resources
+
# key by a tuple to avoid collision
try:
- package = pkg_resources.get_entry_info(package_name,
- 'buildstream.plugins',
- kind)
+ package = pkg_resources.get_entry_info(
+ package_name, "buildstream.plugins", kind
+ )
except pkg_resources.DistributionNotFound as e:
- raise PluginError("Failed to load {} plugin '{}': {}"
- .format(self._base_type.__name__, kind, e)) from e
+ raise PluginError(
+ "Failed to load {} plugin '{}': {}".format(
+ self._base_type.__name__, kind, e
+ )
+ ) from e
if package is None:
- raise PluginError("Pip package {} does not contain a plugin named '{}'"
- .format(package_name, kind))
+ raise PluginError(
+ "Pip package {} does not contain a plugin named '{}'".format(
+ package_name, kind
+ )
+ )
location = package.dist.get_resource_filename(
- pkg_resources._manager,
- package.module_name.replace('.', os.sep) + '.py'
+ pkg_resources._manager, package.module_name.replace(".", os.sep) + ".py"
)
# Also load the defaults - required since setuptools
@@ -173,7 +183,7 @@ class PluginContext():
try:
defaults = package.dist.get_resource_filename(
pkg_resources._manager,
- package.module_name.replace('.', os.sep) + '.yaml'
+ package.module_name.replace(".", os.sep) + ".yaml",
)
except KeyError:
# The plugin didn't have an accompanying YAML file
@@ -183,10 +193,10 @@ class PluginContext():
searchpath=[os.path.dirname(location)],
identifier=self._identifier + os.path.dirname(location),
)
- self._alternate_sources[('pip', package_name)] = source
+ self._alternate_sources[("pip", package_name)] = source
else:
- source = self._alternate_sources[('pip', package_name)]
+ source = self._alternate_sources[("pip", package_name)]
return source, defaults
@@ -199,27 +209,33 @@ class PluginContext():
loaded_dependency = False
for origin in self._plugin_origins:
- if kind not in origin.get_str_list('plugins'):
+ if kind not in origin.get_str_list("plugins"):
continue
- if origin.get_str('origin') == 'local':
- local_path = origin.get_str('path')
+ if origin.get_str("origin") == "local":
+ local_path = origin.get_str("path")
source = self._get_local_plugin_source(local_path)
- elif origin.get_str('origin') == 'pip':
- package_name = origin.get_str('package-name')
+ elif origin.get_str("origin") == "pip":
+ package_name = origin.get_str("package-name")
source, defaults = self._get_pip_plugin_source(package_name, kind)
else:
- raise PluginError("Failed to load plugin '{}': "
- "Unexpected plugin origin '{}'"
- .format(kind, origin.get_str('origin')))
+ raise PluginError(
+ "Failed to load plugin '{}': "
+ "Unexpected plugin origin '{}'".format(
+ kind, origin.get_str("origin")
+ )
+ )
loaded_dependency = True
break
# Fall back to getting the source from site
if not source:
if kind not in self._site_source.list_plugins():
- raise PluginError("No {} type registered for kind '{}'"
- .format(self._base_type.__name__, kind))
+ raise PluginError(
+ "No {} type registered for kind '{}'".format(
+ self._base_type.__name__, kind
+ )
+ )
source = self._site_source
@@ -241,17 +257,26 @@ class PluginContext():
defaults = os.path.join(plugin_dir, plugin_conf_name)
except ImportError as e:
- raise PluginError("Failed to load {} plugin '{}': {}"
- .format(self._base_type.__name__, kind, e)) from e
+ raise PluginError(
+ "Failed to load {} plugin '{}': {}".format(
+ self._base_type.__name__, kind, e
+ )
+ ) from e
try:
plugin_type = plugin.setup()
except AttributeError as e:
- raise PluginError("{} plugin '{}' did not provide a setup() function"
- .format(self._base_type.__name__, kind)) from e
+ raise PluginError(
+ "{} plugin '{}' did not provide a setup() function".format(
+ self._base_type.__name__, kind
+ )
+ ) from e
except TypeError as e:
- raise PluginError("setup symbol in {} plugin '{}' is not a function"
- .format(self._base_type.__name__, kind)) from e
+ raise PluginError(
+ "setup symbol in {} plugin '{}' is not a function".format(
+ self._base_type.__name__, kind
+ )
+ ) from e
self._assert_plugin(kind, plugin_type)
self._assert_version(kind, plugin_type)
@@ -259,19 +284,28 @@ class PluginContext():
def _assert_plugin(self, kind, plugin_type):
if kind in self._types:
- raise PluginError("Tried to register {} plugin for existing kind '{}' "
- "(already registered {})"
- .format(self._base_type.__name__, kind, self._types[kind].__name__))
+ raise PluginError(
+ "Tried to register {} plugin for existing kind '{}' "
+ "(already registered {})".format(
+ self._base_type.__name__, kind, self._types[kind].__name__
+ )
+ )
try:
if not issubclass(plugin_type, self._base_type):
- raise PluginError("{} plugin '{}' returned type '{}', which is not a subclass of {}"
- .format(self._base_type.__name__, kind,
- plugin_type.__name__,
- self._base_type.__name__))
+ raise PluginError(
+ "{} plugin '{}' returned type '{}', which is not a subclass of {}".format(
+ self._base_type.__name__,
+ kind,
+ plugin_type.__name__,
+ self._base_type.__name__,
+ )
+ )
except TypeError as e:
- raise PluginError("{} plugin '{}' returned something that is not a type (expected subclass of {})"
- .format(self._base_type.__name__, kind,
- self._base_type.__name__)) from e
+ raise PluginError(
+ "{} plugin '{}' returned something that is not a type (expected subclass of {})".format(
+ self._base_type.__name__, kind, self._base_type.__name__
+ )
+ ) from e
def _assert_version(self, kind, plugin_type):
@@ -282,12 +316,16 @@ class PluginContext():
req_minor = plugin_type.BST_REQUIRED_VERSION_MINOR
if (bst_major, bst_minor) < (req_major, req_minor):
- raise PluginError("BuildStream {}.{} is too old for {} plugin '{}' (requires {}.{})"
- .format(
- bst_major, bst_minor,
- self._base_type.__name__, kind,
- plugin_type.BST_REQUIRED_VERSION_MAJOR,
- plugin_type.BST_REQUIRED_VERSION_MINOR))
+ raise PluginError(
+ "BuildStream {}.{} is too old for {} plugin '{}' (requires {}.{})".format(
+ bst_major,
+ bst_minor,
+ self._base_type.__name__,
+ kind,
+ plugin_type.BST_REQUIRED_VERSION_MAJOR,
+ plugin_type.BST_REQUIRED_VERSION_MINOR,
+ )
+ )
# _assert_plugin_format()
#
@@ -296,6 +334,9 @@ class PluginContext():
#
def _assert_plugin_format(self, plugin, version):
if plugin.BST_FORMAT_VERSION < version:
- raise LoadError("{}: Format version {} is too old for requested version {}"
- .format(plugin, plugin.BST_FORMAT_VERSION, version),
- LoadErrorReason.UNSUPPORTED_PLUGIN)
+ raise LoadError(
+ "{}: Format version {} is too old for requested version {}".format(
+ plugin, plugin.BST_FORMAT_VERSION, version
+ ),
+ LoadErrorReason.UNSUPPORTED_PLUGIN,
+ )
diff --git a/src/buildstream/_profile.py b/src/buildstream/_profile.py
index b17215d0e..b8a9537a8 100644
--- a/src/buildstream/_profile.py
+++ b/src/buildstream/_profile.py
@@ -39,15 +39,15 @@ import time
# BST_PROFILE=circ-dep-check:sort-deps bst <command> <args>
#
# The special 'all' value will enable all profiles.
-class Topics():
- CIRCULAR_CHECK = 'circ-dep-check'
- SORT_DEPENDENCIES = 'sort-deps'
- LOAD_CONTEXT = 'load-context'
- LOAD_PROJECT = 'load-project'
- LOAD_PIPELINE = 'load-pipeline'
- LOAD_SELECTION = 'load-selection'
- SCHEDULER = 'scheduler'
- ALL = 'all'
+class Topics:
+ CIRCULAR_CHECK = "circ-dep-check"
+ SORT_DEPENDENCIES = "sort-deps"
+ LOAD_CONTEXT = "load-context"
+ LOAD_PROJECT = "load-project"
+ LOAD_PIPELINE = "load-pipeline"
+ LOAD_SELECTION = "load-selection"
+ SCHEDULER = "scheduler"
+ ALL = "all"
class _Profile:
@@ -62,9 +62,11 @@ class _Profile:
filename_template = os.path.join(
os.getcwd(),
"profile-{}-{}".format(
- datetime.datetime.fromtimestamp(self.start_time).strftime("%Y%m%dT%H%M%S"),
- self.key.replace("/", "-").replace(".", "-")
- )
+ datetime.datetime.fromtimestamp(self.start_time).strftime(
+ "%Y%m%dT%H%M%S"
+ ),
+ self.key.replace("/", "-").replace(".", "-"),
+ ),
)
self.log_filename = "{}.log".format(filename_template)
self.cprofile_filename = "{}.cprofile".format(filename_template)
@@ -86,17 +88,21 @@ class _Profile:
self.profiler.disable()
def save(self):
- heading = "\n".join([
- "-" * 64,
- "Profile for key: {}".format(self.key),
- "Started at: {}".format(self.start_time),
- "\n\t{}".format(self.message) if self.message else "",
- "-" * 64,
- "" # for a final new line
- ])
+ heading = "\n".join(
+ [
+ "-" * 64,
+ "Profile for key: {}".format(self.key),
+ "Started at: {}".format(self.start_time),
+ "\n\t{}".format(self.message) if self.message else "",
+ "-" * 64,
+ "", # for a final new line
+ ]
+ )
with open(self.log_filename, "a") as fp:
- stats = pstats.Stats(self.profiler, *self._additional_pstats_files, stream=fp)
+ stats = pstats.Stats(
+ self.profiler, *self._additional_pstats_files, stream=fp
+ )
# Create the log file
fp.write(heading)
@@ -114,10 +120,7 @@ class _Profiler:
self._active_profilers = []
if settings:
- self.enabled_topics = {
- topic
- for topic in settings.split(":")
- }
+ self.enabled_topics = {topic for topic in settings.split(":")}
@contextlib.contextmanager
def profile(self, topic, key, message=None):
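
The _Profiler constructor above turns the colon-separated BST_PROFILE setting into a set of enabled topics. A one-line sketch of the same parsing (variable names are illustrative; the extra emptiness check is an assumption, not in the original):

import os

settings = os.getenv("BST_PROFILE", "")
enabled_topics = {topic for topic in settings.split(":") if topic}
# BST_PROFILE=circ-dep-check:sort-deps  ->  {"circ-dep-check", "sort-deps"}
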
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index cf0001e71..bc361d288 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -49,7 +49,7 @@ from ._workspaces import WORKSPACE_PROJECT_FILE
# Project Configuration file
-_PROJECT_CONF_FILE = 'project.conf'
+_PROJECT_CONF_FILE = "project.conf"
# List of all places plugins can come from
@@ -64,8 +64,7 @@ class PluginOrigins(FastEnum):
# A simple object describing the behavior of
# a host mount.
#
-class HostMount():
-
+class HostMount:
def __init__(self, path, host_path=None, optional=False):
# Support environment variable expansion in host mounts
@@ -73,9 +72,9 @@ class HostMount():
if host_path is not None:
host_path = os.path.expandvars(host_path)
- self.path = path # Path inside the sandbox
- self.host_path = host_path # Path on the host
- self.optional = optional # Optional mounts do not incur warnings or errors
+ self.path = path # Path inside the sandbox
+ self.host_path = host_path # Path on the host
+ self.optional = optional # Optional mounts do not incur warnings or errors
if self.host_path is None:
self.host_path = self.path
@@ -86,24 +85,32 @@ class ProjectConfig:
def __init__(self):
self.element_factory = None
self.source_factory = None
- self.options = None # OptionPool
- self.base_variables = {} # The base set of variables
- self.element_overrides = {} # Element specific configurations
- self.source_overrides = {} # Source specific configurations
- self.mirrors = OrderedDict() # contains dicts of alias-mappings to URIs.
- self.default_mirror = None # The name of the preferred mirror.
- self._aliases = None # Aliases dictionary
+ self.options = None # OptionPool
+ self.base_variables = {} # The base set of variables
+ self.element_overrides = {} # Element specific configurations
+ self.source_overrides = {} # Source specific configurations
+ self.mirrors = OrderedDict() # contains dicts of alias-mappings to URIs.
+ self.default_mirror = None # The name of the preferred mirror.
+ self._aliases = None # Aliases dictionary
# Project()
#
# The Project Configuration
#
-class Project():
-
- def __init__(self, directory, context, *, junction=None, cli_options=None,
- default_mirror=None, parent_loader=None,
- search_for_project=True, fetch_subprojects=None):
+class Project:
+ def __init__(
+ self,
+ directory,
+ context,
+ *,
+ junction=None,
+ cli_options=None,
+ default_mirror=None,
+ parent_loader=None,
+ search_for_project=True,
+ fetch_subprojects=None
+ ):
# The project name
self.name = None
@@ -111,7 +118,10 @@ class Project():
self._context = context # The invocation Context, a private member
if search_for_project:
- self.directory, self._invoked_from_workspace_element = self._find_project_dir(directory)
+ (
+ self.directory,
+ self._invoked_from_workspace_element,
+ ) = self._find_project_dir(directory)
else:
self.directory = directory
self._invoked_from_workspace_element = None
@@ -125,31 +135,31 @@ class Project():
self._default_targets = None
# ProjectRefs for the main refs and also for junctions
- self.refs = ProjectRefs(self.directory, 'project.refs')
- self.junction_refs = ProjectRefs(self.directory, 'junction.refs')
+ self.refs = ProjectRefs(self.directory, "project.refs")
+ self.junction_refs = ProjectRefs(self.directory, "junction.refs")
self.config = ProjectConfig()
self.first_pass_config = ProjectConfig()
- self.junction = junction # The junction Element object, if this is a subproject
+ self.junction = junction # The junction Element object, if this is a subproject
- self.ref_storage = None # ProjectRefStorage setting
- self.base_environment = {} # The base set of environment variables
- self.base_env_nocache = None # The base nocache mask (list) for the environment
+ self.ref_storage = None # ProjectRefStorage setting
+ self.base_environment = {} # The base set of environment variables
+ self.base_env_nocache = None # The base nocache mask (list) for the environment
#
# Private Members
#
- self._default_mirror = default_mirror # The name of the preferred mirror.
+ self._default_mirror = default_mirror # The name of the preferred mirror.
self._cli_options = cli_options
- self._fatal_warnings = [] # A list of warnings which should trigger an error
+ self._fatal_warnings = [] # A list of warnings which should trigger an error
- self._shell_command = [] # The default interactive shell command
+ self._shell_command = [] # The default interactive shell command
self._shell_environment = {} # Statically set environment vars
- self._shell_host_files = [] # A list of HostMount objects
+ self._shell_host_files = [] # A list of HostMount objects
self.artifact_cache_specs = None
self.source_cache_specs = None
@@ -163,7 +173,7 @@ class Project():
self._fully_loaded = False
self._project_includes = None
- with PROFILER.profile(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-')):
+ with PROFILER.profile(Topics.LOAD_PROJECT, self.directory.replace(os.sep, "-")):
self._load(parent_loader=parent_loader, fetch_subprojects=fetch_subprojects)
self._partially_loaded = True
@@ -252,80 +262,121 @@ class Project():
# (LoadError): In case that the project path is not valid or does not
# exist
#
- def get_path_from_node(self, node, *,
- check_is_file=False, check_is_dir=False):
+ def get_path_from_node(self, node, *, check_is_file=False, check_is_dir=False):
path_str = node.as_str()
path = Path(path_str)
full_path = self._absolute_directory_path / path
if full_path.is_symlink():
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' must not point to "
- "symbolic links ".format(provenance, path_str),
- LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ raise LoadError(
+ "{}: Specified path '{}' must not point to "
+ "symbolic links ".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID_KIND,
+ )
- if path.parts and path.parts[0] == '..':
+ if path.parts and path.parts[0] == "..":
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' first component must "
- "not be '..'".format(provenance, path_str),
- LoadErrorReason.PROJ_PATH_INVALID)
+ raise LoadError(
+ "{}: Specified path '{}' first component must "
+ "not be '..'".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID,
+ )
try:
if sys.version_info[0] == 3 and sys.version_info[1] < 6:
full_resolved_path = full_path.resolve()
else:
- full_resolved_path = full_path.resolve(strict=True) # pylint: disable=unexpected-keyword-arg
+ full_resolved_path = full_path.resolve(
+ strict=True
+ ) # pylint: disable=unexpected-keyword-arg
except FileNotFoundError:
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' does not exist".format(provenance, path_str),
- LoadErrorReason.MISSING_FILE)
+ raise LoadError(
+ "{}: Specified path '{}' does not exist".format(provenance, path_str),
+ LoadErrorReason.MISSING_FILE,
+ )
is_inside = self._absolute_directory_path in full_resolved_path.parents or (
- full_resolved_path == self._absolute_directory_path)
+ full_resolved_path == self._absolute_directory_path
+ )
if not is_inside:
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' must not lead outside of the "
- "project directory".format(provenance, path_str),
- LoadErrorReason.PROJ_PATH_INVALID)
+ raise LoadError(
+ "{}: Specified path '{}' must not lead outside of the "
+ "project directory".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID,
+ )
if path.is_absolute():
provenance = node.get_provenance()
- raise LoadError("{}: Absolute path: '{}' invalid.\n"
- "Please specify a path relative to the project's root."
- .format(provenance, path), LoadErrorReason.PROJ_PATH_INVALID)
+ raise LoadError(
+ "{}: Absolute path: '{}' invalid.\n"
+ "Please specify a path relative to the project's root.".format(
+ provenance, path
+ ),
+ LoadErrorReason.PROJ_PATH_INVALID,
+ )
if full_resolved_path.is_socket() or (
- full_resolved_path.is_fifo() or
- full_resolved_path.is_block_device()):
+ full_resolved_path.is_fifo() or full_resolved_path.is_block_device()
+ ):
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' points to an unsupported "
- "file kind".format(provenance, path_str), LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ raise LoadError(
+ "{}: Specified path '{}' points to an unsupported "
+ "file kind".format(provenance, path_str),
+ LoadErrorReason.PROJ_PATH_INVALID_KIND,
+ )
if check_is_file and not full_resolved_path.is_file():
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' is not a regular file"
- .format(provenance, path_str), LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ raise LoadError(
+ "{}: Specified path '{}' is not a regular file".format(
+ provenance, path_str
+ ),
+ LoadErrorReason.PROJ_PATH_INVALID_KIND,
+ )
if check_is_dir and not full_resolved_path.is_dir():
provenance = node.get_provenance()
- raise LoadError("{}: Specified path '{}' is not a directory"
- .format(provenance, path_str), LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ raise LoadError(
+ "{}: Specified path '{}' is not a directory".format(
+ provenance, path_str
+ ),
+ LoadErrorReason.PROJ_PATH_INVALID_KIND,
+ )
return path_str
def _validate_node(self, node):
- node.validate_keys([
- 'format-version',
- 'element-path', 'variables',
- 'environment', 'environment-nocache',
- 'split-rules', 'elements', 'plugins',
- 'aliases', 'name', 'defaults',
- 'artifacts', 'options',
- 'fail-on-overlap', 'shell', 'fatal-warnings',
- 'ref-storage', 'sandbox', 'mirrors', 'remote-execution',
- 'sources', 'source-caches', '(@)'
- ])
+ node.validate_keys(
+ [
+ "format-version",
+ "element-path",
+ "variables",
+ "environment",
+ "environment-nocache",
+ "split-rules",
+ "elements",
+ "plugins",
+ "aliases",
+ "name",
+ "defaults",
+ "artifacts",
+ "options",
+ "fail-on-overlap",
+ "shell",
+ "fatal-warnings",
+ "ref-storage",
+ "sandbox",
+ "mirrors",
+ "remote-execution",
+ "sources",
+ "source-caches",
+ "(@)",
+ ]
+ )
# create_element()
#
@@ -340,7 +391,9 @@ class Project():
#
def create_element(self, meta, *, first_pass=False):
if first_pass:
- return self.first_pass_config.element_factory.create(self._context, self, meta)
+ return self.first_pass_config.element_factory.create(
+ self._context, self, meta
+ )
else:
return self.config.element_factory.create(self._context, self, meta)
@@ -370,7 +423,9 @@ class Project():
#
def create_source(self, meta, *, first_pass=False):
if first_pass:
- return self.first_pass_config.source_factory.create(self._context, self, meta)
+ return self.first_pass_config.source_factory.create(
+ self._context, self, meta
+ )
else:
return self.config.source_factory.create(self._context, self, meta)
@@ -406,7 +461,9 @@ class Project():
else:
config = self.config
- if not alias or alias not in config._aliases: # pylint: disable=unsupported-membership-test
+ if (
+ not alias or alias not in config._aliases
+ ): # pylint: disable=unsupported-membership-test
return [None]
mirror_list = []
@@ -433,17 +490,21 @@ class Project():
# (list): A list of loaded Element
#
def load_elements(self, targets, *, rewritable=False, ignore_workspaces=False):
- with self._context.messenger.simple_task("Loading elements", silent_nested=True) as task:
- meta_elements = self.loader.load(targets, task, rewritable=rewritable, ticker=None,
- ignore_workspaces=ignore_workspaces)
+ with self._context.messenger.simple_task(
+ "Loading elements", silent_nested=True
+ ) as task:
+ meta_elements = self.loader.load(
+ targets,
+ task,
+ rewritable=rewritable,
+ ticker=None,
+ ignore_workspaces=ignore_workspaces,
+ )
with self._context.messenger.simple_task("Resolving elements") as task:
if task:
task.set_maximum_progress(self.loader.loaded)
- elements = [
- Element._new_from_meta(meta, task)
- for meta in meta_elements
- ]
+ elements = [Element._new_from_meta(meta, task) for meta in meta_elements]
Element._clear_meta_elements_cache()
@@ -451,14 +512,21 @@ class Project():
# been discovered in the resolve() phase.
redundant_refs = Element._get_redundant_source_refs()
if redundant_refs:
- detail = "The following inline specified source references will be ignored:\n\n"
+ detail = (
+ "The following inline specified source references will be ignored:\n\n"
+ )
lines = [
"{}:{}".format(source._get_provenance(), ref)
for source, ref in redundant_refs
]
detail += "\n".join(lines)
self._context.messenger.message(
- Message(MessageType.WARN, "Ignoring redundant source references", detail=detail))
+ Message(
+ MessageType.WARN,
+ "Ignoring redundant source references",
+ detail=detail,
+ )
+ )
return elements
@@ -483,7 +551,9 @@ class Project():
#
artifacts = []
for ref in targets:
- artifacts.append(ArtifactElement._new_from_artifact_ref(ref, self._context, task))
+ artifacts.append(
+ ArtifactElement._new_from_artifact_ref(ref, self._context, task)
+ )
ArtifactElement._clear_artifact_refs_cache()
@@ -592,49 +662,63 @@ class Project():
self._project_conf._composite(pre_config_node)
# Assert project's format version early, before validating toplevel keys
- format_version = pre_config_node.get_int('format-version')
+ format_version = pre_config_node.get_int("format-version")
if format_version < BST_FORMAT_VERSION_MIN:
major, minor = utils.get_bst_version()
raise LoadError(
"Project requested format version {}, but BuildStream {}.{} only supports format version {} or above."
- "Use latest 1.x release"
- .format(format_version, major, minor, BST_FORMAT_VERSION_MIN), LoadErrorReason.UNSUPPORTED_PROJECT)
+ "Use latest 1.x release".format(
+ format_version, major, minor, BST_FORMAT_VERSION_MIN
+ ),
+ LoadErrorReason.UNSUPPORTED_PROJECT,
+ )
if BST_FORMAT_VERSION < format_version:
major, minor = utils.get_bst_version()
raise LoadError(
- "Project requested format version {}, but BuildStream {}.{} only supports up until format version {}"
- .format(format_version, major, minor, BST_FORMAT_VERSION), LoadErrorReason.UNSUPPORTED_PROJECT)
+ "Project requested format version {}, but BuildStream {}.{} only supports up until format version {}".format(
+ format_version, major, minor, BST_FORMAT_VERSION
+ ),
+ LoadErrorReason.UNSUPPORTED_PROJECT,
+ )
self._validate_node(pre_config_node)
# The project name, element path and option declarations
# are constant and cannot be overridden by option conditional statements
# FIXME: we should be keeping node information for further composition here
- self.name = self._project_conf.get_str('name')
+ self.name = self._project_conf.get_str("name")
# Validate that project name is a valid symbol name
- _assert_symbol_name(self.name, "project name",
- ref_node=pre_config_node.get_node('name'))
+ _assert_symbol_name(
+ self.name, "project name", ref_node=pre_config_node.get_node("name")
+ )
self.element_path = os.path.join(
self.directory,
- self.get_path_from_node(pre_config_node.get_scalar('element-path'),
- check_is_dir=True)
+ self.get_path_from_node(
+ pre_config_node.get_scalar("element-path"), check_is_dir=True
+ ),
)
self.config.options = OptionPool(self.element_path)
self.first_pass_config.options = OptionPool(self.element_path)
- defaults = pre_config_node.get_mapping('defaults')
- defaults.validate_keys(['targets'])
+ defaults = pre_config_node.get_mapping("defaults")
+ defaults.validate_keys(["targets"])
self._default_targets = defaults.get_str_list("targets")
# Fatal warnings
- self._fatal_warnings = pre_config_node.get_str_list('fatal-warnings', default=[])
+ self._fatal_warnings = pre_config_node.get_str_list(
+ "fatal-warnings", default=[]
+ )
- self.loader = Loader(self._context, self,
- parent=parent_loader, fetch_subprojects=fetch_subprojects)
+ self.loader = Loader(
+ self._context,
+ self,
+ parent=parent_loader,
+ fetch_subprojects=fetch_subprojects,
+ )
self._project_includes = Includes(self.loader, copy_tree=False)
@@ -643,16 +727,22 @@ class Project():
config_no_include = self._default_config_node.clone()
project_conf_first_pass._composite(config_no_include)
- self._load_pass(config_no_include, self.first_pass_config,
- ignore_unknown=True)
+ self._load_pass(config_no_include, self.first_pass_config, ignore_unknown=True)
# Use separate file for storing source references
- ref_storage_node = pre_config_node.get_scalar('ref-storage')
+ ref_storage_node = pre_config_node.get_scalar("ref-storage")
self.ref_storage = ref_storage_node.as_str()
- if self.ref_storage not in [ProjectRefStorage.INLINE, ProjectRefStorage.PROJECT_REFS]:
+ if self.ref_storage not in [
+ ProjectRefStorage.INLINE,
+ ProjectRefStorage.PROJECT_REFS,
+ ]:
p = ref_storage_node.get_provenance()
- raise LoadError("{}: Invalid value '{}' specified for ref-storage"
- .format(p, self.ref_storage), LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "{}: Invalid value '{}' specified for ref-storage".format(
+ p, self.ref_storage
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
self.junction_refs.load(self.first_pass_config.options)
@@ -677,25 +767,32 @@ class Project():
#
# Load artifacts pull/push configuration for this project
- self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
+ self.artifact_cache_specs = ArtifactCache.specs_from_config_node(
+ config, self.directory
+ )
# If there is a junction Element which specifies that we want to remotely cache
# its elements, append the junction's remotes to the artifact cache specs list
if self.junction:
parent = self.junction._get_project()
if self.junction.cache_junction_elements:
- self.artifact_cache_specs = parent.artifact_cache_specs + self.artifact_cache_specs
+ self.artifact_cache_specs = (
+ parent.artifact_cache_specs + self.artifact_cache_specs
+ )
if self.junction.ignore_junction_remotes:
self.artifact_cache_specs = []
# Load source caches with pull/push config
- self.source_cache_specs = SourceCache.specs_from_config_node(config, self.directory)
+ self.source_cache_specs = SourceCache.specs_from_config_node(
+ config, self.directory
+ )
# Load remote-execution configuration for this project
project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
override_specs = SandboxRemote.specs_from_config_node(
- self._context.get_overrides(self.name), self.directory)
+ self._context.get_overrides(self.name), self.directory
+ )
if override_specs is not None:
self.remote_execution_specs = override_specs
@@ -705,29 +802,31 @@ class Project():
self.remote_execution_specs = self._context.remote_execution_specs
# Load sandbox environment variables
- self.base_environment = config.get_mapping('environment')
- self.base_env_nocache = config.get_str_list('environment-nocache')
+ self.base_environment = config.get_mapping("environment")
+ self.base_env_nocache = config.get_str_list("environment-nocache")
# Load sandbox configuration
- self._sandbox = config.get_mapping('sandbox')
+ self._sandbox = config.get_mapping("sandbox")
# Load project split rules
- self._splits = config.get_mapping('split-rules')
+ self._splits = config.get_mapping("split-rules")
# Support backwards compatibility for fail-on-overlap
- fail_on_overlap = config.get_scalar('fail-on-overlap', None)
+ fail_on_overlap = config.get_scalar("fail-on-overlap", None)
# Deprecation check
if not fail_on_overlap.is_none():
self._context.messenger.message(
Message(
MessageType.WARN,
- "Use of fail-on-overlap within project.conf " +
- "is deprecated. Consider using fatal-warnings instead."
+ "Use of fail-on-overlap within project.conf "
+ + "is deprecated. Consider using fatal-warnings instead.",
)
)
- if (CoreWarnings.OVERLAPS not in self._fatal_warnings) and fail_on_overlap.as_bool():
+ if (
+ CoreWarnings.OVERLAPS not in self._fatal_warnings
+ ) and fail_on_overlap.as_bool():
self._fatal_warnings.append(CoreWarnings.OVERLAPS)
# Load project.refs if it exists, this may be ignored.
@@ -735,29 +834,29 @@ class Project():
self.refs.load(self.options)
# Parse shell options
- shell_options = config.get_mapping('shell')
- shell_options.validate_keys(['command', 'environment', 'host-files'])
- self._shell_command = shell_options.get_str_list('command')
+ shell_options = config.get_mapping("shell")
+ shell_options.validate_keys(["command", "environment", "host-files"])
+ self._shell_command = shell_options.get_str_list("command")
# Perform environment expansion right away
- shell_environment = shell_options.get_mapping('environment', default={})
+ shell_environment = shell_options.get_mapping("environment", default={})
for key in shell_environment.keys():
value = shell_environment.get_str(key)
self._shell_environment[key] = os.path.expandvars(value)
# Host files is parsed as a list for convenience
- host_files = shell_options.get_sequence('host-files', default=[])
+ host_files = shell_options.get_sequence("host-files", default=[])
for host_file in host_files:
if isinstance(host_file, ScalarNode):
mount = HostMount(host_file)
else:
# Some validation
- host_file.validate_keys(['path', 'host_path', 'optional'])
+ host_file.validate_keys(["path", "host_path", "optional"])
# Parse the host mount
- path = host_file.get_str('path')
- host_path = host_file.get_str('host_path', default=None)
- optional = host_file.get_bool('optional', default=False)
+ path = host_file.get_str("path")
+ host_path = host_file.get_str("host_path", default=None)
+ optional = host_file.get_bool("optional", default=False)
mount = HostMount(path, host_path, optional)
self._shell_host_files.append(mount)
@@ -772,33 +871,36 @@ class Project():
# output (ProjectConfig) - ProjectConfig to load configuration onto.
# ignore_unknown (bool) - Whether option loader should ignore unknown options.
#
- def _load_pass(self, config, output, *,
- ignore_unknown=False):
+ def _load_pass(self, config, output, *, ignore_unknown=False):
# Element and Source type configurations will be composited later onto
# element/source types, so we delete it from here and run our final
# assertion after.
- output.element_overrides = config.get_mapping('elements', default={})
- output.source_overrides = config.get_mapping('sources', default={})
- config.safe_del('elements')
- config.safe_del('sources')
+ output.element_overrides = config.get_mapping("elements", default={})
+ output.source_overrides = config.get_mapping("sources", default={})
+ config.safe_del("elements")
+ config.safe_del("sources")
config._assert_fully_composited()
self._load_plugin_factories(config, output)
# Load project options
- options_node = config.get_mapping('options', default={})
+ options_node = config.get_mapping("options", default={})
output.options.load(options_node)
if self.junction:
# load before user configuration
- output.options.load_yaml_values(self.junction.options, transform=self.junction.node_subst_vars)
+ output.options.load_yaml_values(
+ self.junction.options, transform=self.junction.node_subst_vars
+ )
# Collect option values specified in the user configuration
overrides = self._context.get_overrides(self.name)
- override_options = overrides.get_mapping('options', default={})
+ override_options = overrides.get_mapping("options", default={})
output.options.load_yaml_values(override_options)
if self._cli_options:
- output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
+ output.options.load_cli_values(
+ self._cli_options, ignore_unknown=ignore_unknown
+ )
# We're done modifying options, now we can use them for substitutions
output.options.resolve()
@@ -814,10 +916,10 @@ class Project():
output.options.process_node(output.source_overrides)
# Load base variables
- output.base_variables = config.get_mapping('variables')
+ output.base_variables = config.get_mapping("variables")
# Add the project name as a default variable
- output.base_variables['project-name'] = self.name
+ output.base_variables["project-name"] = self.name
# Extend variables with automatic variables and option exports
# Initialize it as a string as all variables are processed as strings.
@@ -827,35 +929,36 @@ class Project():
if self._context.build_max_jobs == 0:
# User requested automatic max-jobs
platform = self._context.platform
- output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))
+ output.base_variables["max-jobs"] = str(platform.get_cpu_count(8))
else:
# User requested explicit max-jobs setting
- output.base_variables['max-jobs'] = str(self._context.build_max_jobs)
+ output.base_variables["max-jobs"] = str(self._context.build_max_jobs)
# Export options into variables, if that was requested
output.options.export_variables(output.base_variables)
# Override default_mirror if not set by command-line
output.default_mirror = self._default_mirror or overrides.get_str(
- 'default-mirror', default=None)
+ "default-mirror", default=None
+ )
- mirrors = config.get_sequence('mirrors', default=[])
+ mirrors = config.get_sequence("mirrors", default=[])
for mirror in mirrors:
- allowed_mirror_fields = [
- 'name', 'aliases'
- ]
+ allowed_mirror_fields = ["name", "aliases"]
mirror.validate_keys(allowed_mirror_fields)
- mirror_name = mirror.get_str('name')
+ mirror_name = mirror.get_str("name")
alias_mappings = {}
- for alias_mapping, uris in mirror.get_mapping('aliases').items():
- assert type(uris) is SequenceNode # pylint: disable=unidiomatic-typecheck
+ for alias_mapping, uris in mirror.get_mapping("aliases").items():
+ assert (
+ type(uris) is SequenceNode
+ ) # pylint: disable=unidiomatic-typecheck
alias_mappings[alias_mapping] = uris.as_str_list()
output.mirrors[mirror_name] = alias_mappings
if not output.default_mirror:
output.default_mirror = mirror_name
# Source url aliases
- output._aliases = config.get_mapping('aliases', default={})
+ output._aliases = config.get_mapping("aliases", default={})
# _find_project_dir()
#
@@ -887,57 +990,72 @@ class Project():
project_directory = workspace_project.get_default_project_path()
workspace_element = workspace_project.get_default_element()
else:
- raise LoadError("None of {names} found in '{path}' or any of its parent directories"
- .format(names=config_filenames, path=directory), LoadErrorReason.MISSING_PROJECT_CONF)
+ raise LoadError(
+ "None of {names} found in '{path}' or any of its parent directories".format(
+ names=config_filenames, path=directory
+ ),
+ LoadErrorReason.MISSING_PROJECT_CONF,
+ )
return project_directory, workspace_element
def _load_plugin_factories(self, config, output):
- plugin_source_origins = [] # Origins of custom sources
+ plugin_source_origins = [] # Origins of custom sources
plugin_element_origins = [] # Origins of custom elements
# Plugin origins and versions
- origins = config.get_sequence('plugins', default=[])
+ origins = config.get_sequence("plugins", default=[])
source_format_versions = {}
element_format_versions = {}
for origin in origins:
allowed_origin_fields = [
- 'origin', 'sources', 'elements',
- 'package-name', 'path',
+ "origin",
+ "sources",
+ "elements",
+ "package-name",
+ "path",
]
origin.validate_keys(allowed_origin_fields)
# Store source versions for checking later
- source_versions = origin.get_mapping('sources', default={})
+ source_versions = origin.get_mapping("sources", default={})
for key in source_versions.keys():
if key in source_format_versions:
- raise LoadError("Duplicate listing of source '{}'".format(key),
- LoadErrorReason.INVALID_YAML)
+ raise LoadError(
+ "Duplicate listing of source '{}'".format(key),
+ LoadErrorReason.INVALID_YAML,
+ )
source_format_versions[key] = source_versions.get_int(key)
# Store element versions for checking later
- element_versions = origin.get_mapping('elements', default={})
+ element_versions = origin.get_mapping("elements", default={})
for key in element_versions.keys():
if key in element_format_versions:
- raise LoadError("Duplicate listing of element '{}'".format(key),
- LoadErrorReason.INVALID_YAML)
+ raise LoadError(
+ "Duplicate listing of element '{}'".format(key),
+ LoadErrorReason.INVALID_YAML,
+ )
element_format_versions[key] = element_versions.get_int(key)
# Store the origins if they're not 'core'.
# core elements are loaded by default, so storing is unnecessary.
- origin_value = origin.get_enum('origin', PluginOrigins)
+ origin_value = origin.get_enum("origin", PluginOrigins)
if origin_value != PluginOrigins.CORE:
- self._store_origin(origin, 'sources', plugin_source_origins)
- self._store_origin(origin, 'elements', plugin_element_origins)
-
- pluginbase = PluginBase(package='buildstream.plugins')
- output.element_factory = ElementFactory(pluginbase,
- plugin_origins=plugin_element_origins,
- format_versions=element_format_versions)
- output.source_factory = SourceFactory(pluginbase,
- plugin_origins=plugin_source_origins,
- format_versions=source_format_versions)
+ self._store_origin(origin, "sources", plugin_source_origins)
+ self._store_origin(origin, "elements", plugin_element_origins)
+
+ pluginbase = PluginBase(package="buildstream.plugins")
+ output.element_factory = ElementFactory(
+ pluginbase,
+ plugin_origins=plugin_element_origins,
+ format_versions=element_format_versions,
+ )
+ output.source_factory = SourceFactory(
+ pluginbase,
+ plugin_origins=plugin_source_origins,
+ format_versions=source_format_versions,
+ )
# _store_origin()
#
@@ -953,25 +1071,29 @@ class Project():
# Raises:
# LoadError if 'origin' is an unexpected value
def _store_origin(self, origin, plugin_group, destination):
- expected_groups = ['sources', 'elements']
+ expected_groups = ["sources", "elements"]
if plugin_group not in expected_groups:
- raise LoadError("Unexpected plugin group: {}, expecting {}"
- .format(plugin_group, expected_groups),
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Unexpected plugin group: {}, expecting {}".format(
+ plugin_group, expected_groups
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
if plugin_group in origin.keys():
origin_node = origin.clone()
plugins = origin.get_mapping(plugin_group, default={})
- origin_node['plugins'] = plugins.keys()
+ origin_node["plugins"] = plugins.keys()
for group in expected_groups:
if group in origin_node:
del origin_node[group]
- if origin_node.get_enum('origin', PluginOrigins) == PluginOrigins.LOCAL:
- path = self.get_path_from_node(origin.get_scalar('path'),
- check_is_dir=True)
+ if origin_node.get_enum("origin", PluginOrigins) == PluginOrigins.LOCAL:
+ path = self.get_path_from_node(
+ origin.get_scalar("path"), check_is_dir=True
+ )
# paths are passed in relative to the project, but must be absolute
- origin_node['path'] = os.path.join(self.directory, path)
+ origin_node["path"] = os.path.join(self.directory, path)
destination.append(origin_node)
# _warning_is_fatal():
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index 0555488c8..4b25192e4 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -26,15 +26,15 @@ from ._exceptions import LoadError, LoadErrorReason
# ProjectRefStorage()
#
# Indicates the type of ref storage
-class ProjectRefStorage():
+class ProjectRefStorage:
# Source references are stored inline
#
- INLINE = 'inline'
+ INLINE = "inline"
# Source references are stored in a central project.refs file
#
- PROJECT_REFS = 'project.refs'
+ PROJECT_REFS = "project.refs"
# ProjectRefs()
@@ -45,8 +45,7 @@ class ProjectRefStorage():
# directory (str): The project directory
# base_name (str): The project.refs basename
#
-class ProjectRefs():
-
+class ProjectRefs:
def __init__(self, directory, base_name):
directory = os.path.abspath(directory)
self._fullpath = os.path.join(directory, base_name)
@@ -63,7 +62,9 @@ class ProjectRefs():
#
def load(self, options):
try:
- self._toplevel_node = _yaml.load(self._fullpath, shortname=self._base_name, copy_tree=True)
+ self._toplevel_node = _yaml.load(
+ self._fullpath, shortname=self._base_name, copy_tree=True
+ )
provenance = self._toplevel_node.get_provenance()
self._toplevel_save = provenance._toplevel
@@ -83,12 +84,12 @@ class ProjectRefs():
self._toplevel_node = _new_synthetic_file(self._fullpath)
self._toplevel_save = self._toplevel_node
- self._toplevel_node.validate_keys(['projects'])
+ self._toplevel_node.validate_keys(["projects"])
# Ensure we create our toplevel entry point on the fly here
for node in [self._toplevel_node, self._toplevel_save]:
- if 'projects' not in node:
- node['projects'] = {}
+ if "projects" not in node:
+ node["projects"] = {}
# lookup_ref()
#
@@ -113,7 +114,9 @@ class ProjectRefs():
# If we couldn't find the original, create a new one.
#
if node is None:
- node = self._lookup(self._toplevel_save, project, element, source_index, ensure=True)
+ node = self._lookup(
+ self._toplevel_save, project, element, source_index, ensure=True
+ )
return node
@@ -122,7 +125,7 @@ class ProjectRefs():
# Looks up a ref node in the project.refs file, creates one if ensure is True.
#
def _lookup(self, toplevel, project, element, source_index, *, ensure=False):
- projects = toplevel.get_mapping('projects')
+ projects = toplevel.get_mapping("projects")
# Fetch the project
try:
diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
index 6ea9c4e10..5418aac18 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2.py
@@ -3,393 +3,672 @@
# source: buildstream/v2/artifact.proto
import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
+
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
-from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2
-from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from buildstream._protos.build.bazel.remote.execution.v2 import (
+ remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2,
+)
+from buildstream._protos.google.api import (
+ annotations_pb2 as google_dot_api_dot_annotations__pb2,
+)
DESCRIPTOR = _descriptor.FileDescriptor(
- name='buildstream/v2/artifact.proto',
- package='buildstream.v2',
- syntax='proto3',
- serialized_options=None,
- serialized_pb=_b('\n\x1d\x62uildstream/v2/artifact.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"\xf4\x04\n\x08\x41rtifact\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x15\n\rbuild_success\x18\x02 \x01(\x08\x12\x13\n\x0b\x62uild_error\x18\x03 \x01(\t\x12\x1b\n\x13\x62uild_error_details\x18\x04 \x01(\t\x12\x12\n\nstrong_key\x18\x05 \x01(\t\x12\x10\n\x08weak_key\x18\x06 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x07 \x01(\x08\x12\x36\n\x05\x66iles\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x37\n\nbuild_deps\x18\t \x03(\x0b\x32#.buildstream.v2.Artifact.Dependency\x12<\n\x0bpublic_data\x18\n \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12.\n\x04logs\x18\x0b \x03(\x0b\x32 .buildstream.v2.Artifact.LogFile\x12:\n\tbuildtree\x18\x0c \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x1a\x63\n\nDependency\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x14\n\x0c\x65lement_name\x18\x02 \x01(\t\x12\x11\n\tcache_key\x18\x03 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x04 \x01(\x08\x1aP\n\x07LogFile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\">\n\x12GetArtifactRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\"m\n\x15UpdateArtifactRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\x12*\n\x08\x61rtifact\x18\x03 \x01(\x0b\x32\x18.buildstream.v2.Artifact2\xb5\x01\n\x0f\x41rtifactService\x12M\n\x0bGetArtifact\x12\".buildstream.v2.GetArtifactRequest\x1a\x18.buildstream.v2.Artifact\"\x00\x12S\n\x0eUpdateArtifact\x12%.buildstream.v2.UpdateArtifactRequest\x1a\x18.buildstream.v2.Artifact\"\x00\x62\x06proto3')
- ,
- dependencies=[build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
-
-
+ name="buildstream/v2/artifact.proto",
+ package="buildstream.v2",
+ syntax="proto3",
+ serialized_options=None,
+ serialized_pb=_b(
+ '\n\x1d\x62uildstream/v2/artifact.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto"\xf4\x04\n\x08\x41rtifact\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x15\n\rbuild_success\x18\x02 \x01(\x08\x12\x13\n\x0b\x62uild_error\x18\x03 \x01(\t\x12\x1b\n\x13\x62uild_error_details\x18\x04 \x01(\t\x12\x12\n\nstrong_key\x18\x05 \x01(\t\x12\x10\n\x08weak_key\x18\x06 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x07 \x01(\x08\x12\x36\n\x05\x66iles\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x37\n\nbuild_deps\x18\t \x03(\x0b\x32#.buildstream.v2.Artifact.Dependency\x12<\n\x0bpublic_data\x18\n \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12.\n\x04logs\x18\x0b \x03(\x0b\x32 .buildstream.v2.Artifact.LogFile\x12:\n\tbuildtree\x18\x0c \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x1a\x63\n\nDependency\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x14\n\x0c\x65lement_name\x18\x02 \x01(\t\x12\x11\n\tcache_key\x18\x03 \x01(\t\x12\x16\n\x0ewas_workspaced\x18\x04 \x01(\x08\x1aP\n\x07LogFile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest">\n\x12GetArtifactRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t"m\n\x15UpdateArtifactRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\x12*\n\x08\x61rtifact\x18\x03 \x01(\x0b\x32\x18.buildstream.v2.Artifact2\xb5\x01\n\x0f\x41rtifactService\x12M\n\x0bGetArtifact\x12".buildstream.v2.GetArtifactRequest\x1a\x18.buildstream.v2.Artifact"\x00\x12S\n\x0eUpdateArtifact\x12%.buildstream.v2.UpdateArtifactRequest\x1a\x18.buildstream.v2.Artifact"\x00\x62\x06proto3'
+ ),
+ dependencies=[
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
_ARTIFACT_DEPENDENCY = _descriptor.Descriptor(
- name='Dependency',
- full_name='buildstream.v2.Artifact.Dependency',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='project_name', full_name='buildstream.v2.Artifact.Dependency.project_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='element_name', full_name='buildstream.v2.Artifact.Dependency.element_name', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='cache_key', full_name='buildstream.v2.Artifact.Dependency.cache_key', index=2,
- number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='was_workspaced', full_name='buildstream.v2.Artifact.Dependency.was_workspaced', index=3,
- number=4, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=583,
- serialized_end=682,
+ name="Dependency",
+ full_name="buildstream.v2.Artifact.Dependency",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="project_name",
+ full_name="buildstream.v2.Artifact.Dependency.project_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="element_name",
+ full_name="buildstream.v2.Artifact.Dependency.element_name",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="cache_key",
+ full_name="buildstream.v2.Artifact.Dependency.cache_key",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="was_workspaced",
+ full_name="buildstream.v2.Artifact.Dependency.was_workspaced",
+ index=3,
+ number=4,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=583,
+ serialized_end=682,
)
_ARTIFACT_LOGFILE = _descriptor.Descriptor(
- name='LogFile',
- full_name='buildstream.v2.Artifact.LogFile',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='buildstream.v2.Artifact.LogFile.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='digest', full_name='buildstream.v2.Artifact.LogFile.digest', index=1,
- number=2, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=684,
- serialized_end=764,
+ name="LogFile",
+ full_name="buildstream.v2.Artifact.LogFile",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="buildstream.v2.Artifact.LogFile.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="digest",
+ full_name="buildstream.v2.Artifact.LogFile.digest",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=684,
+ serialized_end=764,
)
_ARTIFACT = _descriptor.Descriptor(
- name='Artifact',
- full_name='buildstream.v2.Artifact',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='version', full_name='buildstream.v2.Artifact.version', index=0,
- number=1, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='build_success', full_name='buildstream.v2.Artifact.build_success', index=1,
- number=2, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='build_error', full_name='buildstream.v2.Artifact.build_error', index=2,
- number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='build_error_details', full_name='buildstream.v2.Artifact.build_error_details', index=3,
- number=4, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='strong_key', full_name='buildstream.v2.Artifact.strong_key', index=4,
- number=5, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='weak_key', full_name='buildstream.v2.Artifact.weak_key', index=5,
- number=6, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='was_workspaced', full_name='buildstream.v2.Artifact.was_workspaced', index=6,
- number=7, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='files', full_name='buildstream.v2.Artifact.files', index=7,
- number=8, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='build_deps', full_name='buildstream.v2.Artifact.build_deps', index=8,
- number=9, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='public_data', full_name='buildstream.v2.Artifact.public_data', index=9,
- number=10, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='logs', full_name='buildstream.v2.Artifact.logs', index=10,
- number=11, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='buildtree', full_name='buildstream.v2.Artifact.buildtree', index=11,
- number=12, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[_ARTIFACT_DEPENDENCY, _ARTIFACT_LOGFILE, ],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=136,
- serialized_end=764,
+ name="Artifact",
+ full_name="buildstream.v2.Artifact",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="version",
+ full_name="buildstream.v2.Artifact.version",
+ index=0,
+ number=1,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="build_success",
+ full_name="buildstream.v2.Artifact.build_success",
+ index=1,
+ number=2,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="build_error",
+ full_name="buildstream.v2.Artifact.build_error",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="build_error_details",
+ full_name="buildstream.v2.Artifact.build_error_details",
+ index=3,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="strong_key",
+ full_name="buildstream.v2.Artifact.strong_key",
+ index=4,
+ number=5,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="weak_key",
+ full_name="buildstream.v2.Artifact.weak_key",
+ index=5,
+ number=6,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="was_workspaced",
+ full_name="buildstream.v2.Artifact.was_workspaced",
+ index=6,
+ number=7,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="files",
+ full_name="buildstream.v2.Artifact.files",
+ index=7,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="build_deps",
+ full_name="buildstream.v2.Artifact.build_deps",
+ index=8,
+ number=9,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="public_data",
+ full_name="buildstream.v2.Artifact.public_data",
+ index=9,
+ number=10,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="logs",
+ full_name="buildstream.v2.Artifact.logs",
+ index=10,
+ number=11,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="buildtree",
+ full_name="buildstream.v2.Artifact.buildtree",
+ index=11,
+ number=12,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[_ARTIFACT_DEPENDENCY, _ARTIFACT_LOGFILE,],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=136,
+ serialized_end=764,
)
_GETARTIFACTREQUEST = _descriptor.Descriptor(
- name='GetArtifactRequest',
- full_name='buildstream.v2.GetArtifactRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='instance_name', full_name='buildstream.v2.GetArtifactRequest.instance_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='cache_key', full_name='buildstream.v2.GetArtifactRequest.cache_key', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=766,
- serialized_end=828,
+ name="GetArtifactRequest",
+ full_name="buildstream.v2.GetArtifactRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="instance_name",
+ full_name="buildstream.v2.GetArtifactRequest.instance_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="cache_key",
+ full_name="buildstream.v2.GetArtifactRequest.cache_key",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=766,
+ serialized_end=828,
)
_UPDATEARTIFACTREQUEST = _descriptor.Descriptor(
- name='UpdateArtifactRequest',
- full_name='buildstream.v2.UpdateArtifactRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='instance_name', full_name='buildstream.v2.UpdateArtifactRequest.instance_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='cache_key', full_name='buildstream.v2.UpdateArtifactRequest.cache_key', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='artifact', full_name='buildstream.v2.UpdateArtifactRequest.artifact', index=2,
- number=3, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=830,
- serialized_end=939,
+ name="UpdateArtifactRequest",
+ full_name="buildstream.v2.UpdateArtifactRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="instance_name",
+ full_name="buildstream.v2.UpdateArtifactRequest.instance_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="cache_key",
+ full_name="buildstream.v2.UpdateArtifactRequest.cache_key",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="artifact",
+ full_name="buildstream.v2.UpdateArtifactRequest.artifact",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=830,
+ serialized_end=939,
)
_ARTIFACT_DEPENDENCY.containing_type = _ARTIFACT
-_ARTIFACT_LOGFILE.fields_by_name['digest'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+_ARTIFACT_LOGFILE.fields_by_name[
+ "digest"
+].message_type = (
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+)
_ARTIFACT_LOGFILE.containing_type = _ARTIFACT
-_ARTIFACT.fields_by_name['files'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-_ARTIFACT.fields_by_name['build_deps'].message_type = _ARTIFACT_DEPENDENCY
-_ARTIFACT.fields_by_name['public_data'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-_ARTIFACT.fields_by_name['logs'].message_type = _ARTIFACT_LOGFILE
-_ARTIFACT.fields_by_name['buildtree'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-_UPDATEARTIFACTREQUEST.fields_by_name['artifact'].message_type = _ARTIFACT
-DESCRIPTOR.message_types_by_name['Artifact'] = _ARTIFACT
-DESCRIPTOR.message_types_by_name['GetArtifactRequest'] = _GETARTIFACTREQUEST
-DESCRIPTOR.message_types_by_name['UpdateArtifactRequest'] = _UPDATEARTIFACTREQUEST
+_ARTIFACT.fields_by_name[
+ "files"
+].message_type = (
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+)
+_ARTIFACT.fields_by_name["build_deps"].message_type = _ARTIFACT_DEPENDENCY
+_ARTIFACT.fields_by_name[
+ "public_data"
+].message_type = (
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+)
+_ARTIFACT.fields_by_name["logs"].message_type = _ARTIFACT_LOGFILE
+_ARTIFACT.fields_by_name[
+ "buildtree"
+].message_type = (
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+)
+_UPDATEARTIFACTREQUEST.fields_by_name["artifact"].message_type = _ARTIFACT
+DESCRIPTOR.message_types_by_name["Artifact"] = _ARTIFACT
+DESCRIPTOR.message_types_by_name["GetArtifactRequest"] = _GETARTIFACTREQUEST
+DESCRIPTOR.message_types_by_name["UpdateArtifactRequest"] = _UPDATEARTIFACTREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-Artifact = _reflection.GeneratedProtocolMessageType('Artifact', (_message.Message,), {
-
- 'Dependency' : _reflection.GeneratedProtocolMessageType('Dependency', (_message.Message,), {
- 'DESCRIPTOR' : _ARTIFACT_DEPENDENCY,
- '__module__' : 'buildstream.v2.artifact_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.Artifact.Dependency)
- })
- ,
-
- 'LogFile' : _reflection.GeneratedProtocolMessageType('LogFile', (_message.Message,), {
- 'DESCRIPTOR' : _ARTIFACT_LOGFILE,
- '__module__' : 'buildstream.v2.artifact_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.Artifact.LogFile)
- })
- ,
- 'DESCRIPTOR' : _ARTIFACT,
- '__module__' : 'buildstream.v2.artifact_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.Artifact)
- })
+Artifact = _reflection.GeneratedProtocolMessageType(
+ "Artifact",
+ (_message.Message,),
+ {
+ "Dependency": _reflection.GeneratedProtocolMessageType(
+ "Dependency",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ARTIFACT_DEPENDENCY,
+ "__module__": "buildstream.v2.artifact_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.Artifact.Dependency)
+ },
+ ),
+ "LogFile": _reflection.GeneratedProtocolMessageType(
+ "LogFile",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ARTIFACT_LOGFILE,
+ "__module__": "buildstream.v2.artifact_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.Artifact.LogFile)
+ },
+ ),
+ "DESCRIPTOR": _ARTIFACT,
+ "__module__": "buildstream.v2.artifact_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.Artifact)
+ },
+)
_sym_db.RegisterMessage(Artifact)
_sym_db.RegisterMessage(Artifact.Dependency)
_sym_db.RegisterMessage(Artifact.LogFile)
-GetArtifactRequest = _reflection.GeneratedProtocolMessageType('GetArtifactRequest', (_message.Message,), {
- 'DESCRIPTOR' : _GETARTIFACTREQUEST,
- '__module__' : 'buildstream.v2.artifact_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.GetArtifactRequest)
- })
+GetArtifactRequest = _reflection.GeneratedProtocolMessageType(
+ "GetArtifactRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETARTIFACTREQUEST,
+ "__module__": "buildstream.v2.artifact_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.GetArtifactRequest)
+ },
+)
_sym_db.RegisterMessage(GetArtifactRequest)
-UpdateArtifactRequest = _reflection.GeneratedProtocolMessageType('UpdateArtifactRequest', (_message.Message,), {
- 'DESCRIPTOR' : _UPDATEARTIFACTREQUEST,
- '__module__' : 'buildstream.v2.artifact_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateArtifactRequest)
- })
+UpdateArtifactRequest = _reflection.GeneratedProtocolMessageType(
+ "UpdateArtifactRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _UPDATEARTIFACTREQUEST,
+ "__module__": "buildstream.v2.artifact_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateArtifactRequest)
+ },
+)
_sym_db.RegisterMessage(UpdateArtifactRequest)
-
_ARTIFACTSERVICE = _descriptor.ServiceDescriptor(
- name='ArtifactService',
- full_name='buildstream.v2.ArtifactService',
- file=DESCRIPTOR,
- index=0,
- serialized_options=None,
- serialized_start=942,
- serialized_end=1123,
- methods=[
- _descriptor.MethodDescriptor(
- name='GetArtifact',
- full_name='buildstream.v2.ArtifactService.GetArtifact',
+ name="ArtifactService",
+ full_name="buildstream.v2.ArtifactService",
+ file=DESCRIPTOR,
index=0,
- containing_service=None,
- input_type=_GETARTIFACTREQUEST,
- output_type=_ARTIFACT,
- serialized_options=None,
- ),
- _descriptor.MethodDescriptor(
- name='UpdateArtifact',
- full_name='buildstream.v2.ArtifactService.UpdateArtifact',
- index=1,
- containing_service=None,
- input_type=_UPDATEARTIFACTREQUEST,
- output_type=_ARTIFACT,
serialized_options=None,
- ),
-])
+ serialized_start=942,
+ serialized_end=1123,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="GetArtifact",
+ full_name="buildstream.v2.ArtifactService.GetArtifact",
+ index=0,
+ containing_service=None,
+ input_type=_GETARTIFACTREQUEST,
+ output_type=_ARTIFACT,
+ serialized_options=None,
+ ),
+ _descriptor.MethodDescriptor(
+ name="UpdateArtifact",
+ full_name="buildstream.v2.ArtifactService.UpdateArtifact",
+ index=1,
+ containing_service=None,
+ input_type=_UPDATEARTIFACTREQUEST,
+ output_type=_ARTIFACT,
+ serialized_options=None,
+ ),
+ ],
+)
_sym_db.RegisterServiceDescriptor(_ARTIFACTSERVICE)
-DESCRIPTOR.services_by_name['ArtifactService'] = _ARTIFACTSERVICE
+DESCRIPTOR.services_by_name["ArtifactService"] = _ARTIFACTSERVICE
# @@protoc_insertion_point(module_scope)
diff --git a/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
index db0cd6435..694780e25 100644
--- a/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/artifact_pb2_grpc.py
@@ -1,69 +1,72 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.buildstream.v2 import artifact_pb2 as buildstream_dot_v2_dot_artifact__pb2
+from buildstream._protos.buildstream.v2 import (
+ artifact_pb2 as buildstream_dot_v2_dot_artifact__pb2,
+)
class ArtifactServiceStub(object):
- # missing associated documentation comment in .proto file
- pass
+ # missing associated documentation comment in .proto file
+ pass
- def __init__(self, channel):
- """Constructor.
+ def __init__(self, channel):
+ """Constructor.
Args:
channel: A grpc.Channel.
"""
- self.GetArtifact = channel.unary_unary(
- '/buildstream.v2.ArtifactService/GetArtifact',
- request_serializer=buildstream_dot_v2_dot_artifact__pb2.GetArtifactRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
+ self.GetArtifact = channel.unary_unary(
+ "/buildstream.v2.ArtifactService/GetArtifact",
+ request_serializer=buildstream_dot_v2_dot_artifact__pb2.GetArtifactRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
)
- self.UpdateArtifact = channel.unary_unary(
- '/buildstream.v2.ArtifactService/UpdateArtifact',
- request_serializer=buildstream_dot_v2_dot_artifact__pb2.UpdateArtifactRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
+ self.UpdateArtifact = channel.unary_unary(
+ "/buildstream.v2.ArtifactService/UpdateArtifact",
+ request_serializer=buildstream_dot_v2_dot_artifact__pb2.UpdateArtifactRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.FromString,
)
class ArtifactServiceServicer(object):
- # missing associated documentation comment in .proto file
- pass
+ # missing associated documentation comment in .proto file
+ pass
- def GetArtifact(self, request, context):
- """Retrieves an Artifact message
+ def GetArtifact(self, request, context):
+ """Retrieves an Artifact message
Errors:
* `NOT_FOUND`: Artifact not found on server
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
- def UpdateArtifact(self, request, context):
- """Sets an Artifact message
+ def UpdateArtifact(self, request, context):
+ """Sets an Artifact message
Errors:
* `FAILED_PRECONDITION`: Files specified in upload aren't present in CAS
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
def add_ArtifactServiceServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetArtifact': grpc.unary_unary_rpc_method_handler(
- servicer.GetArtifact,
- request_deserializer=buildstream_dot_v2_dot_artifact__pb2.GetArtifactRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
- ),
- 'UpdateArtifact': grpc.unary_unary_rpc_method_handler(
- servicer.UpdateArtifact,
- request_deserializer=buildstream_dot_v2_dot_artifact__pb2.UpdateArtifactRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'buildstream.v2.ArtifactService', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ "GetArtifact": grpc.unary_unary_rpc_method_handler(
+ servicer.GetArtifact,
+ request_deserializer=buildstream_dot_v2_dot_artifact__pb2.GetArtifactRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
+ ),
+ "UpdateArtifact": grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateArtifact,
+ request_deserializer=buildstream_dot_v2_dot_artifact__pb2.UpdateArtifactRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_artifact__pb2.Artifact.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "buildstream.v2.ArtifactService", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py b/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py
index 0b6243a4d..50621e7bd 100644
--- a/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/buildstream_pb2.py
@@ -3,513 +3,695 @@
# source: buildstream/v2/buildstream.proto
import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
+
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
-from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2
-from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from buildstream._protos.build.bazel.remote.execution.v2 import (
+ remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2,
+)
+from buildstream._protos.google.api import (
+ annotations_pb2 as google_dot_api_dot_annotations__pb2,
+)
DESCRIPTOR = _descriptor.FileDescriptor(
- name='buildstream/v2/buildstream.proto',
- package='buildstream.v2',
- syntax='proto3',
- serialized_options=None,
- serialized_pb=_b('\n buildstream/v2/buildstream.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"9\n\x13GetReferenceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\"O\n\x14GetReferenceResponse\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"v\n\x16UpdateReferenceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x37\n\x06\x64igest\x18\x03 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\x19\n\x17UpdateReferenceResponse\"&\n\rStatusRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"\'\n\x0eStatusResponse\x12\x15\n\rallow_updates\x18\x01 \x01(\x08\"/\n\x16GetCapabilitiesRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"-\n\x14\x41rtifactCapabilities\x12\x15\n\rallow_updates\x18\x01 \x01(\x08\"+\n\x12SourceCapabilities\x12\x15\n\rallow_updates\x18\x01 \x01(\x08\"\x9a\x01\n\x12ServerCapabilities\x12\x43\n\x15\x61rtifact_capabilities\x18\x01 \x01(\x0b\x32$.buildstream.v2.ArtifactCapabilities\x12?\n\x13source_capabilities\x18\x02 \x01(\x0b\x32\".buildstream.v2.SourceCapabilities2\xca\x03\n\x10ReferenceStorage\x12\x90\x01\n\x0cGetReference\x12#.buildstream.v2.GetReferenceRequest\x1a$.buildstream.v2.GetReferenceResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v2/{instance_name=**}/buildstream/refs/{key}\x12\xa1\x01\n\x0fUpdateReference\x12&.buildstream.v2.UpdateReferenceRequest\x1a\'.buildstream.v2.UpdateReferenceResponse\"=\x82\xd3\xe4\x93\x02\x37\x1a-/v2/{instance_name=**}/buildstream/refs/{key}:\x06\x64igest\x12\x7f\n\x06Status\x12\x1d.buildstream.v2.StatusRequest\x1a\x1e.buildstream.v2.StatusResponse\"6\x82\xd3\xe4\x93\x02\x30\x1a./v2/{instance_name=**}/buildstream/refs:status2\x9b\x01\n\x0c\x43\x61pabilities\x12\x8a\x01\n\x0fGetCapabilities\x12&.buildstream.v2.GetCapabilitiesRequest\x1a\".buildstream.v2.ServerCapabilities\"+\x82\xd3\xe4\x93\x02%\x12#/v2/{instance_name=**}/capabilitiesb\x06proto3')
- ,
- dependencies=[build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
-
-
+ name="buildstream/v2/buildstream.proto",
+ package="buildstream.v2",
+ syntax="proto3",
+ serialized_options=None,
+ serialized_pb=_b(
+ '\n buildstream/v2/buildstream.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto"9\n\x13GetReferenceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t"O\n\x14GetReferenceResponse\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest"v\n\x16UpdateReferenceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x37\n\x06\x64igest\x18\x03 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest"\x19\n\x17UpdateReferenceResponse"&\n\rStatusRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t"\'\n\x0eStatusResponse\x12\x15\n\rallow_updates\x18\x01 \x01(\x08"/\n\x16GetCapabilitiesRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t"-\n\x14\x41rtifactCapabilities\x12\x15\n\rallow_updates\x18\x01 \x01(\x08"+\n\x12SourceCapabilities\x12\x15\n\rallow_updates\x18\x01 \x01(\x08"\x9a\x01\n\x12ServerCapabilities\x12\x43\n\x15\x61rtifact_capabilities\x18\x01 \x01(\x0b\x32$.buildstream.v2.ArtifactCapabilities\x12?\n\x13source_capabilities\x18\x02 \x01(\x0b\x32".buildstream.v2.SourceCapabilities2\xca\x03\n\x10ReferenceStorage\x12\x90\x01\n\x0cGetReference\x12#.buildstream.v2.GetReferenceRequest\x1a$.buildstream.v2.GetReferenceResponse"5\x82\xd3\xe4\x93\x02/\x12-/v2/{instance_name=**}/buildstream/refs/{key}\x12\xa1\x01\n\x0fUpdateReference\x12&.buildstream.v2.UpdateReferenceRequest\x1a\'.buildstream.v2.UpdateReferenceResponse"=\x82\xd3\xe4\x93\x02\x37\x1a-/v2/{instance_name=**}/buildstream/refs/{key}:\x06\x64igest\x12\x7f\n\x06Status\x12\x1d.buildstream.v2.StatusRequest\x1a\x1e.buildstream.v2.StatusResponse"6\x82\xd3\xe4\x93\x02\x30\x1a./v2/{instance_name=**}/buildstream/refs:status2\x9b\x01\n\x0c\x43\x61pabilities\x12\x8a\x01\n\x0fGetCapabilities\x12&.buildstream.v2.GetCapabilitiesRequest\x1a".buildstream.v2.ServerCapabilities"+\x82\xd3\xe4\x93\x02%\x12#/v2/{instance_name=**}/capabilitiesb\x06proto3'
+ ),
+ dependencies=[
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
_GETREFERENCEREQUEST = _descriptor.Descriptor(
- name='GetReferenceRequest',
- full_name='buildstream.v2.GetReferenceRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='instance_name', full_name='buildstream.v2.GetReferenceRequest.instance_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='key', full_name='buildstream.v2.GetReferenceRequest.key', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=138,
- serialized_end=195,
+ name="GetReferenceRequest",
+ full_name="buildstream.v2.GetReferenceRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="instance_name",
+ full_name="buildstream.v2.GetReferenceRequest.instance_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="key",
+ full_name="buildstream.v2.GetReferenceRequest.key",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=138,
+ serialized_end=195,
)
_GETREFERENCERESPONSE = _descriptor.Descriptor(
- name='GetReferenceResponse',
- full_name='buildstream.v2.GetReferenceResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='digest', full_name='buildstream.v2.GetReferenceResponse.digest', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=197,
- serialized_end=276,
+ name="GetReferenceResponse",
+ full_name="buildstream.v2.GetReferenceResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="digest",
+ full_name="buildstream.v2.GetReferenceResponse.digest",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=197,
+ serialized_end=276,
)
_UPDATEREFERENCEREQUEST = _descriptor.Descriptor(
- name='UpdateReferenceRequest',
- full_name='buildstream.v2.UpdateReferenceRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='instance_name', full_name='buildstream.v2.UpdateReferenceRequest.instance_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='keys', full_name='buildstream.v2.UpdateReferenceRequest.keys', index=1,
- number=2, type=9, cpp_type=9, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='digest', full_name='buildstream.v2.UpdateReferenceRequest.digest', index=2,
- number=3, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=278,
- serialized_end=396,
+ name="UpdateReferenceRequest",
+ full_name="buildstream.v2.UpdateReferenceRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="instance_name",
+ full_name="buildstream.v2.UpdateReferenceRequest.instance_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="keys",
+ full_name="buildstream.v2.UpdateReferenceRequest.keys",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="digest",
+ full_name="buildstream.v2.UpdateReferenceRequest.digest",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=278,
+ serialized_end=396,
)
_UPDATEREFERENCERESPONSE = _descriptor.Descriptor(
- name='UpdateReferenceResponse',
- full_name='buildstream.v2.UpdateReferenceResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=398,
- serialized_end=423,
+ name="UpdateReferenceResponse",
+ full_name="buildstream.v2.UpdateReferenceResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=398,
+ serialized_end=423,
)
_STATUSREQUEST = _descriptor.Descriptor(
- name='StatusRequest',
- full_name='buildstream.v2.StatusRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='instance_name', full_name='buildstream.v2.StatusRequest.instance_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=425,
- serialized_end=463,
+ name="StatusRequest",
+ full_name="buildstream.v2.StatusRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="instance_name",
+ full_name="buildstream.v2.StatusRequest.instance_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=425,
+ serialized_end=463,
)
_STATUSRESPONSE = _descriptor.Descriptor(
- name='StatusResponse',
- full_name='buildstream.v2.StatusResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='allow_updates', full_name='buildstream.v2.StatusResponse.allow_updates', index=0,
- number=1, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=465,
- serialized_end=504,
+ name="StatusResponse",
+ full_name="buildstream.v2.StatusResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="allow_updates",
+ full_name="buildstream.v2.StatusResponse.allow_updates",
+ index=0,
+ number=1,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=465,
+ serialized_end=504,
)
_GETCAPABILITIESREQUEST = _descriptor.Descriptor(
- name='GetCapabilitiesRequest',
- full_name='buildstream.v2.GetCapabilitiesRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='instance_name', full_name='buildstream.v2.GetCapabilitiesRequest.instance_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=506,
- serialized_end=553,
+ name="GetCapabilitiesRequest",
+ full_name="buildstream.v2.GetCapabilitiesRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="instance_name",
+ full_name="buildstream.v2.GetCapabilitiesRequest.instance_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=506,
+ serialized_end=553,
)
_ARTIFACTCAPABILITIES = _descriptor.Descriptor(
- name='ArtifactCapabilities',
- full_name='buildstream.v2.ArtifactCapabilities',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='allow_updates', full_name='buildstream.v2.ArtifactCapabilities.allow_updates', index=0,
- number=1, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=555,
- serialized_end=600,
+ name="ArtifactCapabilities",
+ full_name="buildstream.v2.ArtifactCapabilities",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="allow_updates",
+ full_name="buildstream.v2.ArtifactCapabilities.allow_updates",
+ index=0,
+ number=1,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=555,
+ serialized_end=600,
)
_SOURCECAPABILITIES = _descriptor.Descriptor(
- name='SourceCapabilities',
- full_name='buildstream.v2.SourceCapabilities',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='allow_updates', full_name='buildstream.v2.SourceCapabilities.allow_updates', index=0,
- number=1, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=602,
- serialized_end=645,
+ name="SourceCapabilities",
+ full_name="buildstream.v2.SourceCapabilities",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="allow_updates",
+ full_name="buildstream.v2.SourceCapabilities.allow_updates",
+ index=0,
+ number=1,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=602,
+ serialized_end=645,
)
_SERVERCAPABILITIES = _descriptor.Descriptor(
- name='ServerCapabilities',
- full_name='buildstream.v2.ServerCapabilities',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='artifact_capabilities', full_name='buildstream.v2.ServerCapabilities.artifact_capabilities', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='source_capabilities', full_name='buildstream.v2.ServerCapabilities.source_capabilities', index=1,
- number=2, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=648,
- serialized_end=802,
+ name="ServerCapabilities",
+ full_name="buildstream.v2.ServerCapabilities",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="artifact_capabilities",
+ full_name="buildstream.v2.ServerCapabilities.artifact_capabilities",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="source_capabilities",
+ full_name="buildstream.v2.ServerCapabilities.source_capabilities",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=648,
+ serialized_end=802,
)
-_GETREFERENCERESPONSE.fields_by_name['digest'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-_UPDATEREFERENCEREQUEST.fields_by_name['digest'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-_SERVERCAPABILITIES.fields_by_name['artifact_capabilities'].message_type = _ARTIFACTCAPABILITIES
-_SERVERCAPABILITIES.fields_by_name['source_capabilities'].message_type = _SOURCECAPABILITIES
-DESCRIPTOR.message_types_by_name['GetReferenceRequest'] = _GETREFERENCEREQUEST
-DESCRIPTOR.message_types_by_name['GetReferenceResponse'] = _GETREFERENCERESPONSE
-DESCRIPTOR.message_types_by_name['UpdateReferenceRequest'] = _UPDATEREFERENCEREQUEST
-DESCRIPTOR.message_types_by_name['UpdateReferenceResponse'] = _UPDATEREFERENCERESPONSE
-DESCRIPTOR.message_types_by_name['StatusRequest'] = _STATUSREQUEST
-DESCRIPTOR.message_types_by_name['StatusResponse'] = _STATUSRESPONSE
-DESCRIPTOR.message_types_by_name['GetCapabilitiesRequest'] = _GETCAPABILITIESREQUEST
-DESCRIPTOR.message_types_by_name['ArtifactCapabilities'] = _ARTIFACTCAPABILITIES
-DESCRIPTOR.message_types_by_name['SourceCapabilities'] = _SOURCECAPABILITIES
-DESCRIPTOR.message_types_by_name['ServerCapabilities'] = _SERVERCAPABILITIES
+_GETREFERENCERESPONSE.fields_by_name[
+ "digest"
+].message_type = (
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+)
+_UPDATEREFERENCEREQUEST.fields_by_name[
+ "digest"
+].message_type = (
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+)
+_SERVERCAPABILITIES.fields_by_name[
+ "artifact_capabilities"
+].message_type = _ARTIFACTCAPABILITIES
+_SERVERCAPABILITIES.fields_by_name[
+ "source_capabilities"
+].message_type = _SOURCECAPABILITIES
+DESCRIPTOR.message_types_by_name["GetReferenceRequest"] = _GETREFERENCEREQUEST
+DESCRIPTOR.message_types_by_name["GetReferenceResponse"] = _GETREFERENCERESPONSE
+DESCRIPTOR.message_types_by_name["UpdateReferenceRequest"] = _UPDATEREFERENCEREQUEST
+DESCRIPTOR.message_types_by_name["UpdateReferenceResponse"] = _UPDATEREFERENCERESPONSE
+DESCRIPTOR.message_types_by_name["StatusRequest"] = _STATUSREQUEST
+DESCRIPTOR.message_types_by_name["StatusResponse"] = _STATUSRESPONSE
+DESCRIPTOR.message_types_by_name["GetCapabilitiesRequest"] = _GETCAPABILITIESREQUEST
+DESCRIPTOR.message_types_by_name["ArtifactCapabilities"] = _ARTIFACTCAPABILITIES
+DESCRIPTOR.message_types_by_name["SourceCapabilities"] = _SOURCECAPABILITIES
+DESCRIPTOR.message_types_by_name["ServerCapabilities"] = _SERVERCAPABILITIES
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-GetReferenceRequest = _reflection.GeneratedProtocolMessageType('GetReferenceRequest', (_message.Message,), {
- 'DESCRIPTOR' : _GETREFERENCEREQUEST,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.GetReferenceRequest)
- })
+GetReferenceRequest = _reflection.GeneratedProtocolMessageType(
+ "GetReferenceRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETREFERENCEREQUEST,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.GetReferenceRequest)
+ },
+)
_sym_db.RegisterMessage(GetReferenceRequest)
-GetReferenceResponse = _reflection.GeneratedProtocolMessageType('GetReferenceResponse', (_message.Message,), {
- 'DESCRIPTOR' : _GETREFERENCERESPONSE,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.GetReferenceResponse)
- })
+GetReferenceResponse = _reflection.GeneratedProtocolMessageType(
+ "GetReferenceResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETREFERENCERESPONSE,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.GetReferenceResponse)
+ },
+)
_sym_db.RegisterMessage(GetReferenceResponse)
-UpdateReferenceRequest = _reflection.GeneratedProtocolMessageType('UpdateReferenceRequest', (_message.Message,), {
- 'DESCRIPTOR' : _UPDATEREFERENCEREQUEST,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateReferenceRequest)
- })
+UpdateReferenceRequest = _reflection.GeneratedProtocolMessageType(
+ "UpdateReferenceRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _UPDATEREFERENCEREQUEST,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateReferenceRequest)
+ },
+)
_sym_db.RegisterMessage(UpdateReferenceRequest)
-UpdateReferenceResponse = _reflection.GeneratedProtocolMessageType('UpdateReferenceResponse', (_message.Message,), {
- 'DESCRIPTOR' : _UPDATEREFERENCERESPONSE,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateReferenceResponse)
- })
+UpdateReferenceResponse = _reflection.GeneratedProtocolMessageType(
+ "UpdateReferenceResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _UPDATEREFERENCERESPONSE,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateReferenceResponse)
+ },
+)
_sym_db.RegisterMessage(UpdateReferenceResponse)
-StatusRequest = _reflection.GeneratedProtocolMessageType('StatusRequest', (_message.Message,), {
- 'DESCRIPTOR' : _STATUSREQUEST,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.StatusRequest)
- })
+StatusRequest = _reflection.GeneratedProtocolMessageType(
+ "StatusRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STATUSREQUEST,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.StatusRequest)
+ },
+)
_sym_db.RegisterMessage(StatusRequest)
-StatusResponse = _reflection.GeneratedProtocolMessageType('StatusResponse', (_message.Message,), {
- 'DESCRIPTOR' : _STATUSRESPONSE,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.StatusResponse)
- })
+StatusResponse = _reflection.GeneratedProtocolMessageType(
+ "StatusResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STATUSRESPONSE,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.StatusResponse)
+ },
+)
_sym_db.RegisterMessage(StatusResponse)
-GetCapabilitiesRequest = _reflection.GeneratedProtocolMessageType('GetCapabilitiesRequest', (_message.Message,), {
- 'DESCRIPTOR' : _GETCAPABILITIESREQUEST,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.GetCapabilitiesRequest)
- })
+GetCapabilitiesRequest = _reflection.GeneratedProtocolMessageType(
+ "GetCapabilitiesRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETCAPABILITIESREQUEST,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.GetCapabilitiesRequest)
+ },
+)
_sym_db.RegisterMessage(GetCapabilitiesRequest)
-ArtifactCapabilities = _reflection.GeneratedProtocolMessageType('ArtifactCapabilities', (_message.Message,), {
- 'DESCRIPTOR' : _ARTIFACTCAPABILITIES,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.ArtifactCapabilities)
- })
+ArtifactCapabilities = _reflection.GeneratedProtocolMessageType(
+ "ArtifactCapabilities",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _ARTIFACTCAPABILITIES,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.ArtifactCapabilities)
+ },
+)
_sym_db.RegisterMessage(ArtifactCapabilities)
-SourceCapabilities = _reflection.GeneratedProtocolMessageType('SourceCapabilities', (_message.Message,), {
- 'DESCRIPTOR' : _SOURCECAPABILITIES,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.SourceCapabilities)
- })
+SourceCapabilities = _reflection.GeneratedProtocolMessageType(
+ "SourceCapabilities",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _SOURCECAPABILITIES,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.SourceCapabilities)
+ },
+)
_sym_db.RegisterMessage(SourceCapabilities)
-ServerCapabilities = _reflection.GeneratedProtocolMessageType('ServerCapabilities', (_message.Message,), {
- 'DESCRIPTOR' : _SERVERCAPABILITIES,
- '__module__' : 'buildstream.v2.buildstream_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.ServerCapabilities)
- })
+ServerCapabilities = _reflection.GeneratedProtocolMessageType(
+ "ServerCapabilities",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _SERVERCAPABILITIES,
+ "__module__": "buildstream.v2.buildstream_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.ServerCapabilities)
+ },
+)
_sym_db.RegisterMessage(ServerCapabilities)
-
_REFERENCESTORAGE = _descriptor.ServiceDescriptor(
- name='ReferenceStorage',
- full_name='buildstream.v2.ReferenceStorage',
- file=DESCRIPTOR,
- index=0,
- serialized_options=None,
- serialized_start=805,
- serialized_end=1263,
- methods=[
- _descriptor.MethodDescriptor(
- name='GetReference',
- full_name='buildstream.v2.ReferenceStorage.GetReference',
+ name="ReferenceStorage",
+ full_name="buildstream.v2.ReferenceStorage",
+ file=DESCRIPTOR,
index=0,
- containing_service=None,
- input_type=_GETREFERENCEREQUEST,
- output_type=_GETREFERENCERESPONSE,
- serialized_options=_b('\202\323\344\223\002/\022-/v2/{instance_name=**}/buildstream/refs/{key}'),
- ),
- _descriptor.MethodDescriptor(
- name='UpdateReference',
- full_name='buildstream.v2.ReferenceStorage.UpdateReference',
- index=1,
- containing_service=None,
- input_type=_UPDATEREFERENCEREQUEST,
- output_type=_UPDATEREFERENCERESPONSE,
- serialized_options=_b('\202\323\344\223\0027\032-/v2/{instance_name=**}/buildstream/refs/{key}:\006digest'),
- ),
- _descriptor.MethodDescriptor(
- name='Status',
- full_name='buildstream.v2.ReferenceStorage.Status',
- index=2,
- containing_service=None,
- input_type=_STATUSREQUEST,
- output_type=_STATUSRESPONSE,
- serialized_options=_b('\202\323\344\223\0020\032./v2/{instance_name=**}/buildstream/refs:status'),
- ),
-])
+ serialized_options=None,
+ serialized_start=805,
+ serialized_end=1263,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="GetReference",
+ full_name="buildstream.v2.ReferenceStorage.GetReference",
+ index=0,
+ containing_service=None,
+ input_type=_GETREFERENCEREQUEST,
+ output_type=_GETREFERENCERESPONSE,
+ serialized_options=_b(
+ "\202\323\344\223\002/\022-/v2/{instance_name=**}/buildstream/refs/{key}"
+ ),
+ ),
+ _descriptor.MethodDescriptor(
+ name="UpdateReference",
+ full_name="buildstream.v2.ReferenceStorage.UpdateReference",
+ index=1,
+ containing_service=None,
+ input_type=_UPDATEREFERENCEREQUEST,
+ output_type=_UPDATEREFERENCERESPONSE,
+ serialized_options=_b(
+ "\202\323\344\223\0027\032-/v2/{instance_name=**}/buildstream/refs/{key}:\006digest"
+ ),
+ ),
+ _descriptor.MethodDescriptor(
+ name="Status",
+ full_name="buildstream.v2.ReferenceStorage.Status",
+ index=2,
+ containing_service=None,
+ input_type=_STATUSREQUEST,
+ output_type=_STATUSRESPONSE,
+ serialized_options=_b(
+ "\202\323\344\223\0020\032./v2/{instance_name=**}/buildstream/refs:status"
+ ),
+ ),
+ ],
+)
_sym_db.RegisterServiceDescriptor(_REFERENCESTORAGE)
-DESCRIPTOR.services_by_name['ReferenceStorage'] = _REFERENCESTORAGE
+DESCRIPTOR.services_by_name["ReferenceStorage"] = _REFERENCESTORAGE
_CAPABILITIES = _descriptor.ServiceDescriptor(
- name='Capabilities',
- full_name='buildstream.v2.Capabilities',
- file=DESCRIPTOR,
- index=1,
- serialized_options=None,
- serialized_start=1266,
- serialized_end=1421,
- methods=[
- _descriptor.MethodDescriptor(
- name='GetCapabilities',
- full_name='buildstream.v2.Capabilities.GetCapabilities',
- index=0,
- containing_service=None,
- input_type=_GETCAPABILITIESREQUEST,
- output_type=_SERVERCAPABILITIES,
- serialized_options=_b('\202\323\344\223\002%\022#/v2/{instance_name=**}/capabilities'),
- ),
-])
+ name="Capabilities",
+ full_name="buildstream.v2.Capabilities",
+ file=DESCRIPTOR,
+ index=1,
+ serialized_options=None,
+ serialized_start=1266,
+ serialized_end=1421,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="GetCapabilities",
+ full_name="buildstream.v2.Capabilities.GetCapabilities",
+ index=0,
+ containing_service=None,
+ input_type=_GETCAPABILITIESREQUEST,
+ output_type=_SERVERCAPABILITIES,
+ serialized_options=_b(
+ "\202\323\344\223\002%\022#/v2/{instance_name=**}/capabilities"
+ ),
+ ),
+ ],
+)
_sym_db.RegisterServiceDescriptor(_CAPABILITIES)
-DESCRIPTOR.services_by_name['Capabilities'] = _CAPABILITIES
+DESCRIPTOR.services_by_name["Capabilities"] = _CAPABILITIES
# @@protoc_insertion_point(module_scope)
diff --git a/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
index 52d22c593..7c4ca932b 100644
--- a/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/buildstream_pb2_grpc.py
@@ -1,131 +1,135 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.buildstream.v2 import buildstream_pb2 as buildstream_dot_v2_dot_buildstream__pb2
+from buildstream._protos.buildstream.v2 import (
+ buildstream_pb2 as buildstream_dot_v2_dot_buildstream__pb2,
+)
class ReferenceStorageStub(object):
- # missing associated documentation comment in .proto file
- pass
+ # missing associated documentation comment in .proto file
+ pass
- def __init__(self, channel):
- """Constructor.
+ def __init__(self, channel):
+ """Constructor.
Args:
channel: A grpc.Channel.
"""
- self.GetReference = channel.unary_unary(
- '/buildstream.v2.ReferenceStorage/GetReference',
- request_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.FromString,
+ self.GetReference = channel.unary_unary(
+ "/buildstream.v2.ReferenceStorage/GetReference",
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.FromString,
)
- self.UpdateReference = channel.unary_unary(
- '/buildstream.v2.ReferenceStorage/UpdateReference',
- request_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.FromString,
+ self.UpdateReference = channel.unary_unary(
+ "/buildstream.v2.ReferenceStorage/UpdateReference",
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.FromString,
)
- self.Status = channel.unary_unary(
- '/buildstream.v2.ReferenceStorage/Status',
- request_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.FromString,
+ self.Status = channel.unary_unary(
+ "/buildstream.v2.ReferenceStorage/Status",
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.FromString,
)
class ReferenceStorageServicer(object):
- # missing associated documentation comment in .proto file
- pass
+ # missing associated documentation comment in .proto file
+ pass
- def GetReference(self, request, context):
- """Retrieve a CAS [Directory][build.bazel.remote.execution.v2.Directory]
+ def GetReference(self, request, context):
+ """Retrieve a CAS [Directory][build.bazel.remote.execution.v2.Directory]
digest by name.
Errors:
* `NOT_FOUND`: The requested reference is not in the cache.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
- def UpdateReference(self, request, context):
- """Associate a name with a CAS [Directory][build.bazel.remote.execution.v2.Directory]
+ def UpdateReference(self, request, context):
+ """Associate a name with a CAS [Directory][build.bazel.remote.execution.v2.Directory]
digest.
Errors:
* `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
entry to the cache.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
- def Status(self, request, context):
- # missing associated documentation comment in .proto file
- pass
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ def Status(self, request, context):
+ # missing associated documentation comment in .proto file
+ pass
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
def add_ReferenceStorageServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetReference': grpc.unary_unary_rpc_method_handler(
- servicer.GetReference,
- request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.SerializeToString,
- ),
- 'UpdateReference': grpc.unary_unary_rpc_method_handler(
- servicer.UpdateReference,
- request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.SerializeToString,
- ),
- 'Status': grpc.unary_unary_rpc_method_handler(
- servicer.Status,
- request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'buildstream.v2.ReferenceStorage', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ "GetReference": grpc.unary_unary_rpc_method_handler(
+ servicer.GetReference,
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.SerializeToString,
+ ),
+ "UpdateReference": grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateReference,
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.SerializeToString,
+ ),
+ "Status": grpc.unary_unary_rpc_method_handler(
+ servicer.Status,
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "buildstream.v2.ReferenceStorage", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
class CapabilitiesStub(object):
- # missing associated documentation comment in .proto file
- pass
+ # missing associated documentation comment in .proto file
+ pass
- def __init__(self, channel):
- """Constructor.
+ def __init__(self, channel):
+ """Constructor.
Args:
channel: A grpc.Channel.
"""
- self.GetCapabilities = channel.unary_unary(
- '/buildstream.v2.Capabilities/GetCapabilities',
- request_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetCapabilitiesRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.FromString,
+ self.GetCapabilities = channel.unary_unary(
+ "/buildstream.v2.Capabilities/GetCapabilities",
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetCapabilitiesRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.FromString,
)
class CapabilitiesServicer(object):
- # missing associated documentation comment in .proto file
- pass
+ # missing associated documentation comment in .proto file
+ pass
- def GetCapabilities(self, request, context):
- """GetCapabilities mirrors
+ def GetCapabilities(self, request, context):
+ """GetCapabilities mirrors
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
def add_CapabilitiesServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetCapabilities': grpc.unary_unary_rpc_method_handler(
- servicer.GetCapabilities,
- request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetCapabilitiesRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'buildstream.v2.Capabilities', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ "GetCapabilities": grpc.unary_unary_rpc_method_handler(
+ servicer.GetCapabilities,
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetCapabilitiesRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.ServerCapabilities.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "buildstream.v2.Capabilities", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
diff --git a/src/buildstream/_protos/buildstream/v2/source_pb2.py b/src/buildstream/_protos/buildstream/v2/source_pb2.py
index 80ecb20e0..af1e456b9 100644
--- a/src/buildstream/_protos/buildstream/v2/source_pb2.py
+++ b/src/buildstream/_protos/buildstream/v2/source_pb2.py
@@ -3,212 +3,305 @@
# source: buildstream/v2/source.proto
import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
+
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
-from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2
-from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from buildstream._protos.build.bazel.remote.execution.v2 import (
+ remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2,
+)
+from buildstream._protos.google.api import (
+ annotations_pb2 as google_dot_api_dot_annotations__pb2,
+)
DESCRIPTOR = _descriptor.FileDescriptor(
- name='buildstream/v2/source.proto',
- package='buildstream.v2',
- syntax='proto3',
- serialized_options=None,
- serialized_pb=_b('\n\x1b\x62uildstream/v2/source.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"Q\n\x06Source\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x36\n\x05\x66iles\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"<\n\x10GetSourceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\"g\n\x13UpdateSourceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\x12&\n\x06source\x18\x03 \x01(\x0b\x32\x16.buildstream.v2.Source2\xa7\x01\n\rSourceService\x12G\n\tGetSource\x12 .buildstream.v2.GetSourceRequest\x1a\x16.buildstream.v2.Source\"\x00\x12M\n\x0cUpdateSource\x12#.buildstream.v2.UpdateSourceRequest\x1a\x16.buildstream.v2.Source\"\x00\x62\x06proto3')
- ,
- dependencies=[build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
-
-
+ name="buildstream/v2/source.proto",
+ package="buildstream.v2",
+ syntax="proto3",
+ serialized_options=None,
+ serialized_pb=_b(
+ '\n\x1b\x62uildstream/v2/source.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto"Q\n\x06Source\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x36\n\x05\x66iles\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest"<\n\x10GetSourceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t"g\n\x13UpdateSourceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x11\n\tcache_key\x18\x02 \x01(\t\x12&\n\x06source\x18\x03 \x01(\x0b\x32\x16.buildstream.v2.Source2\xa7\x01\n\rSourceService\x12G\n\tGetSource\x12 .buildstream.v2.GetSourceRequest\x1a\x16.buildstream.v2.Source"\x00\x12M\n\x0cUpdateSource\x12#.buildstream.v2.UpdateSourceRequest\x1a\x16.buildstream.v2.Source"\x00\x62\x06proto3'
+ ),
+ dependencies=[
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.DESCRIPTOR,
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ ],
+)
_SOURCE = _descriptor.Descriptor(
- name='Source',
- full_name='buildstream.v2.Source',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='version', full_name='buildstream.v2.Source.version', index=0,
- number=1, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='files', full_name='buildstream.v2.Source.files', index=1,
- number=2, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=133,
- serialized_end=214,
+ name="Source",
+ full_name="buildstream.v2.Source",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="version",
+ full_name="buildstream.v2.Source.version",
+ index=0,
+ number=1,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="files",
+ full_name="buildstream.v2.Source.files",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=133,
+ serialized_end=214,
)
_GETSOURCEREQUEST = _descriptor.Descriptor(
- name='GetSourceRequest',
- full_name='buildstream.v2.GetSourceRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='instance_name', full_name='buildstream.v2.GetSourceRequest.instance_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='cache_key', full_name='buildstream.v2.GetSourceRequest.cache_key', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=216,
- serialized_end=276,
+ name="GetSourceRequest",
+ full_name="buildstream.v2.GetSourceRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="instance_name",
+ full_name="buildstream.v2.GetSourceRequest.instance_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="cache_key",
+ full_name="buildstream.v2.GetSourceRequest.cache_key",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=216,
+ serialized_end=276,
)
_UPDATESOURCEREQUEST = _descriptor.Descriptor(
- name='UpdateSourceRequest',
- full_name='buildstream.v2.UpdateSourceRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='instance_name', full_name='buildstream.v2.UpdateSourceRequest.instance_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='cache_key', full_name='buildstream.v2.UpdateSourceRequest.cache_key', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='source', full_name='buildstream.v2.UpdateSourceRequest.source', index=2,
- number=3, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=278,
- serialized_end=381,
+ name="UpdateSourceRequest",
+ full_name="buildstream.v2.UpdateSourceRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="instance_name",
+ full_name="buildstream.v2.UpdateSourceRequest.instance_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="cache_key",
+ full_name="buildstream.v2.UpdateSourceRequest.cache_key",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="source",
+ full_name="buildstream.v2.UpdateSourceRequest.source",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=278,
+ serialized_end=381,
)
-_SOURCE.fields_by_name['files'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
-_UPDATESOURCEREQUEST.fields_by_name['source'].message_type = _SOURCE
-DESCRIPTOR.message_types_by_name['Source'] = _SOURCE
-DESCRIPTOR.message_types_by_name['GetSourceRequest'] = _GETSOURCEREQUEST
-DESCRIPTOR.message_types_by_name['UpdateSourceRequest'] = _UPDATESOURCEREQUEST
+_SOURCE.fields_by_name[
+ "files"
+].message_type = (
+ build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
+)
+_UPDATESOURCEREQUEST.fields_by_name["source"].message_type = _SOURCE
+DESCRIPTOR.message_types_by_name["Source"] = _SOURCE
+DESCRIPTOR.message_types_by_name["GetSourceRequest"] = _GETSOURCEREQUEST
+DESCRIPTOR.message_types_by_name["UpdateSourceRequest"] = _UPDATESOURCEREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-Source = _reflection.GeneratedProtocolMessageType('Source', (_message.Message,), {
- 'DESCRIPTOR' : _SOURCE,
- '__module__' : 'buildstream.v2.source_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.Source)
- })
+Source = _reflection.GeneratedProtocolMessageType(
+ "Source",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _SOURCE,
+ "__module__": "buildstream.v2.source_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.Source)
+ },
+)
_sym_db.RegisterMessage(Source)
-GetSourceRequest = _reflection.GeneratedProtocolMessageType('GetSourceRequest', (_message.Message,), {
- 'DESCRIPTOR' : _GETSOURCEREQUEST,
- '__module__' : 'buildstream.v2.source_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.GetSourceRequest)
- })
+GetSourceRequest = _reflection.GeneratedProtocolMessageType(
+ "GetSourceRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETSOURCEREQUEST,
+ "__module__": "buildstream.v2.source_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.GetSourceRequest)
+ },
+)
_sym_db.RegisterMessage(GetSourceRequest)
-UpdateSourceRequest = _reflection.GeneratedProtocolMessageType('UpdateSourceRequest', (_message.Message,), {
- 'DESCRIPTOR' : _UPDATESOURCEREQUEST,
- '__module__' : 'buildstream.v2.source_pb2'
- # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateSourceRequest)
- })
+UpdateSourceRequest = _reflection.GeneratedProtocolMessageType(
+ "UpdateSourceRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _UPDATESOURCEREQUEST,
+ "__module__": "buildstream.v2.source_pb2"
+ # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateSourceRequest)
+ },
+)
_sym_db.RegisterMessage(UpdateSourceRequest)
-
_SOURCESERVICE = _descriptor.ServiceDescriptor(
- name='SourceService',
- full_name='buildstream.v2.SourceService',
- file=DESCRIPTOR,
- index=0,
- serialized_options=None,
- serialized_start=384,
- serialized_end=551,
- methods=[
- _descriptor.MethodDescriptor(
- name='GetSource',
- full_name='buildstream.v2.SourceService.GetSource',
+ name="SourceService",
+ full_name="buildstream.v2.SourceService",
+ file=DESCRIPTOR,
index=0,
- containing_service=None,
- input_type=_GETSOURCEREQUEST,
- output_type=_SOURCE,
serialized_options=None,
- ),
- _descriptor.MethodDescriptor(
- name='UpdateSource',
- full_name='buildstream.v2.SourceService.UpdateSource',
- index=1,
- containing_service=None,
- input_type=_UPDATESOURCEREQUEST,
- output_type=_SOURCE,
- serialized_options=None,
- ),
-])
+ serialized_start=384,
+ serialized_end=551,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="GetSource",
+ full_name="buildstream.v2.SourceService.GetSource",
+ index=0,
+ containing_service=None,
+ input_type=_GETSOURCEREQUEST,
+ output_type=_SOURCE,
+ serialized_options=None,
+ ),
+ _descriptor.MethodDescriptor(
+ name="UpdateSource",
+ full_name="buildstream.v2.SourceService.UpdateSource",
+ index=1,
+ containing_service=None,
+ input_type=_UPDATESOURCEREQUEST,
+ output_type=_SOURCE,
+ serialized_options=None,
+ ),
+ ],
+)
_sym_db.RegisterServiceDescriptor(_SOURCESERVICE)
-DESCRIPTOR.services_by_name['SourceService'] = _SOURCESERVICE
+DESCRIPTOR.services_by_name["SourceService"] = _SOURCESERVICE
# @@protoc_insertion_point(module_scope)
diff --git a/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py b/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
index ecf734afb..11958366b 100644
--- a/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
+++ b/src/buildstream/_protos/buildstream/v2/source_pb2_grpc.py
@@ -1,70 +1,73 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.buildstream.v2 import source_pb2 as buildstream_dot_v2_dot_source__pb2
+from buildstream._protos.buildstream.v2 import (
+ source_pb2 as buildstream_dot_v2_dot_source__pb2,
+)
class SourceServiceStub(object):
- # missing associated documentation comment in .proto file
- pass
+ # missing associated documentation comment in .proto file
+ pass
- def __init__(self, channel):
- """Constructor.
+ def __init__(self, channel):
+ """Constructor.
Args:
channel: A grpc.Channel.
"""
- self.GetSource = channel.unary_unary(
- '/buildstream.v2.SourceService/GetSource',
- request_serializer=buildstream_dot_v2_dot_source__pb2.GetSourceRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_source__pb2.Source.FromString,
+ self.GetSource = channel.unary_unary(
+ "/buildstream.v2.SourceService/GetSource",
+ request_serializer=buildstream_dot_v2_dot_source__pb2.GetSourceRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_source__pb2.Source.FromString,
)
- self.UpdateSource = channel.unary_unary(
- '/buildstream.v2.SourceService/UpdateSource',
- request_serializer=buildstream_dot_v2_dot_source__pb2.UpdateSourceRequest.SerializeToString,
- response_deserializer=buildstream_dot_v2_dot_source__pb2.Source.FromString,
+ self.UpdateSource = channel.unary_unary(
+ "/buildstream.v2.SourceService/UpdateSource",
+ request_serializer=buildstream_dot_v2_dot_source__pb2.UpdateSourceRequest.SerializeToString,
+ response_deserializer=buildstream_dot_v2_dot_source__pb2.Source.FromString,
)
class SourceServiceServicer(object):
- # missing associated documentation comment in .proto file
- pass
+ # missing associated documentation comment in .proto file
+ pass
- def GetSource(self, request, context):
- """Retrieve a source message given a reference name from the service
+ def GetSource(self, request, context):
+ """Retrieve a source message given a reference name from the service
Errors:
* `NOT_FOUND`: The requested reference is not in the cache.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
- def UpdateSource(self, request, context):
- """Sets a source message on the service
+ def UpdateSource(self, request, context):
+ """Sets a source message on the service
Errors:
* `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
entry to the cache.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
def add_SourceServiceServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetSource': grpc.unary_unary_rpc_method_handler(
- servicer.GetSource,
- request_deserializer=buildstream_dot_v2_dot_source__pb2.GetSourceRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
- ),
- 'UpdateSource': grpc.unary_unary_rpc_method_handler(
- servicer.UpdateSource,
- request_deserializer=buildstream_dot_v2_dot_source__pb2.UpdateSourceRequest.FromString,
- response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'buildstream.v2.SourceService', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ "GetSource": grpc.unary_unary_rpc_method_handler(
+ servicer.GetSource,
+ request_deserializer=buildstream_dot_v2_dot_source__pb2.GetSourceRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
+ ),
+ "UpdateSource": grpc.unary_unary_rpc_method_handler(
+ servicer.UpdateSource,
+ request_deserializer=buildstream_dot_v2_dot_source__pb2.UpdateSourceRequest.FromString,
+ response_serializer=buildstream_dot_v2_dot_source__pb2.Source.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "buildstream.v2.SourceService", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
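The SourceService stubs diffed above expose two unary RPCs, GetSource and UpdateSource, both returning a buildstream.v2.Source (GetSource reports NOT_FOUND for an uncached key, UpdateSource RESOURCE_EXHAUSTED when storage runs out). A minimal client sketch under stated assumptions — the localhost:50051 endpoint and the cache key are hypothetical, only the generated classes shown in this diff are used:

    # Illustrative only; server address and cache key below are made up.
    import grpc

    from buildstream._protos.buildstream.v2 import source_pb2, source_pb2_grpc

    channel = grpc.insecure_channel("localhost:50051")  # hypothetical endpoint
    stub = source_pb2_grpc.SourceServiceStub(channel)

    # GetSourceRequest carries an instance name and a cache key (both strings).
    request = source_pb2.GetSourceRequest(instance_name="", cache_key="deadbeef")
    try:
        source = stub.GetSource(request)
        # Source has an int32 `version` and a remote-execution Digest `files`.
        print(source.version, source.files.hash)
    except grpc.RpcError as err:
        print("GetSource failed:", err.code())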
diff --git a/src/buildstream/_protos/google/api/annotations_pb2.py b/src/buildstream/_protos/google/api/annotations_pb2.py
index 707fae87f..b68e2147c 100644
--- a/src/buildstream/_protos/google/api/annotations_pb2.py
+++ b/src/buildstream/_protos/google/api/annotations_pb2.py
@@ -3,11 +3,13 @@
# source: google/api/annotations.proto
import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
+
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
@@ -18,25 +20,43 @@ from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor
DESCRIPTOR = _descriptor.FileDescriptor(
- name='google/api/annotations.proto',
- package='google.api',
- syntax='proto3',
- serialized_options=_b('\n\016com.google.apiB\020AnnotationsProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\242\002\004GAPI'),
- serialized_pb=_b('\n\x1cgoogle/api/annotations.proto\x12\ngoogle.api\x1a\x15google/api/http.proto\x1a google/protobuf/descriptor.proto:E\n\x04http\x12\x1e.google.protobuf.MethodOptions\x18\xb0\xca\xbc\" \x01(\x0b\x32\x14.google.api.HttpRuleBn\n\x0e\x63om.google.apiB\x10\x41nnotationsProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3')
- ,
- dependencies=[google_dot_api_dot_http__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
+ name="google/api/annotations.proto",
+ package="google.api",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\016com.google.apiB\020AnnotationsProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\242\002\004GAPI"
+ ),
+ serialized_pb=_b(
+ '\n\x1cgoogle/api/annotations.proto\x12\ngoogle.api\x1a\x15google/api/http.proto\x1a google/protobuf/descriptor.proto:E\n\x04http\x12\x1e.google.protobuf.MethodOptions\x18\xb0\xca\xbc" \x01(\x0b\x32\x14.google.api.HttpRuleBn\n\x0e\x63om.google.apiB\x10\x41nnotationsProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3'
+ ),
+ dependencies=[
+ google_dot_api_dot_http__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,
+ ],
+)
HTTP_FIELD_NUMBER = 72295728
http = _descriptor.FieldDescriptor(
- name='http', full_name='google.api.http', index=0,
- number=72295728, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=True, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR)
-
-DESCRIPTOR.extensions_by_name['http'] = http
+ name="http",
+ full_name="google.api.http",
+ index=0,
+ number=72295728,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=True,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+)
+
+DESCRIPTOR.extensions_by_name["http"] = http
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
http.message_type = google_dot_api_dot_http__pb2._HTTPRULE
diff --git a/src/buildstream/_protos/google/api/annotations_pb2_grpc.py b/src/buildstream/_protos/google/api/annotations_pb2_grpc.py
index a89435267..07cb78fe0 100644
--- a/src/buildstream/_protos/google/api/annotations_pb2_grpc.py
+++ b/src/buildstream/_protos/google/api/annotations_pb2_grpc.py
@@ -1,3 +1,2 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-
diff --git a/src/buildstream/_protos/google/api/http_pb2.py b/src/buildstream/_protos/google/api/http_pb2.py
index 601b12a08..b85402af0 100644
--- a/src/buildstream/_protos/google/api/http_pb2.py
+++ b/src/buildstream/_protos/google/api/http_pb2.py
@@ -3,239 +3,390 @@
# source: google/api/http.proto
import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
+
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
-
-
DESCRIPTOR = _descriptor.FileDescriptor(
- name='google/api/http.proto',
- package='google.api',
- syntax='proto3',
- serialized_options=_b('\n\016com.google.apiB\tHttpProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\370\001\001\242\002\004GAPI'),
- serialized_pb=_b('\n\x15google/api/http.proto\x12\ngoogle.api\"T\n\x04Http\x12#\n\x05rules\x18\x01 \x03(\x0b\x32\x14.google.api.HttpRule\x12\'\n\x1f\x66ully_decode_reserved_expansion\x18\x02 \x01(\x08\"\xea\x01\n\x08HttpRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12\r\n\x03get\x18\x02 \x01(\tH\x00\x12\r\n\x03put\x18\x03 \x01(\tH\x00\x12\x0e\n\x04post\x18\x04 \x01(\tH\x00\x12\x10\n\x06\x64\x65lete\x18\x05 \x01(\tH\x00\x12\x0f\n\x05patch\x18\x06 \x01(\tH\x00\x12/\n\x06\x63ustom\x18\x08 \x01(\x0b\x32\x1d.google.api.CustomHttpPatternH\x00\x12\x0c\n\x04\x62ody\x18\x07 \x01(\t\x12\x31\n\x13\x61\x64\x64itional_bindings\x18\x0b \x03(\x0b\x32\x14.google.api.HttpRuleB\t\n\x07pattern\"/\n\x11\x43ustomHttpPattern\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\tBj\n\x0e\x63om.google.apiB\tHttpProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3')
+ name="google/api/http.proto",
+ package="google.api",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\016com.google.apiB\tHttpProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\370\001\001\242\002\004GAPI"
+ ),
+ serialized_pb=_b(
+ '\n\x15google/api/http.proto\x12\ngoogle.api"T\n\x04Http\x12#\n\x05rules\x18\x01 \x03(\x0b\x32\x14.google.api.HttpRule\x12\'\n\x1f\x66ully_decode_reserved_expansion\x18\x02 \x01(\x08"\xea\x01\n\x08HttpRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12\r\n\x03get\x18\x02 \x01(\tH\x00\x12\r\n\x03put\x18\x03 \x01(\tH\x00\x12\x0e\n\x04post\x18\x04 \x01(\tH\x00\x12\x10\n\x06\x64\x65lete\x18\x05 \x01(\tH\x00\x12\x0f\n\x05patch\x18\x06 \x01(\tH\x00\x12/\n\x06\x63ustom\x18\x08 \x01(\x0b\x32\x1d.google.api.CustomHttpPatternH\x00\x12\x0c\n\x04\x62ody\x18\x07 \x01(\t\x12\x31\n\x13\x61\x64\x64itional_bindings\x18\x0b \x03(\x0b\x32\x14.google.api.HttpRuleB\t\n\x07pattern"/\n\x11\x43ustomHttpPattern\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\tBj\n\x0e\x63om.google.apiB\tHttpProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3'
+ ),
)
-
-
_HTTP = _descriptor.Descriptor(
- name='Http',
- full_name='google.api.Http',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='rules', full_name='google.api.Http.rules', index=0,
- number=1, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='fully_decode_reserved_expansion', full_name='google.api.Http.fully_decode_reserved_expansion', index=1,
- number=2, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=37,
- serialized_end=121,
+ name="Http",
+ full_name="google.api.Http",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="rules",
+ full_name="google.api.Http.rules",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="fully_decode_reserved_expansion",
+ full_name="google.api.Http.fully_decode_reserved_expansion",
+ index=1,
+ number=2,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=37,
+ serialized_end=121,
)
_HTTPRULE = _descriptor.Descriptor(
- name='HttpRule',
- full_name='google.api.HttpRule',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='selector', full_name='google.api.HttpRule.selector', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='get', full_name='google.api.HttpRule.get', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='put', full_name='google.api.HttpRule.put', index=2,
- number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='post', full_name='google.api.HttpRule.post', index=3,
- number=4, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='delete', full_name='google.api.HttpRule.delete', index=4,
- number=5, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='patch', full_name='google.api.HttpRule.patch', index=5,
- number=6, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='custom', full_name='google.api.HttpRule.custom', index=6,
- number=8, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='body', full_name='google.api.HttpRule.body', index=7,
- number=7, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='additional_bindings', full_name='google.api.HttpRule.additional_bindings', index=8,
- number=11, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name='pattern', full_name='google.api.HttpRule.pattern',
- index=0, containing_type=None, fields=[]),
- ],
- serialized_start=124,
- serialized_end=358,
+ name="HttpRule",
+ full_name="google.api.HttpRule",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="selector",
+ full_name="google.api.HttpRule.selector",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="get",
+ full_name="google.api.HttpRule.get",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="put",
+ full_name="google.api.HttpRule.put",
+ index=2,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="post",
+ full_name="google.api.HttpRule.post",
+ index=3,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="delete",
+ full_name="google.api.HttpRule.delete",
+ index=4,
+ number=5,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="patch",
+ full_name="google.api.HttpRule.patch",
+ index=5,
+ number=6,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="custom",
+ full_name="google.api.HttpRule.custom",
+ index=6,
+ number=8,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="body",
+ full_name="google.api.HttpRule.body",
+ index=7,
+ number=7,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="additional_bindings",
+ full_name="google.api.HttpRule.additional_bindings",
+ index=8,
+ number=11,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="pattern",
+ full_name="google.api.HttpRule.pattern",
+ index=0,
+ containing_type=None,
+ fields=[],
+ ),
+ ],
+ serialized_start=124,
+ serialized_end=358,
)
_CUSTOMHTTPPATTERN = _descriptor.Descriptor(
- name='CustomHttpPattern',
- full_name='google.api.CustomHttpPattern',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='kind', full_name='google.api.CustomHttpPattern.kind', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='path', full_name='google.api.CustomHttpPattern.path', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=360,
- serialized_end=407,
+ name="CustomHttpPattern",
+ full_name="google.api.CustomHttpPattern",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="kind",
+ full_name="google.api.CustomHttpPattern.kind",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="path",
+ full_name="google.api.CustomHttpPattern.path",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=360,
+ serialized_end=407,
)
-_HTTP.fields_by_name['rules'].message_type = _HTTPRULE
-_HTTPRULE.fields_by_name['custom'].message_type = _CUSTOMHTTPPATTERN
-_HTTPRULE.fields_by_name['additional_bindings'].message_type = _HTTPRULE
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
- _HTTPRULE.fields_by_name['get'])
-_HTTPRULE.fields_by_name['get'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
- _HTTPRULE.fields_by_name['put'])
-_HTTPRULE.fields_by_name['put'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
- _HTTPRULE.fields_by_name['post'])
-_HTTPRULE.fields_by_name['post'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
- _HTTPRULE.fields_by_name['delete'])
-_HTTPRULE.fields_by_name['delete'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
- _HTTPRULE.fields_by_name['patch'])
-_HTTPRULE.fields_by_name['patch'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-_HTTPRULE.oneofs_by_name['pattern'].fields.append(
- _HTTPRULE.fields_by_name['custom'])
-_HTTPRULE.fields_by_name['custom'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
-DESCRIPTOR.message_types_by_name['Http'] = _HTTP
-DESCRIPTOR.message_types_by_name['HttpRule'] = _HTTPRULE
-DESCRIPTOR.message_types_by_name['CustomHttpPattern'] = _CUSTOMHTTPPATTERN
+_HTTP.fields_by_name["rules"].message_type = _HTTPRULE
+_HTTPRULE.fields_by_name["custom"].message_type = _CUSTOMHTTPPATTERN
+_HTTPRULE.fields_by_name["additional_bindings"].message_type = _HTTPRULE
+_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["get"])
+_HTTPRULE.fields_by_name["get"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
+_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["put"])
+_HTTPRULE.fields_by_name["put"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
+_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["post"])
+_HTTPRULE.fields_by_name["post"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
+_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["delete"])
+_HTTPRULE.fields_by_name["delete"].containing_oneof = _HTTPRULE.oneofs_by_name[
+ "pattern"
+]
+_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["patch"])
+_HTTPRULE.fields_by_name["patch"].containing_oneof = _HTTPRULE.oneofs_by_name["pattern"]
+_HTTPRULE.oneofs_by_name["pattern"].fields.append(_HTTPRULE.fields_by_name["custom"])
+_HTTPRULE.fields_by_name["custom"].containing_oneof = _HTTPRULE.oneofs_by_name[
+ "pattern"
+]
+DESCRIPTOR.message_types_by_name["Http"] = _HTTP
+DESCRIPTOR.message_types_by_name["HttpRule"] = _HTTPRULE
+DESCRIPTOR.message_types_by_name["CustomHttpPattern"] = _CUSTOMHTTPPATTERN
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-Http = _reflection.GeneratedProtocolMessageType('Http', (_message.Message,), {
- 'DESCRIPTOR' : _HTTP,
- '__module__' : 'google.api.http_pb2'
- # @@protoc_insertion_point(class_scope:google.api.Http)
- })
+Http = _reflection.GeneratedProtocolMessageType(
+ "Http",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HTTP,
+ "__module__": "google.api.http_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.Http)
+ },
+)
_sym_db.RegisterMessage(Http)
-HttpRule = _reflection.GeneratedProtocolMessageType('HttpRule', (_message.Message,), {
- 'DESCRIPTOR' : _HTTPRULE,
- '__module__' : 'google.api.http_pb2'
- # @@protoc_insertion_point(class_scope:google.api.HttpRule)
- })
+HttpRule = _reflection.GeneratedProtocolMessageType(
+ "HttpRule",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _HTTPRULE,
+ "__module__": "google.api.http_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.HttpRule)
+ },
+)
_sym_db.RegisterMessage(HttpRule)
-CustomHttpPattern = _reflection.GeneratedProtocolMessageType('CustomHttpPattern', (_message.Message,), {
- 'DESCRIPTOR' : _CUSTOMHTTPPATTERN,
- '__module__' : 'google.api.http_pb2'
- # @@protoc_insertion_point(class_scope:google.api.CustomHttpPattern)
- })
+CustomHttpPattern = _reflection.GeneratedProtocolMessageType(
+ "CustomHttpPattern",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _CUSTOMHTTPPATTERN,
+ "__module__": "google.api.http_pb2"
+ # @@protoc_insertion_point(class_scope:google.api.CustomHttpPattern)
+ },
+)
_sym_db.RegisterMessage(CustomHttpPattern)
diff --git a/src/buildstream/_protos/google/api/http_pb2_grpc.py b/src/buildstream/_protos/google/api/http_pb2_grpc.py
index a89435267..07cb78fe0 100644
--- a/src/buildstream/_protos/google/api/http_pb2_grpc.py
+++ b/src/buildstream/_protos/google/api/http_pb2_grpc.py
@@ -1,3 +1,2 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-
diff --git a/src/buildstream/_protos/google/bytestream/bytestream_pb2.py b/src/buildstream/_protos/google/bytestream/bytestream_pb2.py
index c8787c7ac..4a0badd27 100644
--- a/src/buildstream/_protos/google/bytestream/bytestream_pb2.py
+++ b/src/buildstream/_protos/google/bytestream/bytestream_pb2.py
@@ -3,351 +3,499 @@
# source: google/bytestream/bytestream.proto
import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
+
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
-from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from buildstream._protos.google.api import (
+ annotations_pb2 as google_dot_api_dot_annotations__pb2,
+)
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
- name='google/bytestream/bytestream.proto',
- package='google.bytestream',
- syntax='proto3',
- serialized_options=_b('\n\025com.google.bytestreamB\017ByteStreamProtoZ;google.golang.org/genproto/googleapis/bytestream;bytestream'),
- serialized_pb=_b('\n\"google/bytestream/bytestream.proto\x12\x11google.bytestream\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/wrappers.proto\"M\n\x0bReadRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\x12\x13\n\x0bread_offset\x18\x02 \x01(\x03\x12\x12\n\nread_limit\x18\x03 \x01(\x03\"\x1c\n\x0cReadResponse\x12\x0c\n\x04\x64\x61ta\x18\n \x01(\x0c\"_\n\x0cWriteRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\x12\x14\n\x0cwrite_offset\x18\x02 \x01(\x03\x12\x14\n\x0c\x66inish_write\x18\x03 \x01(\x08\x12\x0c\n\x04\x64\x61ta\x18\n \x01(\x0c\"\'\n\rWriteResponse\x12\x16\n\x0e\x63ommitted_size\x18\x01 \x01(\x03\"0\n\x17QueryWriteStatusRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\"D\n\x18QueryWriteStatusResponse\x12\x16\n\x0e\x63ommitted_size\x18\x01 \x01(\x03\x12\x10\n\x08\x63omplete\x18\x02 \x01(\x08\x32\x92\x02\n\nByteStream\x12I\n\x04Read\x12\x1e.google.bytestream.ReadRequest\x1a\x1f.google.bytestream.ReadResponse0\x01\x12L\n\x05Write\x12\x1f.google.bytestream.WriteRequest\x1a .google.bytestream.WriteResponse(\x01\x12k\n\x10QueryWriteStatus\x12*.google.bytestream.QueryWriteStatusRequest\x1a+.google.bytestream.QueryWriteStatusResponseBe\n\x15\x63om.google.bytestreamB\x0f\x42yteStreamProtoZ;google.golang.org/genproto/googleapis/bytestream;bytestreamb\x06proto3')
- ,
- dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,])
-
-
+ name="google/bytestream/bytestream.proto",
+ package="google.bytestream",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\025com.google.bytestreamB\017ByteStreamProtoZ;google.golang.org/genproto/googleapis/bytestream;bytestream"
+ ),
+ serialized_pb=_b(
+ '\n"google/bytestream/bytestream.proto\x12\x11google.bytestream\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/wrappers.proto"M\n\x0bReadRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\x12\x13\n\x0bread_offset\x18\x02 \x01(\x03\x12\x12\n\nread_limit\x18\x03 \x01(\x03"\x1c\n\x0cReadResponse\x12\x0c\n\x04\x64\x61ta\x18\n \x01(\x0c"_\n\x0cWriteRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t\x12\x14\n\x0cwrite_offset\x18\x02 \x01(\x03\x12\x14\n\x0c\x66inish_write\x18\x03 \x01(\x08\x12\x0c\n\x04\x64\x61ta\x18\n \x01(\x0c"\'\n\rWriteResponse\x12\x16\n\x0e\x63ommitted_size\x18\x01 \x01(\x03"0\n\x17QueryWriteStatusRequest\x12\x15\n\rresource_name\x18\x01 \x01(\t"D\n\x18QueryWriteStatusResponse\x12\x16\n\x0e\x63ommitted_size\x18\x01 \x01(\x03\x12\x10\n\x08\x63omplete\x18\x02 \x01(\x08\x32\x92\x02\n\nByteStream\x12I\n\x04Read\x12\x1e.google.bytestream.ReadRequest\x1a\x1f.google.bytestream.ReadResponse0\x01\x12L\n\x05Write\x12\x1f.google.bytestream.WriteRequest\x1a .google.bytestream.WriteResponse(\x01\x12k\n\x10QueryWriteStatus\x12*.google.bytestream.QueryWriteStatusRequest\x1a+.google.bytestream.QueryWriteStatusResponseBe\n\x15\x63om.google.bytestreamB\x0f\x42yteStreamProtoZ;google.golang.org/genproto/googleapis/bytestream;bytestreamb\x06proto3'
+ ),
+ dependencies=[
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,
+ ],
+)
_READREQUEST = _descriptor.Descriptor(
- name='ReadRequest',
- full_name='google.bytestream.ReadRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='resource_name', full_name='google.bytestream.ReadRequest.resource_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='read_offset', full_name='google.bytestream.ReadRequest.read_offset', index=1,
- number=2, type=3, cpp_type=2, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='read_limit', full_name='google.bytestream.ReadRequest.read_limit', index=2,
- number=3, type=3, cpp_type=2, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=119,
- serialized_end=196,
+ name="ReadRequest",
+ full_name="google.bytestream.ReadRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="resource_name",
+ full_name="google.bytestream.ReadRequest.resource_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_offset",
+ full_name="google.bytestream.ReadRequest.read_offset",
+ index=1,
+ number=2,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="read_limit",
+ full_name="google.bytestream.ReadRequest.read_limit",
+ index=2,
+ number=3,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=119,
+ serialized_end=196,
)
_READRESPONSE = _descriptor.Descriptor(
- name='ReadResponse',
- full_name='google.bytestream.ReadResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='data', full_name='google.bytestream.ReadResponse.data', index=0,
- number=10, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=198,
- serialized_end=226,
+ name="ReadResponse",
+ full_name="google.bytestream.ReadResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="data",
+ full_name="google.bytestream.ReadResponse.data",
+ index=0,
+ number=10,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b(""),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=198,
+ serialized_end=226,
)
_WRITEREQUEST = _descriptor.Descriptor(
- name='WriteRequest',
- full_name='google.bytestream.WriteRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='resource_name', full_name='google.bytestream.WriteRequest.resource_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='write_offset', full_name='google.bytestream.WriteRequest.write_offset', index=1,
- number=2, type=3, cpp_type=2, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='finish_write', full_name='google.bytestream.WriteRequest.finish_write', index=2,
- number=3, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='data', full_name='google.bytestream.WriteRequest.data', index=3,
- number=10, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=228,
- serialized_end=323,
+ name="WriteRequest",
+ full_name="google.bytestream.WriteRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="resource_name",
+ full_name="google.bytestream.WriteRequest.resource_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="write_offset",
+ full_name="google.bytestream.WriteRequest.write_offset",
+ index=1,
+ number=2,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="finish_write",
+ full_name="google.bytestream.WriteRequest.finish_write",
+ index=2,
+ number=3,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="data",
+ full_name="google.bytestream.WriteRequest.data",
+ index=3,
+ number=10,
+ type=12,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b(""),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=228,
+ serialized_end=323,
)
_WRITERESPONSE = _descriptor.Descriptor(
- name='WriteResponse',
- full_name='google.bytestream.WriteResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='committed_size', full_name='google.bytestream.WriteResponse.committed_size', index=0,
- number=1, type=3, cpp_type=2, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=325,
- serialized_end=364,
+ name="WriteResponse",
+ full_name="google.bytestream.WriteResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="committed_size",
+ full_name="google.bytestream.WriteResponse.committed_size",
+ index=0,
+ number=1,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=325,
+ serialized_end=364,
)
_QUERYWRITESTATUSREQUEST = _descriptor.Descriptor(
- name='QueryWriteStatusRequest',
- full_name='google.bytestream.QueryWriteStatusRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='resource_name', full_name='google.bytestream.QueryWriteStatusRequest.resource_name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=366,
- serialized_end=414,
+ name="QueryWriteStatusRequest",
+ full_name="google.bytestream.QueryWriteStatusRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="resource_name",
+ full_name="google.bytestream.QueryWriteStatusRequest.resource_name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=366,
+ serialized_end=414,
)
_QUERYWRITESTATUSRESPONSE = _descriptor.Descriptor(
- name='QueryWriteStatusResponse',
- full_name='google.bytestream.QueryWriteStatusResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='committed_size', full_name='google.bytestream.QueryWriteStatusResponse.committed_size', index=0,
- number=1, type=3, cpp_type=2, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='complete', full_name='google.bytestream.QueryWriteStatusResponse.complete', index=1,
- number=2, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=416,
- serialized_end=484,
+ name="QueryWriteStatusResponse",
+ full_name="google.bytestream.QueryWriteStatusResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="committed_size",
+ full_name="google.bytestream.QueryWriteStatusResponse.committed_size",
+ index=0,
+ number=1,
+ type=3,
+ cpp_type=2,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="complete",
+ full_name="google.bytestream.QueryWriteStatusResponse.complete",
+ index=1,
+ number=2,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=416,
+ serialized_end=484,
)
-DESCRIPTOR.message_types_by_name['ReadRequest'] = _READREQUEST
-DESCRIPTOR.message_types_by_name['ReadResponse'] = _READRESPONSE
-DESCRIPTOR.message_types_by_name['WriteRequest'] = _WRITEREQUEST
-DESCRIPTOR.message_types_by_name['WriteResponse'] = _WRITERESPONSE
-DESCRIPTOR.message_types_by_name['QueryWriteStatusRequest'] = _QUERYWRITESTATUSREQUEST
-DESCRIPTOR.message_types_by_name['QueryWriteStatusResponse'] = _QUERYWRITESTATUSRESPONSE
+DESCRIPTOR.message_types_by_name["ReadRequest"] = _READREQUEST
+DESCRIPTOR.message_types_by_name["ReadResponse"] = _READRESPONSE
+DESCRIPTOR.message_types_by_name["WriteRequest"] = _WRITEREQUEST
+DESCRIPTOR.message_types_by_name["WriteResponse"] = _WRITERESPONSE
+DESCRIPTOR.message_types_by_name["QueryWriteStatusRequest"] = _QUERYWRITESTATUSREQUEST
+DESCRIPTOR.message_types_by_name["QueryWriteStatusResponse"] = _QUERYWRITESTATUSRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-ReadRequest = _reflection.GeneratedProtocolMessageType('ReadRequest', (_message.Message,), {
- 'DESCRIPTOR' : _READREQUEST,
- '__module__' : 'google.bytestream.bytestream_pb2'
- # @@protoc_insertion_point(class_scope:google.bytestream.ReadRequest)
- })
+ReadRequest = _reflection.GeneratedProtocolMessageType(
+ "ReadRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _READREQUEST,
+ "__module__": "google.bytestream.bytestream_pb2"
+ # @@protoc_insertion_point(class_scope:google.bytestream.ReadRequest)
+ },
+)
_sym_db.RegisterMessage(ReadRequest)
-ReadResponse = _reflection.GeneratedProtocolMessageType('ReadResponse', (_message.Message,), {
- 'DESCRIPTOR' : _READRESPONSE,
- '__module__' : 'google.bytestream.bytestream_pb2'
- # @@protoc_insertion_point(class_scope:google.bytestream.ReadResponse)
- })
+ReadResponse = _reflection.GeneratedProtocolMessageType(
+ "ReadResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _READRESPONSE,
+ "__module__": "google.bytestream.bytestream_pb2"
+ # @@protoc_insertion_point(class_scope:google.bytestream.ReadResponse)
+ },
+)
_sym_db.RegisterMessage(ReadResponse)
-WriteRequest = _reflection.GeneratedProtocolMessageType('WriteRequest', (_message.Message,), {
- 'DESCRIPTOR' : _WRITEREQUEST,
- '__module__' : 'google.bytestream.bytestream_pb2'
- # @@protoc_insertion_point(class_scope:google.bytestream.WriteRequest)
- })
+WriteRequest = _reflection.GeneratedProtocolMessageType(
+ "WriteRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _WRITEREQUEST,
+ "__module__": "google.bytestream.bytestream_pb2"
+ # @@protoc_insertion_point(class_scope:google.bytestream.WriteRequest)
+ },
+)
_sym_db.RegisterMessage(WriteRequest)
-WriteResponse = _reflection.GeneratedProtocolMessageType('WriteResponse', (_message.Message,), {
- 'DESCRIPTOR' : _WRITERESPONSE,
- '__module__' : 'google.bytestream.bytestream_pb2'
- # @@protoc_insertion_point(class_scope:google.bytestream.WriteResponse)
- })
+WriteResponse = _reflection.GeneratedProtocolMessageType(
+ "WriteResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _WRITERESPONSE,
+ "__module__": "google.bytestream.bytestream_pb2"
+ # @@protoc_insertion_point(class_scope:google.bytestream.WriteResponse)
+ },
+)
_sym_db.RegisterMessage(WriteResponse)
-QueryWriteStatusRequest = _reflection.GeneratedProtocolMessageType('QueryWriteStatusRequest', (_message.Message,), {
- 'DESCRIPTOR' : _QUERYWRITESTATUSREQUEST,
- '__module__' : 'google.bytestream.bytestream_pb2'
- # @@protoc_insertion_point(class_scope:google.bytestream.QueryWriteStatusRequest)
- })
+QueryWriteStatusRequest = _reflection.GeneratedProtocolMessageType(
+ "QueryWriteStatusRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _QUERYWRITESTATUSREQUEST,
+ "__module__": "google.bytestream.bytestream_pb2"
+ # @@protoc_insertion_point(class_scope:google.bytestream.QueryWriteStatusRequest)
+ },
+)
_sym_db.RegisterMessage(QueryWriteStatusRequest)
-QueryWriteStatusResponse = _reflection.GeneratedProtocolMessageType('QueryWriteStatusResponse', (_message.Message,), {
- 'DESCRIPTOR' : _QUERYWRITESTATUSRESPONSE,
- '__module__' : 'google.bytestream.bytestream_pb2'
- # @@protoc_insertion_point(class_scope:google.bytestream.QueryWriteStatusResponse)
- })
+QueryWriteStatusResponse = _reflection.GeneratedProtocolMessageType(
+ "QueryWriteStatusResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _QUERYWRITESTATUSRESPONSE,
+ "__module__": "google.bytestream.bytestream_pb2"
+ # @@protoc_insertion_point(class_scope:google.bytestream.QueryWriteStatusResponse)
+ },
+)
_sym_db.RegisterMessage(QueryWriteStatusResponse)
DESCRIPTOR._options = None
_BYTESTREAM = _descriptor.ServiceDescriptor(
- name='ByteStream',
- full_name='google.bytestream.ByteStream',
- file=DESCRIPTOR,
- index=0,
- serialized_options=None,
- serialized_start=487,
- serialized_end=761,
- methods=[
- _descriptor.MethodDescriptor(
- name='Read',
- full_name='google.bytestream.ByteStream.Read',
+ name="ByteStream",
+ full_name="google.bytestream.ByteStream",
+ file=DESCRIPTOR,
index=0,
- containing_service=None,
- input_type=_READREQUEST,
- output_type=_READRESPONSE,
- serialized_options=None,
- ),
- _descriptor.MethodDescriptor(
- name='Write',
- full_name='google.bytestream.ByteStream.Write',
- index=1,
- containing_service=None,
- input_type=_WRITEREQUEST,
- output_type=_WRITERESPONSE,
serialized_options=None,
- ),
- _descriptor.MethodDescriptor(
- name='QueryWriteStatus',
- full_name='google.bytestream.ByteStream.QueryWriteStatus',
- index=2,
- containing_service=None,
- input_type=_QUERYWRITESTATUSREQUEST,
- output_type=_QUERYWRITESTATUSRESPONSE,
- serialized_options=None,
- ),
-])
+ serialized_start=487,
+ serialized_end=761,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="Read",
+ full_name="google.bytestream.ByteStream.Read",
+ index=0,
+ containing_service=None,
+ input_type=_READREQUEST,
+ output_type=_READRESPONSE,
+ serialized_options=None,
+ ),
+ _descriptor.MethodDescriptor(
+ name="Write",
+ full_name="google.bytestream.ByteStream.Write",
+ index=1,
+ containing_service=None,
+ input_type=_WRITEREQUEST,
+ output_type=_WRITERESPONSE,
+ serialized_options=None,
+ ),
+ _descriptor.MethodDescriptor(
+ name="QueryWriteStatus",
+ full_name="google.bytestream.ByteStream.QueryWriteStatus",
+ index=2,
+ containing_service=None,
+ input_type=_QUERYWRITESTATUSREQUEST,
+ output_type=_QUERYWRITESTATUSRESPONSE,
+ serialized_options=None,
+ ),
+ ],
+)
_sym_db.RegisterServiceDescriptor(_BYTESTREAM)
-DESCRIPTOR.services_by_name['ByteStream'] = _BYTESTREAM
+DESCRIPTOR.services_by_name["ByteStream"] = _BYTESTREAM
# @@protoc_insertion_point(module_scope)
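The reformatted module above still registers the same generated message classes; as a minimal sanity sketch (assuming the buildstream._protos package is importable), the QueryWriteStatusResponse fields declared in the descriptor round-trip exactly as before:

from buildstream._protos.google.bytestream import bytestream_pb2

# Black only changes layout; the generated QueryWriteStatusResponse class behaves the same.
resp = bytestream_pb2.QueryWriteStatusResponse(committed_size=42, complete=True)
data = resp.SerializeToString()
assert bytestream_pb2.QueryWriteStatusResponse.FromString(data) == resp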
diff --git a/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py b/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
index ef993e040..a7b5ac589 100644
--- a/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
+++ b/src/buildstream/_protos/google/bytestream/bytestream_pb2_grpc.py
@@ -1,11 +1,13 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.google.bytestream import bytestream_pb2 as google_dot_bytestream_dot_bytestream__pb2
+from buildstream._protos.google.bytestream import (
+ bytestream_pb2 as google_dot_bytestream_dot_bytestream__pb2,
+)
class ByteStreamStub(object):
- """#### Introduction
+ """#### Introduction
The Byte Stream API enables a client to read and write a stream of bytes to
and from a resource. Resources have names, and these names are supplied in
@@ -30,31 +32,31 @@ class ByteStreamStub(object):
The errors returned by the service are in the Google canonical error space.
"""
- def __init__(self, channel):
- """Constructor.
+ def __init__(self, channel):
+ """Constructor.
Args:
channel: A grpc.Channel.
"""
- self.Read = channel.unary_stream(
- '/google.bytestream.ByteStream/Read',
- request_serializer=google_dot_bytestream_dot_bytestream__pb2.ReadRequest.SerializeToString,
- response_deserializer=google_dot_bytestream_dot_bytestream__pb2.ReadResponse.FromString,
+ self.Read = channel.unary_stream(
+ "/google.bytestream.ByteStream/Read",
+ request_serializer=google_dot_bytestream_dot_bytestream__pb2.ReadRequest.SerializeToString,
+ response_deserializer=google_dot_bytestream_dot_bytestream__pb2.ReadResponse.FromString,
)
- self.Write = channel.stream_unary(
- '/google.bytestream.ByteStream/Write',
- request_serializer=google_dot_bytestream_dot_bytestream__pb2.WriteRequest.SerializeToString,
- response_deserializer=google_dot_bytestream_dot_bytestream__pb2.WriteResponse.FromString,
+ self.Write = channel.stream_unary(
+ "/google.bytestream.ByteStream/Write",
+ request_serializer=google_dot_bytestream_dot_bytestream__pb2.WriteRequest.SerializeToString,
+ response_deserializer=google_dot_bytestream_dot_bytestream__pb2.WriteResponse.FromString,
)
- self.QueryWriteStatus = channel.unary_unary(
- '/google.bytestream.ByteStream/QueryWriteStatus',
- request_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusRequest.SerializeToString,
- response_deserializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.FromString,
+ self.QueryWriteStatus = channel.unary_unary(
+ "/google.bytestream.ByteStream/QueryWriteStatus",
+ request_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusRequest.SerializeToString,
+ response_deserializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.FromString,
)
class ByteStreamServicer(object):
- """#### Introduction
+ """#### Introduction
The Byte Stream API enables a client to read and write a stream of bytes to
and from a resource. Resources have names, and these names are supplied in
@@ -79,17 +81,17 @@ class ByteStreamServicer(object):
The errors returned by the service are in the Google canonical error space.
"""
- def Read(self, request, context):
- """`Read()` is used to retrieve the contents of a resource as a sequence
+ def Read(self, request, context):
+ """`Read()` is used to retrieve the contents of a resource as a sequence
of bytes. The bytes are returned in a sequence of responses, and the
responses are delivered as the results of a server-side streaming RPC.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
- def Write(self, request_iterator, context):
- """`Write()` is used to send the contents of a resource as a sequence of
+ def Write(self, request_iterator, context):
+ """`Write()` is used to send the contents of a resource as a sequence of
bytes. The bytes are sent in a sequence of request protos of a client-side
streaming RPC.
@@ -112,12 +114,12 @@ class ByteStreamServicer(object):
service was able to commit and whether the service views the resource as
`complete` or not.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
- def QueryWriteStatus(self, request, context):
- """`QueryWriteStatus()` is used to find the `committed_size` for a resource
+ def QueryWriteStatus(self, request, context):
+ """`QueryWriteStatus()` is used to find the `committed_size` for a resource
that is being written, which can then be used as the `write_offset` for
the next `Write()` call.
@@ -132,29 +134,30 @@ class ByteStreamServicer(object):
resource name, the sequence of returned `committed_size` values will be
non-decreasing.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
def add_ByteStreamServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'Read': grpc.unary_stream_rpc_method_handler(
- servicer.Read,
- request_deserializer=google_dot_bytestream_dot_bytestream__pb2.ReadRequest.FromString,
- response_serializer=google_dot_bytestream_dot_bytestream__pb2.ReadResponse.SerializeToString,
- ),
- 'Write': grpc.stream_unary_rpc_method_handler(
- servicer.Write,
- request_deserializer=google_dot_bytestream_dot_bytestream__pb2.WriteRequest.FromString,
- response_serializer=google_dot_bytestream_dot_bytestream__pb2.WriteResponse.SerializeToString,
- ),
- 'QueryWriteStatus': grpc.unary_unary_rpc_method_handler(
- servicer.QueryWriteStatus,
- request_deserializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusRequest.FromString,
- response_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'google.bytestream.ByteStream', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ "Read": grpc.unary_stream_rpc_method_handler(
+ servicer.Read,
+ request_deserializer=google_dot_bytestream_dot_bytestream__pb2.ReadRequest.FromString,
+ response_serializer=google_dot_bytestream_dot_bytestream__pb2.ReadResponse.SerializeToString,
+ ),
+ "Write": grpc.stream_unary_rpc_method_handler(
+ servicer.Write,
+ request_deserializer=google_dot_bytestream_dot_bytestream__pb2.WriteRequest.FromString,
+ response_serializer=google_dot_bytestream_dot_bytestream__pb2.WriteResponse.SerializeToString,
+ ),
+ "QueryWriteStatus": grpc.unary_unary_rpc_method_handler(
+ servicer.QueryWriteStatus,
+ request_deserializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusRequest.FromString,
+ response_serializer=google_dot_bytestream_dot_bytestream__pb2.QueryWriteStatusResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.bytestream.ByteStream", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
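For orientation, a minimal client sketch using the ByteStreamStub defined above; the endpoint and the resource_name field of QueryWriteStatusRequest are assumptions (the request's fields are not shown in this hunk), while committed_size and complete come from the descriptor diff earlier in this commit:

import grpc
from buildstream._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc

channel = grpc.insecure_channel("localhost:50051")  # hypothetical endpoint
stub = bytestream_pb2_grpc.ByteStreamStub(channel)

# resource_name is assumed from the ByteStream API; only the response fields
# (committed_size, complete) appear in the descriptor diff above.
status = stub.QueryWriteStatus(
    bytestream_pb2.QueryWriteStatusRequest(resource_name="uploads/blob-1234")
)
print(status.committed_size, status.complete)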
diff --git a/src/buildstream/_protos/google/longrunning/operations_pb2.py b/src/buildstream/_protos/google/longrunning/operations_pb2.py
index 01bd129b2..0b30b7c11 100644
--- a/src/buildstream/_protos/google/longrunning/operations_pb2.py
+++ b/src/buildstream/_protos/google/longrunning/operations_pb2.py
@@ -3,389 +3,580 @@
# source: google/longrunning/operations.proto
import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
+
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
-from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from buildstream._protos.google.api import (
+ annotations_pb2 as google_dot_api_dot_annotations__pb2,
+)
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from buildstream._protos.google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
- name='google/longrunning/operations.proto',
- package='google.longrunning',
- syntax='proto3',
- serialized_options=_b('\n\026com.google.longrunningB\017OperationsProtoP\001Z=google.golang.org/genproto/googleapis/longrunning;longrunning\252\002\022Google.LongRunning\312\002\022Google\\LongRunning'),
- serialized_pb=_b('\n#google/longrunning/operations.proto\x12\x12google.longrunning\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/protobuf/any.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/rpc/status.proto\"\xa8\x01\n\tOperation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x08metadata\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\x12#\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12(\n\x08response\x18\x05 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x42\x08\n\x06result\"#\n\x13GetOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\\\n\x15ListOperationsRequest\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"d\n\x16ListOperationsResponse\x12\x31\n\noperations\x18\x01 \x03(\x0b\x32\x1d.google.longrunning.Operation\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"&\n\x16\x43\x61ncelOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"&\n\x16\x44\x65leteOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\x8c\x04\n\nOperations\x12\x86\x01\n\x0eListOperations\x12).google.longrunning.ListOperationsRequest\x1a*.google.longrunning.ListOperationsResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/v1/{name=operations}\x12x\n\x0cGetOperation\x12\'.google.longrunning.GetOperationRequest\x1a\x1d.google.longrunning.Operation\" \x82\xd3\xe4\x93\x02\x1a\x12\x18/v1/{name=operations/**}\x12w\n\x0f\x44\x65leteOperation\x12*.google.longrunning.DeleteOperationRequest\x1a\x16.google.protobuf.Empty\" \x82\xd3\xe4\x93\x02\x1a*\x18/v1/{name=operations/**}\x12\x81\x01\n\x0f\x43\x61ncelOperation\x12*.google.longrunning.CancelOperationRequest\x1a\x16.google.protobuf.Empty\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1/{name=operations/**}:cancel:\x01*B\x94\x01\n\x16\x63om.google.longrunningB\x0fOperationsProtoP\x01Z=google.golang.org/genproto/googleapis/longrunning;longrunning\xaa\x02\x12Google.LongRunning\xca\x02\x12Google\\LongRunningb\x06proto3')
- ,
- dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,])
-
-
+ name="google/longrunning/operations.proto",
+ package="google.longrunning",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\026com.google.longrunningB\017OperationsProtoP\001Z=google.golang.org/genproto/googleapis/longrunning;longrunning\252\002\022Google.LongRunning\312\002\022Google\\LongRunning"
+ ),
+ serialized_pb=_b(
+ '\n#google/longrunning/operations.proto\x12\x12google.longrunning\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/protobuf/any.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/rpc/status.proto"\xa8\x01\n\tOperation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x08metadata\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\x12#\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12(\n\x08response\x18\x05 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x42\x08\n\x06result"#\n\x13GetOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x15ListOperationsRequest\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"d\n\x16ListOperationsResponse\x12\x31\n\noperations\x18\x01 \x03(\x0b\x32\x1d.google.longrunning.Operation\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"&\n\x16\x43\x61ncelOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16\x44\x65leteOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\x8c\x04\n\nOperations\x12\x86\x01\n\x0eListOperations\x12).google.longrunning.ListOperationsRequest\x1a*.google.longrunning.ListOperationsResponse"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/v1/{name=operations}\x12x\n\x0cGetOperation\x12\'.google.longrunning.GetOperationRequest\x1a\x1d.google.longrunning.Operation" \x82\xd3\xe4\x93\x02\x1a\x12\x18/v1/{name=operations/**}\x12w\n\x0f\x44\x65leteOperation\x12*.google.longrunning.DeleteOperationRequest\x1a\x16.google.protobuf.Empty" \x82\xd3\xe4\x93\x02\x1a*\x18/v1/{name=operations/**}\x12\x81\x01\n\x0f\x43\x61ncelOperation\x12*.google.longrunning.CancelOperationRequest\x1a\x16.google.protobuf.Empty"*\x82\xd3\xe4\x93\x02$"\x1f/v1/{name=operations/**}:cancel:\x01*B\x94\x01\n\x16\x63om.google.longrunningB\x0fOperationsProtoP\x01Z=google.golang.org/genproto/googleapis/longrunning;longrunning\xaa\x02\x12Google.LongRunning\xca\x02\x12Google\\LongRunningb\x06proto3'
+ ),
+ dependencies=[
+ google_dot_api_dot_annotations__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_any__pb2.DESCRIPTOR,
+ google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,
+ google_dot_rpc_dot_status__pb2.DESCRIPTOR,
+ ],
+)
_OPERATION = _descriptor.Descriptor(
- name='Operation',
- full_name='google.longrunning.Operation',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='google.longrunning.Operation.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='metadata', full_name='google.longrunning.Operation.metadata', index=1,
- number=2, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='done', full_name='google.longrunning.Operation.done', index=2,
- number=3, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='error', full_name='google.longrunning.Operation.error', index=3,
- number=4, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='response', full_name='google.longrunning.Operation.response', index=4,
- number=5, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name='result', full_name='google.longrunning.Operation.result',
- index=0, containing_type=None, fields=[]),
- ],
- serialized_start=171,
- serialized_end=339,
+ name="Operation",
+ full_name="google.longrunning.Operation",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.longrunning.Operation.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="metadata",
+ full_name="google.longrunning.Operation.metadata",
+ index=1,
+ number=2,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="done",
+ full_name="google.longrunning.Operation.done",
+ index=2,
+ number=3,
+ type=8,
+ cpp_type=7,
+ label=1,
+ has_default_value=False,
+ default_value=False,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="error",
+ full_name="google.longrunning.Operation.error",
+ index=3,
+ number=4,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="response",
+ full_name="google.longrunning.Operation.response",
+ index=4,
+ number=5,
+ type=11,
+ cpp_type=10,
+ label=1,
+ has_default_value=False,
+ default_value=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name="result",
+ full_name="google.longrunning.Operation.result",
+ index=0,
+ containing_type=None,
+ fields=[],
+ ),
+ ],
+ serialized_start=171,
+ serialized_end=339,
)
_GETOPERATIONREQUEST = _descriptor.Descriptor(
- name='GetOperationRequest',
- full_name='google.longrunning.GetOperationRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='google.longrunning.GetOperationRequest.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=341,
- serialized_end=376,
+ name="GetOperationRequest",
+ full_name="google.longrunning.GetOperationRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.longrunning.GetOperationRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=341,
+ serialized_end=376,
)
_LISTOPERATIONSREQUEST = _descriptor.Descriptor(
- name='ListOperationsRequest',
- full_name='google.longrunning.ListOperationsRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='google.longrunning.ListOperationsRequest.name', index=0,
- number=4, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='filter', full_name='google.longrunning.ListOperationsRequest.filter', index=1,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='page_size', full_name='google.longrunning.ListOperationsRequest.page_size', index=2,
- number=2, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='page_token', full_name='google.longrunning.ListOperationsRequest.page_token', index=3,
- number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=378,
- serialized_end=470,
+ name="ListOperationsRequest",
+ full_name="google.longrunning.ListOperationsRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.longrunning.ListOperationsRequest.name",
+ index=0,
+ number=4,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="filter",
+ full_name="google.longrunning.ListOperationsRequest.filter",
+ index=1,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_size",
+ full_name="google.longrunning.ListOperationsRequest.page_size",
+ index=2,
+ number=2,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="page_token",
+ full_name="google.longrunning.ListOperationsRequest.page_token",
+ index=3,
+ number=3,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=378,
+ serialized_end=470,
)
_LISTOPERATIONSRESPONSE = _descriptor.Descriptor(
- name='ListOperationsResponse',
- full_name='google.longrunning.ListOperationsResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='operations', full_name='google.longrunning.ListOperationsResponse.operations', index=0,
- number=1, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='next_page_token', full_name='google.longrunning.ListOperationsResponse.next_page_token', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=472,
- serialized_end=572,
+ name="ListOperationsResponse",
+ full_name="google.longrunning.ListOperationsResponse",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="operations",
+ full_name="google.longrunning.ListOperationsResponse.operations",
+ index=0,
+ number=1,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="next_page_token",
+ full_name="google.longrunning.ListOperationsResponse.next_page_token",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=472,
+ serialized_end=572,
)
_CANCELOPERATIONREQUEST = _descriptor.Descriptor(
- name='CancelOperationRequest',
- full_name='google.longrunning.CancelOperationRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='google.longrunning.CancelOperationRequest.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=574,
- serialized_end=612,
+ name="CancelOperationRequest",
+ full_name="google.longrunning.CancelOperationRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.longrunning.CancelOperationRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=574,
+ serialized_end=612,
)
_DELETEOPERATIONREQUEST = _descriptor.Descriptor(
- name='DeleteOperationRequest',
- full_name='google.longrunning.DeleteOperationRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='google.longrunning.DeleteOperationRequest.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=614,
- serialized_end=652,
+ name="DeleteOperationRequest",
+ full_name="google.longrunning.DeleteOperationRequest",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="name",
+ full_name="google.longrunning.DeleteOperationRequest.name",
+ index=0,
+ number=1,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=614,
+ serialized_end=652,
)
-_OPERATION.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-_OPERATION.fields_by_name['error'].message_type = google_dot_rpc_dot_status__pb2._STATUS
-_OPERATION.fields_by_name['response'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-_OPERATION.oneofs_by_name['result'].fields.append(
- _OPERATION.fields_by_name['error'])
-_OPERATION.fields_by_name['error'].containing_oneof = _OPERATION.oneofs_by_name['result']
-_OPERATION.oneofs_by_name['result'].fields.append(
- _OPERATION.fields_by_name['response'])
-_OPERATION.fields_by_name['response'].containing_oneof = _OPERATION.oneofs_by_name['result']
-_LISTOPERATIONSRESPONSE.fields_by_name['operations'].message_type = _OPERATION
-DESCRIPTOR.message_types_by_name['Operation'] = _OPERATION
-DESCRIPTOR.message_types_by_name['GetOperationRequest'] = _GETOPERATIONREQUEST
-DESCRIPTOR.message_types_by_name['ListOperationsRequest'] = _LISTOPERATIONSREQUEST
-DESCRIPTOR.message_types_by_name['ListOperationsResponse'] = _LISTOPERATIONSRESPONSE
-DESCRIPTOR.message_types_by_name['CancelOperationRequest'] = _CANCELOPERATIONREQUEST
-DESCRIPTOR.message_types_by_name['DeleteOperationRequest'] = _DELETEOPERATIONREQUEST
+_OPERATION.fields_by_name[
+ "metadata"
+].message_type = google_dot_protobuf_dot_any__pb2._ANY
+_OPERATION.fields_by_name["error"].message_type = google_dot_rpc_dot_status__pb2._STATUS
+_OPERATION.fields_by_name[
+ "response"
+].message_type = google_dot_protobuf_dot_any__pb2._ANY
+_OPERATION.oneofs_by_name["result"].fields.append(_OPERATION.fields_by_name["error"])
+_OPERATION.fields_by_name["error"].containing_oneof = _OPERATION.oneofs_by_name[
+ "result"
+]
+_OPERATION.oneofs_by_name["result"].fields.append(_OPERATION.fields_by_name["response"])
+_OPERATION.fields_by_name["response"].containing_oneof = _OPERATION.oneofs_by_name[
+ "result"
+]
+_LISTOPERATIONSRESPONSE.fields_by_name["operations"].message_type = _OPERATION
+DESCRIPTOR.message_types_by_name["Operation"] = _OPERATION
+DESCRIPTOR.message_types_by_name["GetOperationRequest"] = _GETOPERATIONREQUEST
+DESCRIPTOR.message_types_by_name["ListOperationsRequest"] = _LISTOPERATIONSREQUEST
+DESCRIPTOR.message_types_by_name["ListOperationsResponse"] = _LISTOPERATIONSRESPONSE
+DESCRIPTOR.message_types_by_name["CancelOperationRequest"] = _CANCELOPERATIONREQUEST
+DESCRIPTOR.message_types_by_name["DeleteOperationRequest"] = _DELETEOPERATIONREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-Operation = _reflection.GeneratedProtocolMessageType('Operation', (_message.Message,), {
- 'DESCRIPTOR' : _OPERATION,
- '__module__' : 'google.longrunning.operations_pb2'
- # @@protoc_insertion_point(class_scope:google.longrunning.Operation)
- })
+Operation = _reflection.GeneratedProtocolMessageType(
+ "Operation",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _OPERATION,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.Operation)
+ },
+)
_sym_db.RegisterMessage(Operation)
-GetOperationRequest = _reflection.GeneratedProtocolMessageType('GetOperationRequest', (_message.Message,), {
- 'DESCRIPTOR' : _GETOPERATIONREQUEST,
- '__module__' : 'google.longrunning.operations_pb2'
- # @@protoc_insertion_point(class_scope:google.longrunning.GetOperationRequest)
- })
+GetOperationRequest = _reflection.GeneratedProtocolMessageType(
+ "GetOperationRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _GETOPERATIONREQUEST,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.GetOperationRequest)
+ },
+)
_sym_db.RegisterMessage(GetOperationRequest)
-ListOperationsRequest = _reflection.GeneratedProtocolMessageType('ListOperationsRequest', (_message.Message,), {
- 'DESCRIPTOR' : _LISTOPERATIONSREQUEST,
- '__module__' : 'google.longrunning.operations_pb2'
- # @@protoc_insertion_point(class_scope:google.longrunning.ListOperationsRequest)
- })
+ListOperationsRequest = _reflection.GeneratedProtocolMessageType(
+ "ListOperationsRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTOPERATIONSREQUEST,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.ListOperationsRequest)
+ },
+)
_sym_db.RegisterMessage(ListOperationsRequest)
-ListOperationsResponse = _reflection.GeneratedProtocolMessageType('ListOperationsResponse', (_message.Message,), {
- 'DESCRIPTOR' : _LISTOPERATIONSRESPONSE,
- '__module__' : 'google.longrunning.operations_pb2'
- # @@protoc_insertion_point(class_scope:google.longrunning.ListOperationsResponse)
- })
+ListOperationsResponse = _reflection.GeneratedProtocolMessageType(
+ "ListOperationsResponse",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _LISTOPERATIONSRESPONSE,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.ListOperationsResponse)
+ },
+)
_sym_db.RegisterMessage(ListOperationsResponse)
-CancelOperationRequest = _reflection.GeneratedProtocolMessageType('CancelOperationRequest', (_message.Message,), {
- 'DESCRIPTOR' : _CANCELOPERATIONREQUEST,
- '__module__' : 'google.longrunning.operations_pb2'
- # @@protoc_insertion_point(class_scope:google.longrunning.CancelOperationRequest)
- })
+CancelOperationRequest = _reflection.GeneratedProtocolMessageType(
+ "CancelOperationRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _CANCELOPERATIONREQUEST,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.CancelOperationRequest)
+ },
+)
_sym_db.RegisterMessage(CancelOperationRequest)
-DeleteOperationRequest = _reflection.GeneratedProtocolMessageType('DeleteOperationRequest', (_message.Message,), {
- 'DESCRIPTOR' : _DELETEOPERATIONREQUEST,
- '__module__' : 'google.longrunning.operations_pb2'
- # @@protoc_insertion_point(class_scope:google.longrunning.DeleteOperationRequest)
- })
+DeleteOperationRequest = _reflection.GeneratedProtocolMessageType(
+ "DeleteOperationRequest",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _DELETEOPERATIONREQUEST,
+ "__module__": "google.longrunning.operations_pb2"
+ # @@protoc_insertion_point(class_scope:google.longrunning.DeleteOperationRequest)
+ },
+)
_sym_db.RegisterMessage(DeleteOperationRequest)
DESCRIPTOR._options = None
_OPERATIONS = _descriptor.ServiceDescriptor(
- name='Operations',
- full_name='google.longrunning.Operations',
- file=DESCRIPTOR,
- index=0,
- serialized_options=None,
- serialized_start=655,
- serialized_end=1179,
- methods=[
- _descriptor.MethodDescriptor(
- name='ListOperations',
- full_name='google.longrunning.Operations.ListOperations',
+ name="Operations",
+ full_name="google.longrunning.Operations",
+ file=DESCRIPTOR,
index=0,
- containing_service=None,
- input_type=_LISTOPERATIONSREQUEST,
- output_type=_LISTOPERATIONSRESPONSE,
- serialized_options=_b('\202\323\344\223\002\027\022\025/v1/{name=operations}'),
- ),
- _descriptor.MethodDescriptor(
- name='GetOperation',
- full_name='google.longrunning.Operations.GetOperation',
- index=1,
- containing_service=None,
- input_type=_GETOPERATIONREQUEST,
- output_type=_OPERATION,
- serialized_options=_b('\202\323\344\223\002\032\022\030/v1/{name=operations/**}'),
- ),
- _descriptor.MethodDescriptor(
- name='DeleteOperation',
- full_name='google.longrunning.Operations.DeleteOperation',
- index=2,
- containing_service=None,
- input_type=_DELETEOPERATIONREQUEST,
- output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b('\202\323\344\223\002\032*\030/v1/{name=operations/**}'),
- ),
- _descriptor.MethodDescriptor(
- name='CancelOperation',
- full_name='google.longrunning.Operations.CancelOperation',
- index=3,
- containing_service=None,
- input_type=_CANCELOPERATIONREQUEST,
- output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
- serialized_options=_b('\202\323\344\223\002$\"\037/v1/{name=operations/**}:cancel:\001*'),
- ),
-])
+ serialized_options=None,
+ serialized_start=655,
+ serialized_end=1179,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name="ListOperations",
+ full_name="google.longrunning.Operations.ListOperations",
+ index=0,
+ containing_service=None,
+ input_type=_LISTOPERATIONSREQUEST,
+ output_type=_LISTOPERATIONSRESPONSE,
+ serialized_options=_b(
+ "\202\323\344\223\002\027\022\025/v1/{name=operations}"
+ ),
+ ),
+ _descriptor.MethodDescriptor(
+ name="GetOperation",
+ full_name="google.longrunning.Operations.GetOperation",
+ index=1,
+ containing_service=None,
+ input_type=_GETOPERATIONREQUEST,
+ output_type=_OPERATION,
+ serialized_options=_b(
+ "\202\323\344\223\002\032\022\030/v1/{name=operations/**}"
+ ),
+ ),
+ _descriptor.MethodDescriptor(
+ name="DeleteOperation",
+ full_name="google.longrunning.Operations.DeleteOperation",
+ index=2,
+ containing_service=None,
+ input_type=_DELETEOPERATIONREQUEST,
+ output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
+ serialized_options=_b(
+ "\202\323\344\223\002\032*\030/v1/{name=operations/**}"
+ ),
+ ),
+ _descriptor.MethodDescriptor(
+ name="CancelOperation",
+ full_name="google.longrunning.Operations.CancelOperation",
+ index=3,
+ containing_service=None,
+ input_type=_CANCELOPERATIONREQUEST,
+ output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
+ serialized_options=_b(
+ '\202\323\344\223\002$"\037/v1/{name=operations/**}:cancel:\001*'
+ ),
+ ),
+ ],
+)
_sym_db.RegisterServiceDescriptor(_OPERATIONS)
-DESCRIPTOR.services_by_name['Operations'] = _OPERATIONS
+DESCRIPTOR.services_by_name["Operations"] = _OPERATIONS
# @@protoc_insertion_point(module_scope)
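The Operation descriptor above declares a "result" oneof over its error and response fields; a small sketch of how that oneof behaves in the generated class (the Any payload and the operation name are placeholders):

from google.protobuf import any_pb2
from buildstream._protos.google.longrunning import operations_pb2

# Setting one member of the "result" oneof clears the other.
op = operations_pb2.Operation(name="operations/123", done=True)
op.response.CopyFrom(any_pb2.Any())  # placeholder payload
assert op.WhichOneof("result") == "response"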
diff --git a/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py b/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
index 8f89862e7..24240730a 100644
--- a/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
+++ b/src/buildstream/_protos/google/longrunning/operations_pb2_grpc.py
@@ -1,12 +1,14 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-from buildstream._protos.google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2
+from buildstream._protos.google.longrunning import (
+ operations_pb2 as google_dot_longrunning_dot_operations__pb2,
+)
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class OperationsStub(object):
- """Manages long-running operations with an API service.
+ """Manages long-running operations with an API service.
When an API method normally takes long time to complete, it can be designed
to return [Operation][google.longrunning.Operation] to the client, and the client can use this
@@ -17,36 +19,36 @@ class OperationsStub(object):
so developers can have a consistent client experience.
"""
- def __init__(self, channel):
- """Constructor.
+ def __init__(self, channel):
+ """Constructor.
Args:
channel: A grpc.Channel.
"""
- self.ListOperations = channel.unary_unary(
- '/google.longrunning.Operations/ListOperations',
- request_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.FromString,
+ self.ListOperations = channel.unary_unary(
+ "/google.longrunning.Operations/ListOperations",
+ request_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.FromString,
)
- self.GetOperation = channel.unary_unary(
- '/google.longrunning.Operations/GetOperation',
- request_serializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.SerializeToString,
- response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ self.GetOperation = channel.unary_unary(
+ "/google.longrunning.Operations/GetOperation",
+ request_serializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.SerializeToString,
+ response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
)
- self.DeleteOperation = channel.unary_unary(
- '/google.longrunning.Operations/DeleteOperation',
- request_serializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.SerializeToString,
- response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ self.DeleteOperation = channel.unary_unary(
+ "/google.longrunning.Operations/DeleteOperation",
+ request_serializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
- self.CancelOperation = channel.unary_unary(
- '/google.longrunning.Operations/CancelOperation',
- request_serializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.SerializeToString,
- response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ self.CancelOperation = channel.unary_unary(
+ "/google.longrunning.Operations/CancelOperation",
+ request_serializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.SerializeToString,
+ response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
class OperationsServicer(object):
- """Manages long-running operations with an API service.
+ """Manages long-running operations with an API service.
When an API method normally takes long time to complete, it can be designed
to return [Operation][google.longrunning.Operation] to the client, and the client can use this
@@ -57,38 +59,38 @@ class OperationsServicer(object):
so developers can have a consistent client experience.
"""
- def ListOperations(self, request, context):
- """Lists operations that match the specified filter in the request. If the
+ def ListOperations(self, request, context):
+ """Lists operations that match the specified filter in the request. If the
server doesn't support this method, it returns `UNIMPLEMENTED`.
NOTE: the `name` binding below allows API services to override the binding
to use different resource name schemes, such as `users/*/operations`.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
- def GetOperation(self, request, context):
- """Gets the latest state of a long-running operation. Clients can use this
+ def GetOperation(self, request, context):
+ """Gets the latest state of a long-running operation. Clients can use this
method to poll the operation result at intervals as recommended by the API
service.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
- def DeleteOperation(self, request, context):
- """Deletes a long-running operation. This method indicates that the client is
+ def DeleteOperation(self, request, context):
+ """Deletes a long-running operation. This method indicates that the client is
no longer interested in the operation result. It does not cancel the
operation. If the server doesn't support this method, it returns
`google.rpc.Code.UNIMPLEMENTED`.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
- def CancelOperation(self, request, context):
- """Starts asynchronous cancellation on a long-running operation. The server
+ def CancelOperation(self, request, context):
+ """Starts asynchronous cancellation on a long-running operation. The server
makes a best effort to cancel the operation, but success is not
guaranteed. If the server doesn't support this method, it returns
`google.rpc.Code.UNIMPLEMENTED`. Clients can use
@@ -99,34 +101,35 @@ class OperationsServicer(object):
an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
corresponding to `Code.CANCELLED`.
"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details("Method not implemented!")
+ raise NotImplementedError("Method not implemented!")
def add_OperationsServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'ListOperations': grpc.unary_unary_rpc_method_handler(
- servicer.ListOperations,
- request_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.SerializeToString,
- ),
- 'GetOperation': grpc.unary_unary_rpc_method_handler(
- servicer.GetOperation,
- request_deserializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.FromString,
- response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
- ),
- 'DeleteOperation': grpc.unary_unary_rpc_method_handler(
- servicer.DeleteOperation,
- request_deserializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.FromString,
- response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- ),
- 'CancelOperation': grpc.unary_unary_rpc_method_handler(
- servicer.CancelOperation,
- request_deserializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.FromString,
- response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'google.longrunning.Operations', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
+ rpc_method_handlers = {
+ "ListOperations": grpc.unary_unary_rpc_method_handler(
+ servicer.ListOperations,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.ListOperationsRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.ListOperationsResponse.SerializeToString,
+ ),
+ "GetOperation": grpc.unary_unary_rpc_method_handler(
+ servicer.GetOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.GetOperationRequest.FromString,
+ response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
+ ),
+ "DeleteOperation": grpc.unary_unary_rpc_method_handler(
+ servicer.DeleteOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.DeleteOperationRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ "CancelOperation": grpc.unary_unary_rpc_method_handler(
+ servicer.CancelOperation,
+ request_deserializer=google_dot_longrunning_dot_operations__pb2.CancelOperationRequest.FromString,
+ response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ "google.longrunning.Operations", rpc_method_handlers
+ )
+ server.add_generic_rpc_handlers((generic_handler,))
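As a usage note for the OperationsStub above, a minimal polling sketch; the endpoint and operation name are hypothetical, while the name and done fields come from the descriptors earlier in this diff:

import time
import grpc
from buildstream._protos.google.longrunning import operations_pb2, operations_pb2_grpc

channel = grpc.insecure_channel("localhost:50051")  # hypothetical endpoint
ops = operations_pb2_grpc.OperationsStub(channel)

# Poll GetOperation until done is set; "operations/123" is a made-up name.
op = ops.GetOperation(operations_pb2.GetOperationRequest(name="operations/123"))
while not op.done:
    time.sleep(1)
    op = ops.GetOperation(operations_pb2.GetOperationRequest(name="operations/123"))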
diff --git a/src/buildstream/_protos/google/rpc/code_pb2.py b/src/buildstream/_protos/google/rpc/code_pb2.py
index 5131fa97e..120fb0812 100644
--- a/src/buildstream/_protos/google/rpc/code_pb2.py
+++ b/src/buildstream/_protos/google/rpc/code_pb2.py
@@ -3,106 +3,121 @@
# source: google/rpc/code.proto
import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
+
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
-
-
DESCRIPTOR = _descriptor.FileDescriptor(
- name='google/rpc/code.proto',
- package='google.rpc',
- syntax='proto3',
- serialized_options=_b('\n\016com.google.rpcB\tCodeProtoP\001Z3google.golang.org/genproto/googleapis/rpc/code;code\242\002\003RPC'),
- serialized_pb=_b('\n\x15google/rpc/code.proto\x12\ngoogle.rpc*\xb7\x02\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\r\n\tCANCELLED\x10\x01\x12\x0b\n\x07UNKNOWN\x10\x02\x12\x14\n\x10INVALID_ARGUMENT\x10\x03\x12\x15\n\x11\x44\x45\x41\x44LINE_EXCEEDED\x10\x04\x12\r\n\tNOT_FOUND\x10\x05\x12\x12\n\x0e\x41LREADY_EXISTS\x10\x06\x12\x15\n\x11PERMISSION_DENIED\x10\x07\x12\x13\n\x0fUNAUTHENTICATED\x10\x10\x12\x16\n\x12RESOURCE_EXHAUSTED\x10\x08\x12\x17\n\x13\x46\x41ILED_PRECONDITION\x10\t\x12\x0b\n\x07\x41\x42ORTED\x10\n\x12\x10\n\x0cOUT_OF_RANGE\x10\x0b\x12\x11\n\rUNIMPLEMENTED\x10\x0c\x12\x0c\n\x08INTERNAL\x10\r\x12\x0f\n\x0bUNAVAILABLE\x10\x0e\x12\r\n\tDATA_LOSS\x10\x0f\x42X\n\x0e\x63om.google.rpcB\tCodeProtoP\x01Z3google.golang.org/genproto/googleapis/rpc/code;code\xa2\x02\x03RPCb\x06proto3')
+ name="google/rpc/code.proto",
+ package="google.rpc",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\016com.google.rpcB\tCodeProtoP\001Z3google.golang.org/genproto/googleapis/rpc/code;code\242\002\003RPC"
+ ),
+ serialized_pb=_b(
+ "\n\x15google/rpc/code.proto\x12\ngoogle.rpc*\xb7\x02\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\r\n\tCANCELLED\x10\x01\x12\x0b\n\x07UNKNOWN\x10\x02\x12\x14\n\x10INVALID_ARGUMENT\x10\x03\x12\x15\n\x11\x44\x45\x41\x44LINE_EXCEEDED\x10\x04\x12\r\n\tNOT_FOUND\x10\x05\x12\x12\n\x0e\x41LREADY_EXISTS\x10\x06\x12\x15\n\x11PERMISSION_DENIED\x10\x07\x12\x13\n\x0fUNAUTHENTICATED\x10\x10\x12\x16\n\x12RESOURCE_EXHAUSTED\x10\x08\x12\x17\n\x13\x46\x41ILED_PRECONDITION\x10\t\x12\x0b\n\x07\x41\x42ORTED\x10\n\x12\x10\n\x0cOUT_OF_RANGE\x10\x0b\x12\x11\n\rUNIMPLEMENTED\x10\x0c\x12\x0c\n\x08INTERNAL\x10\r\x12\x0f\n\x0bUNAVAILABLE\x10\x0e\x12\r\n\tDATA_LOSS\x10\x0f\x42X\n\x0e\x63om.google.rpcB\tCodeProtoP\x01Z3google.golang.org/genproto/googleapis/rpc/code;code\xa2\x02\x03RPCb\x06proto3"
+ ),
)
_CODE = _descriptor.EnumDescriptor(
- name='Code',
- full_name='google.rpc.Code',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='OK', index=0, number=0,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='CANCELLED', index=1, number=1,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='UNKNOWN', index=2, number=2,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='INVALID_ARGUMENT', index=3, number=3,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='DEADLINE_EXCEEDED', index=4, number=4,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='NOT_FOUND', index=5, number=5,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='ALREADY_EXISTS', index=6, number=6,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='PERMISSION_DENIED', index=7, number=7,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='UNAUTHENTICATED', index=8, number=16,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='RESOURCE_EXHAUSTED', index=9, number=8,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='FAILED_PRECONDITION', index=10, number=9,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='ABORTED', index=11, number=10,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='OUT_OF_RANGE', index=12, number=11,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='UNIMPLEMENTED', index=13, number=12,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='INTERNAL', index=14, number=13,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='UNAVAILABLE', index=15, number=14,
- serialized_options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='DATA_LOSS', index=16, number=15,
- serialized_options=None,
- type=None),
- ],
- containing_type=None,
- serialized_options=None,
- serialized_start=38,
- serialized_end=349,
+ name="Code",
+ full_name="google.rpc.Code",
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name="OK", index=0, number=0, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="CANCELLED", index=1, number=1, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="UNKNOWN", index=2, number=2, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="INVALID_ARGUMENT",
+ index=3,
+ number=3,
+ serialized_options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="DEADLINE_EXCEEDED",
+ index=4,
+ number=4,
+ serialized_options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="NOT_FOUND", index=5, number=5, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ALREADY_EXISTS", index=6, number=6, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="PERMISSION_DENIED",
+ index=7,
+ number=7,
+ serialized_options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="UNAUTHENTICATED",
+ index=8,
+ number=16,
+ serialized_options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="RESOURCE_EXHAUSTED",
+ index=9,
+ number=8,
+ serialized_options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="FAILED_PRECONDITION",
+ index=10,
+ number=9,
+ serialized_options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="ABORTED", index=11, number=10, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="OUT_OF_RANGE", index=12, number=11, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="UNIMPLEMENTED",
+ index=13,
+ number=12,
+ serialized_options=None,
+ type=None,
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="INTERNAL", index=14, number=13, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="UNAVAILABLE", index=15, number=14, serialized_options=None, type=None
+ ),
+ _descriptor.EnumValueDescriptor(
+ name="DATA_LOSS", index=16, number=15, serialized_options=None, type=None
+ ),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=38,
+ serialized_end=349,
)
_sym_db.RegisterEnumDescriptor(_CODE)
@@ -126,7 +141,7 @@ UNAVAILABLE = 14
DATA_LOSS = 15
-DESCRIPTOR.enum_types_by_name['Code'] = _CODE
+DESCRIPTOR.enum_types_by_name["Code"] = _CODE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
diff --git a/src/buildstream/_protos/google/rpc/code_pb2_grpc.py b/src/buildstream/_protos/google/rpc/code_pb2_grpc.py
index a89435267..07cb78fe0 100644
--- a/src/buildstream/_protos/google/rpc/code_pb2_grpc.py
+++ b/src/buildstream/_protos/google/rpc/code_pb2_grpc.py
@@ -1,3 +1,2 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-
diff --git a/src/buildstream/_protos/google/rpc/status_pb2.py b/src/buildstream/_protos/google/rpc/status_pb2.py
index 7b353aec6..23d973223 100644
--- a/src/buildstream/_protos/google/rpc/status_pb2.py
+++ b/src/buildstream/_protos/google/rpc/status_pb2.py
@@ -3,11 +3,13 @@
# source: google/rpc/status.proto
import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
+
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
@@ -17,70 +19,106 @@ from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
- name='google/rpc/status.proto',
- package='google.rpc',
- syntax='proto3',
- serialized_options=_b('\n\016com.google.rpcB\013StatusProtoP\001Z7google.golang.org/genproto/googleapis/rpc/status;status\242\002\003RPC'),
- serialized_pb=_b('\n\x17google/rpc/status.proto\x12\ngoogle.rpc\x1a\x19google/protobuf/any.proto\"N\n\x06Status\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\x12%\n\x07\x64\x65tails\x18\x03 \x03(\x0b\x32\x14.google.protobuf.AnyB^\n\x0e\x63om.google.rpcB\x0bStatusProtoP\x01Z7google.golang.org/genproto/googleapis/rpc/status;status\xa2\x02\x03RPCb\x06proto3')
- ,
- dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
-
-
+ name="google/rpc/status.proto",
+ package="google.rpc",
+ syntax="proto3",
+ serialized_options=_b(
+ "\n\016com.google.rpcB\013StatusProtoP\001Z7google.golang.org/genproto/googleapis/rpc/status;status\242\002\003RPC"
+ ),
+ serialized_pb=_b(
+ '\n\x17google/rpc/status.proto\x12\ngoogle.rpc\x1a\x19google/protobuf/any.proto"N\n\x06Status\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\x12%\n\x07\x64\x65tails\x18\x03 \x03(\x0b\x32\x14.google.protobuf.AnyB^\n\x0e\x63om.google.rpcB\x0bStatusProtoP\x01Z7google.golang.org/genproto/googleapis/rpc/status;status\xa2\x02\x03RPCb\x06proto3'
+ ),
+ dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,],
+)
_STATUS = _descriptor.Descriptor(
- name='Status',
- full_name='google.rpc.Status',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='code', full_name='google.rpc.Status.code', index=0,
- number=1, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='message', full_name='google.rpc.Status.message', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='details', full_name='google.rpc.Status.details', index=2,
- number=3, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=66,
- serialized_end=144,
+ name="Status",
+ full_name="google.rpc.Status",
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name="code",
+ full_name="google.rpc.Status.code",
+ index=0,
+ number=1,
+ type=5,
+ cpp_type=1,
+ label=1,
+ has_default_value=False,
+ default_value=0,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="message",
+ full_name="google.rpc.Status.message",
+ index=1,
+ number=2,
+ type=9,
+ cpp_type=9,
+ label=1,
+ has_default_value=False,
+ default_value=_b("").decode("utf-8"),
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ _descriptor.FieldDescriptor(
+ name="details",
+ full_name="google.rpc.Status.details",
+ index=2,
+ number=3,
+ type=11,
+ cpp_type=10,
+ label=3,
+ has_default_value=False,
+ default_value=[],
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ is_extension=False,
+ extension_scope=None,
+ serialized_options=None,
+ file=DESCRIPTOR,
+ ),
+ ],
+ extensions=[],
+ nested_types=[],
+ enum_types=[],
+ serialized_options=None,
+ is_extendable=False,
+ syntax="proto3",
+ extension_ranges=[],
+ oneofs=[],
+ serialized_start=66,
+ serialized_end=144,
)
-_STATUS.fields_by_name['details'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-DESCRIPTOR.message_types_by_name['Status'] = _STATUS
+_STATUS.fields_by_name["details"].message_type = google_dot_protobuf_dot_any__pb2._ANY
+DESCRIPTOR.message_types_by_name["Status"] = _STATUS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-Status = _reflection.GeneratedProtocolMessageType('Status', (_message.Message,), {
- 'DESCRIPTOR' : _STATUS,
- '__module__' : 'google.rpc.status_pb2'
- # @@protoc_insertion_point(class_scope:google.rpc.Status)
- })
+Status = _reflection.GeneratedProtocolMessageType(
+ "Status",
+ (_message.Message,),
+ {
+ "DESCRIPTOR": _STATUS,
+ "__module__": "google.rpc.status_pb2"
+ # @@protoc_insertion_point(class_scope:google.rpc.Status)
+ },
+)
_sym_db.RegisterMessage(Status)
diff --git a/src/buildstream/_protos/google/rpc/status_pb2_grpc.py b/src/buildstream/_protos/google/rpc/status_pb2_grpc.py
index a89435267..07cb78fe0 100644
--- a/src/buildstream/_protos/google/rpc/status_pb2_grpc.py
+++ b/src/buildstream/_protos/google/rpc/status_pb2_grpc.py
@@ -1,3 +1,2 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
-
diff --git a/src/buildstream/_remote.py b/src/buildstream/_remote.py
index 8527ca4cc..671adb95c 100644
--- a/src/buildstream/_remote.py
+++ b/src/buildstream/_remote.py
@@ -35,14 +35,18 @@ class RemoteType(FastEnum):
ALL = "all"
def __str__(self):
- return self.name.lower().replace('_', '-')
+ return self.name.lower().replace("_", "-")
# RemoteSpec():
#
# Defines the basic structure of a remote specification.
#
-class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key client_cert instance_name type')):
+class RemoteSpec(
+ namedtuple(
+ "RemoteSpec", "url push server_cert client_key client_cert instance_name type"
+ )
+):
# new_from_config_node
#
@@ -60,15 +64,28 @@ class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key clien
#
@classmethod
def new_from_config_node(cls, spec_node, basedir=None):
- spec_node.validate_keys(['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name', 'type'])
-
- url = spec_node.get_str('url')
+ spec_node.validate_keys(
+ [
+ "url",
+ "push",
+ "server-cert",
+ "client-key",
+ "client-cert",
+ "instance-name",
+ "type",
+ ]
+ )
+
+ url = spec_node.get_str("url")
if not url:
- provenance = spec_node.get_node('url').get_provenance()
- raise LoadError("{}: empty artifact cache URL".format(provenance), LoadErrorReason.INVALID_DATA)
+ provenance = spec_node.get_node("url").get_provenance()
+ raise LoadError(
+ "{}: empty artifact cache URL".format(provenance),
+ LoadErrorReason.INVALID_DATA,
+ )
- push = spec_node.get_bool('push', default=False)
- instance_name = spec_node.get_str('instance-name', default=None)
+ push = spec_node.get_bool("push", default=False)
+ instance_name = spec_node.get_str("instance-name", default=None)
def parse_cert(key):
cert = spec_node.get_str(key, default=None)
@@ -76,22 +93,34 @@ class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key clien
cert = os.path.join(basedir, cert)
return cert
- cert_keys = ('server-cert', 'client-key', 'client-cert')
- server_cert, client_key, client_cert = tuple(parse_cert(key) for key in cert_keys)
+ cert_keys = ("server-cert", "client-key", "client-cert")
+ server_cert, client_key, client_cert = tuple(
+ parse_cert(key) for key in cert_keys
+ )
if client_key and not client_cert:
- provenance = spec_node.get_node('client-key').get_provenance()
- raise LoadError("{}: 'client-key' was specified without 'client-cert'".format(provenance),
- LoadErrorReason.INVALID_DATA)
+ provenance = spec_node.get_node("client-key").get_provenance()
+ raise LoadError(
+ "{}: 'client-key' was specified without 'client-cert'".format(
+ provenance
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
if client_cert and not client_key:
- provenance = spec_node.get_node('client-cert').get_provenance()
- raise LoadError("{}: 'client-cert' was specified without 'client-key'".format(provenance),
- LoadErrorReason.INVALID_DATA)
+ provenance = spec_node.get_node("client-cert").get_provenance()
+ raise LoadError(
+ "{}: 'client-cert' was specified without 'client-key'".format(
+ provenance
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
- type_ = spec_node.get_enum('type', RemoteType, default=RemoteType.ALL)
+ type_ = spec_node.get_enum("type", RemoteType, default=RemoteType.ALL)
- return cls(url, push, server_cert, client_key, client_cert, instance_name, type_)
+ return cls(
+ url, push, server_cert, client_key, client_cert, instance_name, type_
+ )
# FIXME: This can be made much nicer in python 3.7 through the use of
@@ -104,11 +133,11 @@ class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key clien
RemoteSpec.__new__.__defaults__ = ( # type: ignore
# mandatory # url - The url of the remote
# mandatory # push - Whether the remote should be used for pushing
- None, # server_cert - The server certificate
- None, # client_key - The (private) client key
- None, # client_cert - The (public) client certificate
- None, # instance_name - The (grpc) instance name of the remote
- RemoteType.ALL # type - The type of the remote (index, storage, both)
+ None, # server_cert - The server certificate
+ None, # client_key - The (private) client key
+ None, # client_cert - The (public) client certificate
+ None, # instance_name - The (grpc) instance name of the remote
+ RemoteType.ALL, # type - The type of the remote (index, storage, both)
)
@@ -122,7 +151,7 @@ RemoteSpec.__new__.__defaults__ = ( # type: ignore
# Customization for the particular protocol is expected to be
# performed in children.
#
-class BaseRemote():
+class BaseRemote:
key_name = None
def __init__(self, spec):
@@ -150,25 +179,28 @@ class BaseRemote():
# Set up the communication channel
url = urlparse(self.spec.url)
- if url.scheme == 'http':
+ if url.scheme == "http":
port = url.port or 80
- self.channel = grpc.insecure_channel('{}:{}'.format(url.hostname, port))
- elif url.scheme == 'https':
+ self.channel = grpc.insecure_channel("{}:{}".format(url.hostname, port))
+ elif url.scheme == "https":
port = url.port or 443
try:
server_cert, client_key, client_cert = _read_files(
- self.spec.server_cert,
- self.spec.client_key,
- self.spec.client_cert)
+ self.spec.server_cert, self.spec.client_key, self.spec.client_cert
+ )
except FileNotFoundError as e:
raise RemoteError("Could not read certificates: {}".format(e)) from e
self.server_cert = server_cert
self.client_key = client_key
self.client_cert = client_cert
- credentials = grpc.ssl_channel_credentials(root_certificates=self.server_cert,
- private_key=self.client_key,
- certificate_chain=self.client_cert)
- self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
+ credentials = grpc.ssl_channel_credentials(
+ root_certificates=self.server_cert,
+ private_key=self.client_key,
+ certificate_chain=self.client_cert,
+ )
+ self.channel = grpc.secure_channel(
+ "{}:{}".format(url.hostname, port), credentials
+ )
else:
raise RemoteError("Unsupported URL: {}".format(self.spec.url))
@@ -254,7 +286,8 @@ class BaseRemote():
def _read_files(*files):
def read_file(f):
if f:
- with open(f, 'rb') as data:
+ with open(f, "rb") as data:
return data.read()
return None
+
return (read_file(f) for f in files)
diff --git a/src/buildstream/_scheduler/jobs/elementjob.py b/src/buildstream/_scheduler/jobs/elementjob.py
index 246eb75c6..2a9f935b5 100644
--- a/src/buildstream/_scheduler/jobs/elementjob.py
+++ b/src/buildstream/_scheduler/jobs/elementjob.py
@@ -69,9 +69,9 @@ class ElementJob(Job):
super().__init__(*args, **kwargs)
self.set_name(element._get_full_name())
self.queue = queue
- self._element = element # Set the Element pertaining to the job
- self._action_cb = action_cb # The action callable function
- self._complete_cb = complete_cb # The complete callable function
+ self._element = element # Set the Element pertaining to the job
+ self._action_cb = action_cb # The action callable function
+ self._complete_cb = complete_cb # The complete callable function
# Set the plugin element name & key for logging purposes
self.set_message_element_name(self.name)
@@ -81,7 +81,9 @@ class ElementJob(Job):
self._complete_cb(self, self._element, status, self._result)
def create_child_job(self, *args, **kwargs):
- return ChildElementJob(*args, element=self._element, action_cb=self._action_cb, **kwargs)
+ return ChildElementJob(
+ *args, element=self._element, action_cb=self._action_cb, **kwargs
+ )
class ChildElementJob(ChildJob):
@@ -96,10 +98,14 @@ class ChildElementJob(ChildJob):
#
# This should probably be omitted for non-build tasks but it's harmless here
elt_env = self._element.get_environment()
- env_dump = yaml.round_trip_dump(elt_env, default_flow_style=False, allow_unicode=True)
- self.message(MessageType.LOG,
- "Build environment for element {}".format(self._element.name),
- detail=env_dump)
+ env_dump = yaml.round_trip_dump(
+ elt_env, default_flow_style=False, allow_unicode=True
+ )
+ self.message(
+ MessageType.LOG,
+ "Build environment for element {}".format(self._element.name),
+ detail=env_dump,
+ )
# Run the action
return self._action_cb(self._element)
@@ -109,6 +115,6 @@ class ChildElementJob(ChildJob):
workspace = self._element._get_workspace()
if workspace is not None:
- data['workspace'] = workspace.to_dict()
+ data["workspace"] = workspace.to_dict()
return data
diff --git a/src/buildstream/_scheduler/jobs/job.py b/src/buildstream/_scheduler/jobs/job.py
index 1d7697b02..3a5694a71 100644
--- a/src/buildstream/_scheduler/jobs/job.py
+++ b/src/buildstream/_scheduler/jobs/job.py
@@ -63,7 +63,7 @@ class JobStatus(FastEnum):
# Used to distinguish between status messages and return values
-class _Envelope():
+class _Envelope:
def __init__(self, message_type, message):
self.message_type = message_type
self.message = message
@@ -121,35 +121,38 @@ class _MessageType(FastEnum):
# that should be used - should contain {pid}.
# max_retries (int): The maximum number of retries
#
-class Job():
-
+class Job:
def __init__(self, scheduler, action_name, logfile, *, max_retries=0):
#
# Public members
#
- self.name = None # The name of the job, set by the job's subclass
- self.action_name = action_name # The action name for the Queue
- self.child_data = None # Data to be sent to the main process
+ self.name = None # The name of the job, set by the job's subclass
+ self.action_name = action_name # The action name for the Queue
+ self.child_data = None # Data to be sent to the main process
#
# Private members
#
- self._scheduler = scheduler # The scheduler
- self._queue = None # A message passing queue
- self._process = None # The Process object
- self._watcher = None # Child process watcher
- self._listening = False # Whether the parent is currently listening
- self._suspended = False # Whether this job is currently suspended
- self._max_retries = max_retries # Maximum number of automatic retries
- self._result = None # Return value of child action in the parent
- self._tries = 0 # Try count, for retryable jobs
- self._terminated = False # Whether this job has been explicitly terminated
+ self._scheduler = scheduler # The scheduler
+ self._queue = None # A message passing queue
+ self._process = None # The Process object
+ self._watcher = None # Child process watcher
+ self._listening = False # Whether the parent is currently listening
+ self._suspended = False # Whether this job is currently suspended
+ self._max_retries = max_retries # Maximum number of automatic retries
+ self._result = None # Return value of child action in the parent
+ self._tries = 0 # Try count, for retryable jobs
+ self._terminated = False # Whether this job has been explicitly terminated
self._logfile = logfile
- self._message_element_name = None # The plugin instance element name for messaging
- self._message_element_key = None # The element key for messaging
- self._element = None # The Element() passed to the Job() constructor, if applicable
+ self._message_element_name = (
+ None # The plugin instance element name for messaging
+ )
+ self._message_element_key = None # The element key for messaging
+ self._element = (
+ None # The Element() passed to the Job() constructor, if applicable
+ )
# set_name()
#
@@ -176,29 +179,28 @@ class Job():
self._max_retries,
self._tries,
self._message_element_name,
- self._message_element_key
+ self._message_element_key,
)
- if self._scheduler.context.platform.does_multiprocessing_start_require_pickling():
+ if (
+ self._scheduler.context.platform.does_multiprocessing_start_require_pickling()
+ ):
pickled = pickle_child_job(
- child_job,
- self._scheduler.context.get_projects(),
+ child_job, self._scheduler.context.get_projects(),
)
self._process = Process(
- target=do_pickled_child_job,
- args=[pickled, self._queue],
+ target=do_pickled_child_job, args=[pickled, self._queue],
)
else:
- self._process = Process(
- target=child_job.child_action,
- args=[self._queue],
- )
+ self._process = Process(target=child_job.child_action, args=[self._queue],)
# Block signals which are handled in the main process such that
# the child process does not inherit the parent's state, but the main
# process will be notified of any signal after we launch the child.
#
- with _signals.blocked([signal.SIGINT, signal.SIGTSTP, signal.SIGTERM], ignore=False):
+ with _signals.blocked(
+ [signal.SIGINT, signal.SIGTSTP, signal.SIGTERM], ignore=False
+ ):
self._process.start()
# Wait for the child task to complete.
@@ -279,8 +281,10 @@ class Job():
#
def kill(self):
# Force kill
- self.message(MessageType.WARN,
- "{} did not terminate gracefully, killing".format(self.action_name))
+ self.message(
+ MessageType.WARN,
+ "{} did not terminate gracefully, killing".format(self.action_name),
+ )
utils._kill_process_tree(self._process.pid)
# suspend()
@@ -289,8 +293,7 @@ class Job():
#
def suspend(self):
if not self._suspended:
- self.message(MessageType.STATUS,
- "{} suspending".format(self.action_name))
+ self.message(MessageType.STATUS, "{} suspending".format(self.action_name))
try:
# Use SIGTSTP so that child processes may handle and propagate
@@ -314,8 +317,7 @@ class Job():
def resume(self, silent=False):
if self._suspended:
if not silent and not self._scheduler.terminated:
- self.message(MessageType.STATUS,
- "{} resuming".format(self.action_name))
+ self.message(MessageType.STATUS, "{} resuming".format(self.action_name))
os.kill(self._process.pid, signal.SIGCONT)
self._suspended = False
@@ -356,14 +358,22 @@ class Job():
# kwargs: Remaining Message() constructor arguments, note that you can
# override 'element_name' and 'element_key' this way.
#
- def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
- kwargs['scheduler'] = True
+ def message(
+ self, message_type, message, element_name=None, element_key=None, **kwargs
+ ):
+ kwargs["scheduler"] = True
# If default name & key values not provided, set as given job attributes
if element_name is None:
element_name = self._message_element_name
if element_key is None:
element_key = self._message_element_key
- message = Message(message_type, message, element_name=element_name, element_key=element_key, **kwargs)
+ message = Message(
+ message_type,
+ message,
+ element_name=element_name,
+ element_key=element_key,
+ **kwargs
+ )
self._scheduler.notify_messenger(message)
# get_element()
@@ -395,8 +405,11 @@ class Job():
# lists, dicts, numbers, but not Element instances).
#
def handle_message(self, message):
- raise ImplError("Job '{kind}' does not implement handle_message()"
- .format(kind=type(self).__name__))
+ raise ImplError(
+ "Job '{kind}' does not implement handle_message()".format(
+ kind=type(self).__name__
+ )
+ )
# parent_complete()
#
@@ -408,8 +421,11 @@ class Job():
# result (any): The result returned by child_process().
#
def parent_complete(self, status, result):
- raise ImplError("Job '{kind}' does not implement parent_complete()"
- .format(kind=type(self).__name__))
+ raise ImplError(
+ "Job '{kind}' does not implement parent_complete()".format(
+ kind=type(self).__name__
+ )
+ )
# create_child_job()
#
@@ -427,8 +443,11 @@ class Job():
# (ChildJob): An instance of a subclass of ChildJob.
#
def create_child_job(self, *args, **kwargs):
- raise ImplError("Job '{kind}' does not implement create_child_job()"
- .format(kind=type(self).__name__))
+ raise ImplError(
+ "Job '{kind}' does not implement create_child_job()".format(
+ kind=type(self).__name__
+ )
+ )
#######################################################
# Local Private Methods #
@@ -459,15 +478,23 @@ class Job():
returncode = _ReturnCode(returncode)
except ValueError:
# An unexpected return code was returned; fail permanently and report
- self.message(MessageType.ERROR,
- "Internal job process unexpectedly died with exit code {}".format(returncode),
- logfile=self._logfile)
+ self.message(
+ MessageType.ERROR,
+ "Internal job process unexpectedly died with exit code {}".format(
+ returncode
+ ),
+ logfile=self._logfile,
+ )
returncode = _ReturnCode.PERM_FAIL
# We don't want to retry if we got OK or a permanent fail.
retry_flag = returncode == _ReturnCode.FAIL
- if retry_flag and (self._tries <= self._max_retries) and not self._scheduler.terminated:
+ if (
+ retry_flag
+ and (self._tries <= self._max_retries)
+ and not self._scheduler.terminated
+ ):
self.start()
return
@@ -511,8 +538,7 @@ class Job():
# For regression tests only, save the last error domain / reason
# reported from a child task in the main process, this global state
# is currently managed in _exceptions.py
- set_last_task_error(envelope.message['domain'],
- envelope.message['reason'])
+ set_last_task_error(envelope.message["domain"], envelope.message["reason"])
elif envelope.message_type is _MessageType.RESULT:
assert self._result is None
self._result = envelope.message
@@ -523,7 +549,8 @@ class Job():
self.handle_message(envelope.message)
else:
assert False, "Unhandled message type '{}': {}".format(
- envelope.message_type, envelope.message)
+ envelope.message_type, envelope.message
+ )
# _parent_process_queue()
#
@@ -561,7 +588,8 @@ class Job():
#
if not self._listening:
self._scheduler.loop.add_reader(
- self._queue._reader.fileno(), self._parent_recv)
+ self._queue._reader.fileno(), self._parent_recv
+ )
self._listening = True
# _parent_stop_listening()
@@ -597,11 +625,18 @@ class Job():
# message_element_key (tuple): None, or the element display key tuple
# to be supplied to the Message() constructor.
#
-class ChildJob():
-
+class ChildJob:
def __init__(
- self, action_name, messenger, logdir, logfile, max_retries, tries,
- message_element_name, message_element_key):
+ self,
+ action_name,
+ messenger,
+ logdir,
+ logfile,
+ max_retries,
+ tries,
+ message_element_name,
+ message_element_key,
+ ):
self.action_name = action_name
@@ -631,15 +666,24 @@ class ChildJob():
# for front end display if not already set or explicitly
# overridden here.
#
- def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
- kwargs['scheduler'] = True
+ def message(
+ self, message_type, message, element_name=None, element_key=None, **kwargs
+ ):
+ kwargs["scheduler"] = True
# If default name & key values not provided, set as given job attributes
if element_name is None:
element_name = self._message_element_name
if element_key is None:
element_key = self._message_element_key
- self._messenger.message(Message(message_type, message, element_name=element_name,
- element_key=element_key, **kwargs))
+ self._messenger.message(
+ Message(
+ message_type,
+ message,
+ element_name=element_name,
+ element_key=element_key,
+ **kwargs
+ )
+ )
# send_message()
#
@@ -676,8 +720,11 @@ class ChildJob():
# the result of the Job.
#
def child_process(self):
- raise ImplError("ChildJob '{kind}' does not implement child_process()"
- .format(kind=type(self).__name__))
+ raise ImplError(
+ "ChildJob '{kind}' does not implement child_process()".format(
+ kind=type(self).__name__
+ )
+ )
# child_process_data()
#
@@ -731,22 +778,26 @@ class ChildJob():
def resume_time():
nonlocal stopped_time
nonlocal starttime
- starttime += (datetime.datetime.now() - stopped_time)
+ starttime += datetime.datetime.now() - stopped_time
# Time, log and run the action function
#
- with _signals.suspendable(stop_time, resume_time), \
- self._messenger.recorded_messages(self._logfile, self._logdir) as filename:
+ with _signals.suspendable(
+ stop_time, resume_time
+ ), self._messenger.recorded_messages(self._logfile, self._logdir) as filename:
self.message(MessageType.START, self.action_name, logfile=filename)
try:
# Try the task action
- result = self.child_process() # pylint: disable=assignment-from-no-return
+ result = (
+ self.child_process()
+ ) # pylint: disable=assignment-from-no-return
except SkipJob as e:
elapsed = datetime.datetime.now() - starttime
- self.message(MessageType.SKIPPED, str(e),
- elapsed=elapsed, logfile=filename)
+ self.message(
+ MessageType.SKIPPED, str(e), elapsed=elapsed, logfile=filename
+ )
# Alert parent of skip by return code
self._child_shutdown(_ReturnCode.SKIPPED)
@@ -755,13 +806,21 @@ class ChildJob():
retry_flag = e.temporary
if retry_flag and (self._tries <= self._max_retries):
- self.message(MessageType.FAIL,
- "Try #{} failed, retrying".format(self._tries),
- elapsed=elapsed, logfile=filename)
+ self.message(
+ MessageType.FAIL,
+ "Try #{} failed, retrying".format(self._tries),
+ elapsed=elapsed,
+ logfile=filename,
+ )
else:
- self.message(MessageType.FAIL, str(e),
- elapsed=elapsed, detail=e.detail,
- logfile=filename, sandbox=e.sandbox)
+ self.message(
+ MessageType.FAIL,
+ str(e),
+ elapsed=elapsed,
+ detail=e.detail,
+ logfile=filename,
+ sandbox=e.sandbox,
+ )
self._send_message(_MessageType.CHILD_DATA, self.child_process_data())
@@ -770,20 +829,28 @@ class ChildJob():
# Set return code based on whether or not the error was temporary.
#
- self._child_shutdown(_ReturnCode.FAIL if retry_flag else _ReturnCode.PERM_FAIL)
+ self._child_shutdown(
+ _ReturnCode.FAIL if retry_flag else _ReturnCode.PERM_FAIL
+ )
- except Exception: # pylint: disable=broad-except
+ except Exception: # pylint: disable=broad-except
# If an unhandled exception (not normalized to BstError) occurs, that's a bug,
# send the traceback and formatted exception back to the frontend
# and print it to the log file.
#
elapsed = datetime.datetime.now() - starttime
- detail = "An unhandled exception occured:\n\n{}".format(traceback.format_exc())
-
- self.message(MessageType.BUG, self.action_name,
- elapsed=elapsed, detail=detail,
- logfile=filename)
+ detail = "An unhandled exception occured:\n\n{}".format(
+ traceback.format_exc()
+ )
+
+ self.message(
+ MessageType.BUG,
+ self.action_name,
+ elapsed=elapsed,
+ detail=detail,
+ logfile=filename,
+ )
# Unhandled exceptions should permanently fail
self._child_shutdown(_ReturnCode.PERM_FAIL)
@@ -793,8 +860,12 @@ class ChildJob():
self._child_send_result(result)
elapsed = datetime.datetime.now() - starttime
- self.message(MessageType.SUCCESS, self.action_name, elapsed=elapsed,
- logfile=filename)
+ self.message(
+ MessageType.SUCCESS,
+ self.action_name,
+ elapsed=elapsed,
+ logfile=filename,
+ )
# Shutdown needs to stay outside of the above context manager,
# make sure we don't try to handle SIGTERM while the process
@@ -833,10 +904,7 @@ class ChildJob():
domain = e.domain
reason = e.reason
- self._send_message(_MessageType.ERROR, {
- 'domain': domain,
- 'reason': reason
- })
+ self._send_message(_MessageType.ERROR, {"domain": domain, "reason": reason})
# _child_send_result()
#
diff --git a/src/buildstream/_scheduler/jobs/jobpickler.py b/src/buildstream/_scheduler/jobs/jobpickler.py
index b0465ec9e..0b482d080 100644
--- a/src/buildstream/_scheduler/jobs/jobpickler.py
+++ b/src/buildstream/_scheduler/jobs/jobpickler.py
@@ -23,7 +23,9 @@ import io
import pickle
from ..._protos.buildstream.v2.artifact_pb2 import Artifact as ArtifactProto
-from ..._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Digest as DigestProto
+from ..._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import (
+ Digest as DigestProto,
+)
# BuildStream toplevel imports
from ..._loader import Loader
@@ -37,9 +39,7 @@ _NAME_TO_PROTO_CLASS = {
"digest": DigestProto,
}
-_PROTO_CLASS_TO_NAME = {
- cls: name for name, cls in _NAME_TO_PROTO_CLASS.items()
-}
+_PROTO_CLASS_TO_NAME = {cls: name for name, cls in _NAME_TO_PROTO_CLASS.items()}
# pickle_child_job()
@@ -57,10 +57,7 @@ def pickle_child_job(child_job, projects):
# necessary for the job, this includes e.g. the global state of the node
# module.
node_module_state = node._get_state_for_pickling()
- return _pickle_child_job_data(
- (child_job, node_module_state),
- projects,
- )
+ return _pickle_child_job_data((child_job, node_module_state), projects,)
# do_pickled_child_job()
diff --git a/src/buildstream/_scheduler/queues/buildqueue.py b/src/buildstream/_scheduler/queues/buildqueue.py
index dc33e6510..d98b49476 100644
--- a/src/buildstream/_scheduler/queues/buildqueue.py
+++ b/src/buildstream/_scheduler/queues/buildqueue.py
@@ -50,10 +50,15 @@ class BuildQueue(Queue):
self._tried.add(element)
_, description, detail = element._get_build_result()
logfile = element._get_build_log()
- self._message(element, MessageType.FAIL, description,
- detail=detail, action_name=self.action_name,
- elapsed=timedelta(seconds=0),
- logfile=logfile)
+ self._message(
+ element,
+ MessageType.FAIL,
+ description,
+ detail=detail,
+ action_name=self.action_name,
+ elapsed=timedelta(seconds=0),
+ logfile=logfile,
+ )
self._done_queue.append(element)
element_name = element._get_full_name()
self._task_group.add_failed_task(element_name)
diff --git a/src/buildstream/_scheduler/queues/queue.py b/src/buildstream/_scheduler/queues/queue.py
index 49fae5677..62ebcc003 100644
--- a/src/buildstream/_scheduler/queues/queue.py
+++ b/src/buildstream/_scheduler/queues/queue.py
@@ -57,11 +57,11 @@ class QueueStatus(FastEnum):
# Args:
# scheduler (Scheduler): The Scheduler
#
-class Queue():
+class Queue:
# These should be overridden on class data of concrete Queue implementations
- action_name = None # type: Optional[str]
- complete_name = None # type: Optional[str]
+ action_name = None # type: Optional[str]
+ complete_name = None # type: Optional[str]
# Resources this queues' jobs want
resources = [] # type: List[int]
@@ -72,20 +72,27 @@ class Queue():
#
self._scheduler = scheduler
self._resources = scheduler.resources # Shared resource pool
- self._ready_queue = [] # Ready elements
- self._done_queue = deque() # Processed / Skipped elements
+ self._ready_queue = [] # Ready elements
+ self._done_queue = deque() # Processed / Skipped elements
self._max_retries = 0
- self._required_element_check = False # Whether we should check that elements are required before enqueuing
+ self._required_element_check = (
+ False # Whether we should check that elements are required before enqueuing
+ )
# Assert the subclass has setup class data
assert self.action_name is not None
assert self.complete_name is not None
- if ResourceType.UPLOAD in self.resources or ResourceType.DOWNLOAD in self.resources:
+ if (
+ ResourceType.UPLOAD in self.resources
+ or ResourceType.DOWNLOAD in self.resources
+ ):
self._max_retries = scheduler.context.sched_network_retries
- self._task_group = self._scheduler._state.add_task_group(self.action_name, self.complete_name)
+ self._task_group = self._scheduler._state.add_task_group(
+ self.action_name, self.complete_name
+ )
# destroy()
#
@@ -162,8 +169,11 @@ class Queue():
# element (Element): The element waiting to be pushed into the queue
#
def register_pending_element(self, element):
- raise ImplError("Queue type: {} does not implement register_pending_element()"
- .format(self.action_name))
+ raise ImplError(
+ "Queue type: {} does not implement register_pending_element()".format(
+ self.action_name
+ )
+ )
#####################################################
# Scheduler / Pipeline facing APIs #
@@ -229,12 +239,16 @@ class Queue():
ready.append(element)
return [
- ElementJob(self._scheduler, self.action_name,
- self._element_log_path(element),
- element=element, queue=self,
- action_cb=self.get_process_func(),
- complete_cb=self._job_done,
- max_retries=self._max_retries)
+ ElementJob(
+ self._scheduler,
+ self.action_name,
+ self._element_log_path(element),
+ element=element,
+ queue=self,
+ action_cb=self.get_process_func(),
+ complete_cb=self._job_done,
+ max_retries=self._max_retries,
+ )
for element in ready
]
@@ -267,7 +281,7 @@ class Queue():
def _update_workspaces(self, element, job):
workspace_dict = None
if job.child_data:
- workspace_dict = job.child_data.get('workspace', None)
+ workspace_dict = job.child_data.get("workspace", None)
# Handle any workspace modifications now
#
@@ -278,11 +292,19 @@ class Queue():
try:
workspaces.save_config()
except BstError as e:
- self._message(element, MessageType.ERROR, "Error saving workspaces", detail=str(e))
- except Exception: # pylint: disable=broad-except
- self._message(element, MessageType.BUG,
- "Unhandled exception while saving workspaces",
- detail=traceback.format_exc())
+ self._message(
+ element,
+ MessageType.ERROR,
+ "Error saving workspaces",
+ detail=str(e),
+ )
+ except Exception: # pylint: disable=broad-except
+ self._message(
+ element,
+ MessageType.BUG,
+ "Unhandled exception while saving workspaces",
+ detail=traceback.format_exc(),
+ )
# _job_done()
#
@@ -312,7 +334,9 @@ class Queue():
# Report error and mark as failed
#
- self._message(element, MessageType.ERROR, "Post processing error", detail=str(e))
+ self._message(
+ element, MessageType.ERROR, "Post processing error", detail=str(e)
+ )
self._task_group.add_failed_task(element._get_full_name())
# Treat this as a task error as it's related to a task
@@ -322,13 +346,16 @@ class Queue():
#
set_last_task_error(e.domain, e.reason)
- except Exception: # pylint: disable=broad-except
+ except Exception: # pylint: disable=broad-except
# Report unhandled exceptions and mark as failed
#
- self._message(element, MessageType.BUG,
- "Unhandled exception in post processing",
- detail=traceback.format_exc())
+ self._message(
+ element,
+ MessageType.BUG,
+ "Unhandled exception in post processing",
+ detail=traceback.format_exc(),
+ )
self._task_group.add_failed_task(element._get_full_name())
else:
# All elements get placed on the done queue for later processing.
@@ -345,7 +372,9 @@ class Queue():
# Convenience wrapper for Queue implementations to send
# a message for the element they are processing
def _message(self, element, message_type, brief, **kwargs):
- message = Message(message_type, brief, element_name=element._get_full_name(), **kwargs)
+ message = Message(
+ message_type, brief, element_name=element._get_full_name(), **kwargs
+ )
self._scheduler.notify_messenger(message)
def _element_log_path(self, element):
@@ -372,7 +401,7 @@ class Queue():
if status == QueueStatus.SKIP:
# Place skipped elements into the done queue immediately
self._task_group.add_skipped_task()
- self._done_queue.append(element) # Elements to proceed to the next queue
+ self._done_queue.append(element) # Elements to proceed to the next queue
elif status == QueueStatus.READY:
# Push elements which are ready to be processed immediately into the queue
heapq.heappush(self._ready_queue, (element._depth, element))
diff --git a/src/buildstream/_scheduler/resources.py b/src/buildstream/_scheduler/resources.py
index 73bf66b4a..946a7f0b1 100644
--- a/src/buildstream/_scheduler/resources.py
+++ b/src/buildstream/_scheduler/resources.py
@@ -1,17 +1,17 @@
-class ResourceType():
+class ResourceType:
CACHE = 0
DOWNLOAD = 1
PROCESS = 2
UPLOAD = 3
-class Resources():
+class Resources:
def __init__(self, num_builders, num_fetchers, num_pushers):
self._max_resources = {
ResourceType.CACHE: 0,
ResourceType.DOWNLOAD: num_fetchers,
ResourceType.PROCESS: num_builders,
- ResourceType.UPLOAD: num_pushers
+ ResourceType.UPLOAD: num_pushers,
}
# Resources jobs are currently using.
@@ -19,7 +19,7 @@ class Resources():
ResourceType.CACHE: 0,
ResourceType.DOWNLOAD: 0,
ResourceType.PROCESS: 0,
- ResourceType.UPLOAD: 0
+ ResourceType.UPLOAD: 0,
}
# Resources jobs currently want exclusive access to. The set
@@ -31,7 +31,7 @@ class Resources():
ResourceType.CACHE: set(),
ResourceType.DOWNLOAD: set(),
ResourceType.PROCESS: set(),
- ResourceType.UPLOAD: set()
+ ResourceType.UPLOAD: set(),
}
# reserve()
@@ -90,8 +90,10 @@ class Resources():
# available. If we don't have enough, the job cannot be
# scheduled.
for resource in resources:
- if (self._max_resources[resource] > 0 and
- self._used_resources[resource] >= self._max_resources[resource]):
+ if (
+ self._max_resources[resource] > 0
+ and self._used_resources[resource] >= self._max_resources[resource]
+ ):
return False
# Now we register the fact that our job is using the resources
diff --git a/src/buildstream/_scheduler/scheduler.py b/src/buildstream/_scheduler/scheduler.py
index d3faa2a8e..0555b1103 100644
--- a/src/buildstream/_scheduler/scheduler.py
+++ b/src/buildstream/_scheduler/scheduler.py
@@ -73,17 +73,18 @@ class NotificationType(FastEnum):
# required. NOTE: The notification object should be lightweight
# and all attributes must be picklable.
#
-class Notification():
-
- def __init__(self,
- notification_type,
- *,
- full_name=None,
- job_action=None,
- job_status=None,
- time=None,
- element=None,
- message=None):
+class Notification:
+ def __init__(
+ self,
+ notification_type,
+ *,
+ full_name=None,
+ job_action=None,
+ job_status=None,
+ time=None,
+ element=None,
+ message=None
+ ):
self.notification_type = notification_type
self.full_name = full_name
self.job_action = job_action
@@ -113,40 +114,42 @@ class Notification():
# interrupt_callback: A callback to handle ^C
# ticker_callback: A callback call once per second
#
-class Scheduler():
-
- def __init__(self, context,
- start_time, state, notification_queue, notifier):
+class Scheduler:
+ def __init__(self, context, start_time, state, notification_queue, notifier):
#
# Public members
#
- self.queues = None # Exposed for the frontend to print summaries
- self.context = context # The Context object shared with Queues
- self.terminated = False # Whether the scheduler was asked to terminate or has terminated
- self.suspended = False # Whether the scheduler is currently suspended
+ self.queues = None # Exposed for the frontend to print summaries
+ self.context = context # The Context object shared with Queues
+ self.terminated = (
+ False # Whether the scheduler was asked to terminate or has terminated
+ )
+ self.suspended = False # Whether the scheduler is currently suspended
# These are shared with the Job, but should probably be removed or made private in some way.
- self.loop = None # Shared for Job access to observe the message queue
- self.internal_stops = 0 # Amount of SIGTSTP signals we've introduced, this is shared with job.py
+ self.loop = None # Shared for Job access to observe the message queue
+ self.internal_stops = (
+ 0 # Amount of SIGTSTP signals we've introduced, this is shared with job.py
+ )
#
# Private members
#
- self._active_jobs = [] # Jobs currently being run in the scheduler
- self._starttime = start_time # Initial application start time
- self._suspendtime = None # Session time compensation for suspended state
- self._queue_jobs = True # Whether we should continue to queue jobs
+ self._active_jobs = [] # Jobs currently being run in the scheduler
+ self._starttime = start_time # Initial application start time
+ self._suspendtime = None # Session time compensation for suspended state
+ self._queue_jobs = True # Whether we should continue to queue jobs
self._state = state
- self._casd_process = None # handle to the casd process for monitoring purpose
+ self._casd_process = None # handle to the casd process for monitoring purpose
# Bidirectional queue to send notifications back to the Scheduler's owner
self._notification_queue = notification_queue
self._notifier = notifier
- self.resources = Resources(context.sched_builders,
- context.sched_fetchers,
- context.sched_pushers)
+ self.resources = Resources(
+ context.sched_builders, context.sched_fetchers, context.sched_pushers
+ )
# run()
#
@@ -188,7 +191,9 @@ class Scheduler():
_watcher.add_child_handler(casd_process.pid, self._abort_on_casd_failure)
# Start the profiler
- with PROFILER.profile(Topics.SCHEDULER, "_".join(queue.action_name for queue in self.queues)):
+ with PROFILER.profile(
+ Topics.SCHEDULER, "_".join(queue.action_name for queue in self.queues)
+ ):
# Run the queues
self._sched()
self.loop.run_forever()
@@ -307,11 +312,13 @@ class Scheduler():
element_info = None
# Now check for more jobs
- notification = Notification(NotificationType.JOB_COMPLETE,
- full_name=job.name,
- job_action=job.action_name,
- job_status=status,
- element=element_info)
+ notification = Notification(
+ NotificationType.JOB_COMPLETE,
+ full_name=job.name,
+ job_action=job.action_name,
+ job_status=status,
+ element=element_info,
+ )
self._notify(notification)
self._sched()
@@ -342,7 +349,9 @@ class Scheduler():
# returncode (int): the return code with which buildbox-casd exited
#
def _abort_on_casd_failure(self, pid, returncode):
- message = Message(MessageType.BUG, "buildbox-casd died while the pipeline was active.")
+ message = Message(
+ MessageType.BUG, "buildbox-casd died while the pipeline was active."
+ )
self._notify(Notification(NotificationType.MESSAGE, message=message))
self._casd_process.returncode = returncode
@@ -357,10 +366,12 @@ class Scheduler():
#
def _start_job(self, job):
self._active_jobs.append(job)
- notification = Notification(NotificationType.JOB_START,
- full_name=job.name,
- job_action=job.action_name,
- time=self._state.elapsed_time(start_time=self._starttime))
+ notification = Notification(
+ NotificationType.JOB_START,
+ full_name=job.name,
+ job_action=job.action_name,
+ time=self._state.elapsed_time(start_time=self._starttime),
+ )
self._notify(notification)
job.start()
@@ -396,9 +407,9 @@ class Scheduler():
# to fetch tasks for elements which failed to pull, and
# thus need all the pulls to complete before ever starting
# a build
- ready.extend(chain.from_iterable(
- q.harvest_jobs() for q in reversed(self.queues)
- ))
+ ready.extend(
+ chain.from_iterable(q.harvest_jobs() for q in reversed(self.queues))
+ )
# harvest_jobs() may have decided to skip some jobs, making
# them eligible for promotion to the next queue as a side effect.
@@ -408,7 +419,11 @@ class Scheduler():
# Make sure fork is allowed before starting jobs
if not self.context.prepare_fork():
- message = Message(MessageType.BUG, "Fork is not allowed", detail="Background threads are active")
+ message = Message(
+ MessageType.BUG,
+ "Fork is not allowed",
+ detail="Background threads are active",
+ )
self._notify(Notification(NotificationType.MESSAGE, message=message))
self.terminate_jobs()
return
@@ -468,8 +483,10 @@ class Scheduler():
self.suspended = False
# Notify that we're unsuspended
self._notify(Notification(NotificationType.SUSPENDED))
- self._starttime += (datetime.datetime.now() - self._suspendtime)
- self._notify(Notification(NotificationType.SCHED_START_TIME, time=self._starttime))
+ self._starttime += datetime.datetime.now() - self._suspendtime
+ self._notify(
+ Notification(NotificationType.SCHED_START_TIME, time=self._starttime)
+ )
self._suspendtime = None
# _interrupt_event():
diff --git a/src/buildstream/_signals.py b/src/buildstream/_signals.py
index 31982c199..425a57239 100644
--- a/src/buildstream/_signals.py
+++ b/src/buildstream/_signals.py
@@ -37,8 +37,8 @@ if TYPE_CHECKING:
# typing.MutableSequence. However, that is only available in Python versions
# 3.5.4 onward and 3.6.1 onward.
# Debian 9 ships with 3.5.3.
-terminator_stack = deque() # type: MutableSequence[Callable]
-suspendable_stack = deque() # type: MutableSequence[Callable]
+terminator_stack = deque() # type: MutableSequence[Callable]
+suspendable_stack = deque() # type: MutableSequence[Callable]
# Per process SIGTERM handler
@@ -47,16 +47,18 @@ def terminator_handler(signal_, frame):
terminator_ = terminator_stack.pop()
try:
terminator_()
- except: # noqa pylint: disable=bare-except
+ except: # noqa pylint: disable=bare-except
# Ensure we print something if there's an exception raised when
# processing the handlers. Note that the default exception
# handler won't be called because we os._exit next, so we must
# catch all possible exceptions with the unqualified 'except'
# clause.
traceback.print_exc(file=sys.stderr)
- print('Error encountered in BuildStream while processing custom SIGTERM handler:',
- terminator_,
- file=sys.stderr)
+ print(
+ "Error encountered in BuildStream while processing custom SIGTERM handler:",
+ terminator_,
+ file=sys.stderr,
+ )
# Use special exit here, terminate immediately, recommended
# for precisely this situation where child processes are terminated.
@@ -79,7 +81,7 @@ def terminator_handler(signal_, frame):
#
@contextmanager
def terminator(terminate_func):
- global terminator_stack # pylint: disable=global-statement
+ global terminator_stack # pylint: disable=global-statement
# Signal handling only works in the main thread
if threading.current_thread() != threading.main_thread():
@@ -101,7 +103,7 @@ def terminator(terminate_func):
# Just a simple object for holding on to two callbacks
-class Suspender():
+class Suspender:
def __init__(self, suspend_callback, resume_callback):
self.suspend = suspend_callback
self.resume = resume_callback
@@ -144,7 +146,7 @@ def suspend_handler(sig, frame):
#
@contextmanager
def suspendable(suspend_callback, resume_callback):
- global suspendable_stack # pylint: disable=global-statement
+ global suspendable_stack # pylint: disable=global-statement
outermost = bool(not suspendable_stack)
suspender = Suspender(suspend_callback, resume_callback)
diff --git a/src/buildstream/_site.py b/src/buildstream/_site.py
index 8940fa34a..db0587120 100644
--- a/src/buildstream/_site.py
+++ b/src/buildstream/_site.py
@@ -30,22 +30,22 @@ import subprocess
root = os.path.dirname(os.path.abspath(__file__))
# The Element plugin directory
-element_plugins = os.path.join(root, 'plugins', 'elements')
+element_plugins = os.path.join(root, "plugins", "elements")
# The Source plugin directory
-source_plugins = os.path.join(root, 'plugins', 'sources')
+source_plugins = os.path.join(root, "plugins", "sources")
# Default user configuration
-default_user_config = os.path.join(root, 'data', 'userconfig.yaml')
+default_user_config = os.path.join(root, "data", "userconfig.yaml")
# Default project configuration
-default_project_config = os.path.join(root, 'data', 'projectconfig.yaml')
+default_project_config = os.path.join(root, "data", "projectconfig.yaml")
# Script template to call module building scripts
-build_all_template = os.path.join(root, 'data', 'build-all.sh.in')
+build_all_template = os.path.join(root, "data", "build-all.sh.in")
# Module building script template
-build_module_template = os.path.join(root, 'data', 'build-module.sh.in')
+build_module_template = os.path.join(root, "data", "build-module.sh.in")
def get_bwrap_version():
@@ -53,7 +53,7 @@ def get_bwrap_version():
#
# returns None if no bwrap was found
# otherwise returns a tuple of 3 int: major, minor, patch
- bwrap_path = shutil.which('bwrap')
+ bwrap_path = shutil.which("bwrap")
if not bwrap_path:
return None
diff --git a/src/buildstream/_sourcecache.py b/src/buildstream/_sourcecache.py
index cdbe5b9cf..66e1c1bb9 100644
--- a/src/buildstream/_sourcecache.py
+++ b/src/buildstream/_sourcecache.py
@@ -26,12 +26,15 @@ from .storage._casbaseddirectory import CasBasedDirectory
from ._basecache import BaseCache
from ._exceptions import CASError, CASRemoteError, SourceCacheError
from . import utils
-from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
- source_pb2, source_pb2_grpc
+from ._protos.buildstream.v2 import (
+ buildstream_pb2,
+ buildstream_pb2_grpc,
+ source_pb2,
+ source_pb2_grpc,
+)
class SourceRemote(BaseRemote):
-
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.source_service = None
@@ -65,8 +68,10 @@ class SourceRemote(BaseRemote):
except grpc.RpcError as e:
# Check if this remote has the artifact service
if e.code() == grpc.StatusCode.UNIMPLEMENTED:
- return ("Configured remote does not have the BuildStream "
- "capabilities service. Please check remote configuration.")
+ return (
+ "Configured remote does not have the BuildStream "
+ "capabilities service. Please check remote configuration."
+ )
# Else raise exception with details
return "Remote initialisation failed: {}".format(e.details())
@@ -74,7 +79,7 @@ class SourceRemote(BaseRemote):
return "Configured remote does not support source service"
if self.spec.push and not response.source_capabilities.allow_updates:
- return 'Source server does not allow push'
+ return "Source server does not allow push"
return None
@@ -132,7 +137,7 @@ class SourceCache(BaseCache):
def __init__(self, context):
super().__init__(context)
- self.sourcerefdir = os.path.join(context.cachedir, 'source_protos')
+ self.sourcerefdir = os.path.join(context.cachedir, "source_protos")
os.makedirs(self.sourcerefdir, exist_ok=True)
# list_sources()
@@ -185,7 +190,9 @@ class SourceCache(BaseCache):
vdir.import_files(self.export(previous_source))
if not source.BST_STAGE_VIRTUAL_DIRECTORY:
- with utils._tempdir(dir=self.context.tmpdir, prefix='staging-temp') as tmpdir:
+ with utils._tempdir(
+ dir=self.context.tmpdir, prefix="staging-temp"
+ ) as tmpdir:
if not vdir.is_empty():
vdir.export_files(tmpdir)
source._stage(tmpdir)
@@ -236,12 +243,16 @@ class SourceCache(BaseCache):
source_proto = self._pull_source(ref, remote)
if source_proto is None:
- source.info("Remote source service ({}) does not have source {} cached".format(
- remote, display_key))
+ source.info(
+ "Remote source service ({}) does not have source {} cached".format(
+ remote, display_key
+ )
+ )
continue
except CASError as e:
- raise SourceCacheError("Failed to pull source {}: {}".format(
- display_key, e)) from e
+ raise SourceCacheError(
+ "Failed to pull source {}: {}".format(display_key, e)
+ ) from e
if not source_proto:
return False
@@ -249,28 +260,40 @@ class SourceCache(BaseCache):
for remote in storage_remotes:
try:
remote.init()
- source.status("Pulling data for source {} <- {}".format(display_key, remote))
+ source.status(
+ "Pulling data for source {} <- {}".format(display_key, remote)
+ )
# Fetch source blobs
self.cas._fetch_directory(remote, source_proto.files)
- required_blobs = self.cas.required_blobs_for_directory(source_proto.files)
+ required_blobs = self.cas.required_blobs_for_directory(
+ source_proto.files
+ )
missing_blobs = self.cas.local_missing_blobs(required_blobs)
missing_blobs = self.cas.fetch_blobs(remote, missing_blobs)
if missing_blobs:
- source.info("Remote cas ({}) does not have source {} cached".format(
- remote, display_key))
+ source.info(
+ "Remote cas ({}) does not have source {} cached".format(
+ remote, display_key
+ )
+ )
continue
source.info("Pulled source {} <- {}".format(display_key, remote))
return True
except BlobNotFound as e:
# Not all blobs are available on this remote
- source.info("Remote cas ({}) does not have blob {} cached".format(remote, e.blob))
+ source.info(
+ "Remote cas ({}) does not have blob {} cached".format(
+ remote, e.blob
+ )
+ )
continue
except CASError as e:
- raise SourceCacheError("Failed to pull source {}: {}".format(
- display_key, e)) from e
+ raise SourceCacheError(
+ "Failed to pull source {}: {}".format(display_key, e)
+ ) from e
return False
@@ -302,14 +325,18 @@ class SourceCache(BaseCache):
display_key = source._get_brief_display_key()
for remote in storage_remotes:
remote.init()
- source.status("Pushing data for source {} -> {}".format(display_key, remote))
+ source.status(
+ "Pushing data for source {} -> {}".format(display_key, remote)
+ )
source_proto = self._get_source(ref)
try:
self.cas._send_directory(remote, source_proto.files)
pushed_storage = True
except CASRemoteError:
- source.info("Failed to push source files {} -> {}".format(display_key, remote))
+ source.info(
+ "Failed to push source files {} -> {}".format(display_key, remote)
+ )
continue
for remote in index_remotes:
@@ -318,12 +345,19 @@ class SourceCache(BaseCache):
# check whether cache has files already
if self._pull_source(ref, remote) is not None:
- source.info("Remote ({}) already has source {} cached"
- .format(remote, display_key))
+ source.info(
+ "Remote ({}) already has source {} cached".format(
+ remote, display_key
+ )
+ )
continue
if not self._push_source(ref, remote):
- source.info("Failed to push source metadata {} -> {}".format(display_key, remote))
+ source.info(
+ "Failed to push source metadata {} -> {}".format(
+ display_key, remote
+ )
+ )
continue
source.info("Pushed source {} -> {}".format(display_key, remote))
@@ -343,19 +377,20 @@ class SourceCache(BaseCache):
def _store_proto(self, proto, ref):
path = self._source_path(ref)
os.makedirs(os.path.dirname(path), exist_ok=True)
- with utils.save_file_atomic(path, 'w+b') as f:
+ with utils.save_file_atomic(path, "w+b") as f:
f.write(proto.SerializeToString())
def _get_source(self, ref):
path = self._source_path(ref)
source_proto = source_pb2.Source()
try:
- with open(path, 'r+b') as f:
+ with open(path, "r+b") as f:
source_proto.ParseFromString(f.read())
return source_proto
except FileNotFoundError as e:
- raise SourceCacheError("Attempted to access unavailable source: {}"
- .format(e)) from e
+ raise SourceCacheError(
+ "Attempted to access unavailable source: {}".format(e)
+ ) from e
def _source_path(self, ref):
return os.path.join(self.sourcerefdir, ref)
@@ -364,7 +399,7 @@ class SourceCache(BaseCache):
for root, _, files in os.walk(self.sourcerefdir):
for source_file in files:
source = source_pb2.Source()
- with open(os.path.join(root, source_file), 'r+b') as f:
+ with open(os.path.join(root, source_file), "r+b") as f:
source.ParseFromString(f.read())
yield source.files
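Every hunk in _sourcecache.py above is the mechanical output of the Black formatter: over-long imports and calls get exploded across lines inside parentheses, and single-quoted literals become double-quoted. A minimal sketch of reproducing one such rewrite through Black's Python API follows (assuming black is installed, and assuming the project used Black's default 88-column limit; that limit is an inference from the wrapping seen here, not something the diff states):

    # Run one of the over-long calls from _sourcecache.py through Black.
    # black.format_str() only parses the snippet, so the undefined names
    # (source, remote, display_key) do not matter.
    import black

    SNIPPET = (
        'source.info("Remote source service ({}) does not have'
        ' source {} cached".format(remote, display_key))\n'
    )

    # Prints a wrapped call in the same style as the '+' lines above.
    print(black.format_str(SNIPPET, mode=black.FileMode()))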
diff --git a/src/buildstream/_sourcefactory.py b/src/buildstream/_sourcefactory.py
index 1d959a140..f98505dea 100644
--- a/src/buildstream/_sourcefactory.py
+++ b/src/buildstream/_sourcefactory.py
@@ -30,14 +30,15 @@ from .source import Source
# plugin_origins (list): Data used to search for external Source plugins
#
class SourceFactory(PluginContext):
+ def __init__(self, plugin_base, *, format_versions={}, plugin_origins=None):
- def __init__(self, plugin_base, *,
- format_versions={},
- plugin_origins=None):
-
- super().__init__(plugin_base, Source, [_site.source_plugins],
- format_versions=format_versions,
- plugin_origins=plugin_origins)
+ super().__init__(
+ plugin_base,
+ Source,
+ [_site.source_plugins],
+ format_versions=format_versions,
+ plugin_origins=plugin_origins,
+ )
# create():
#
diff --git a/src/buildstream/_state.py b/src/buildstream/_state.py
index 310e12a63..07e1f8c9e 100644
--- a/src/buildstream/_state.py
+++ b/src/buildstream/_state.py
@@ -28,7 +28,7 @@ from collections import OrderedDict
# state (State): The state object
# complete_name (str): Optional name for frontend status rendering, e.g. 'built'
#
-class TaskGroup():
+class TaskGroup:
def __init__(self, name, state, complete_name=None):
self.name = name
self.complete_name = complete_name
@@ -98,14 +98,14 @@ class TaskGroup():
# Args:
# session_start (datetime): The time the session started
#
-class State():
+class State:
def __init__(self, session_start):
self._session_start = session_start
self.task_groups = OrderedDict() # key is TaskGroup name
# Note: A Task's full_name is technically unique, but only accidentally.
- self.tasks = OrderedDict() # key is a tuple of action_name and full_name
+ self.tasks = OrderedDict() # key is a tuple of action_name and full_name
self._task_added_cbs = []
self._task_removed_cbs = []
@@ -244,7 +244,9 @@ class State():
# TaskGroup: The task group created
#
def add_task_group(self, name, complete_name=None):
- assert name not in self.task_groups, "Trying to add task group '{}' to '{}'".format(name, self.task_groups)
+ assert (
+ name not in self.task_groups
+ ), "Trying to add task group '{}' to '{}'".format(name, self.task_groups)
group = TaskGroup(name, self, complete_name)
self.task_groups[name] = group
@@ -281,8 +283,9 @@ class State():
#
def add_task(self, action_name, full_name, elapsed_offset=None):
task_key = (action_name, full_name)
- assert task_key not in self.tasks, \
- "Trying to add task '{}:{}' to '{}'".format(action_name, full_name, self.tasks)
+ assert task_key not in self.tasks, "Trying to add task '{}:{}' to '{}'".format(
+ action_name, full_name, self.tasks
+ )
if not elapsed_offset:
elapsed_offset = self.elapsed_time()
@@ -366,7 +369,7 @@ class State():
# e.g. an element's name.
# elapsed_offset (timedelta): The time the task started, relative to
# buildstream's start time.
-class _Task():
+class _Task:
def __init__(self, state, action_name, full_name, elapsed_offset):
self._state = state
self.action_name = action_name
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index c71bec80a..402473e33 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -31,10 +31,27 @@ from fnmatch import fnmatch
from collections import deque
from ._artifactelement import verify_artifact_ref, ArtifactElement
-from ._exceptions import StreamError, ImplError, BstError, ArtifactElementError, ArtifactError
+from ._exceptions import (
+ StreamError,
+ ImplError,
+ BstError,
+ ArtifactElementError,
+ ArtifactError,
+)
from ._message import Message, MessageType
-from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, \
- SourcePushQueue, BuildQueue, PullQueue, ArtifactPushQueue, NotificationType, Notification, JobStatus
+from ._scheduler import (
+ Scheduler,
+ SchedStatus,
+ TrackQueue,
+ FetchQueue,
+ SourcePushQueue,
+ BuildQueue,
+ PullQueue,
+ ArtifactPushQueue,
+ NotificationType,
+ Notification,
+ JobStatus,
+)
from ._pipeline import Pipeline, PipelineSelection
from ._profile import Topics, PROFILER
from ._state import State
@@ -55,20 +72,24 @@ from . import Scope, Consistency
# interrupt_callback (callable): A callback to invoke when we get interrupted
# ticker_callback (callable): Invoked every second while running the scheduler
#
-class Stream():
-
- def __init__(self, context, session_start, *,
- session_start_callback=None,
- interrupt_callback=None,
- ticker_callback=None):
+class Stream:
+ def __init__(
+ self,
+ context,
+ session_start,
+ *,
+ session_start_callback=None,
+ interrupt_callback=None,
+ ticker_callback=None
+ ):
#
# Public members
#
- self.targets = [] # Resolved target elements
- self.session_elements = [] # List of elements being processed this session
- self.total_elements = [] # Total list of elements based on targets
- self.queues = [] # Queue objects
+ self.targets = [] # Resolved target elements
+ self.session_elements = [] # List of elements being processed this session
+ self.total_elements = [] # Total list of elements based on targets
+ self.queues = [] # Queue objects
#
# Private members
@@ -80,17 +101,26 @@ class Stream():
self._pipeline = None
self._state = State(session_start) # Owned by Stream, used by Core to set state
self._notification_queue = deque()
- self._starttime = session_start # Synchronised with Scheduler's relative start time
+ self._starttime = (
+ session_start # Synchronised with Scheduler's relative start time
+ )
context.messenger.set_state(self._state)
- self._scheduler = Scheduler(context, session_start, self._state, self._notification_queue,
- self._scheduler_notification_handler)
+ self._scheduler = Scheduler(
+ context,
+ session_start,
+ self._state,
+ self._notification_queue,
+ self._scheduler_notification_handler,
+ )
self._first_non_track_queue = None
self._session_start_callback = session_start_callback
self._ticker_callback = ticker_callback
self._interrupt_callback = interrupt_callback
- self._notifier = self._scheduler._stream_notification_handler # Assign the schedulers notification handler
+ self._notifier = (
+ self._scheduler._stream_notification_handler
+ ) # Assign the schedulers notification handler
self._scheduler_running = False
self._scheduler_terminated = False
self._scheduler_suspended = False
@@ -138,17 +168,26 @@ class Stream():
#
# Returns:
# (list of Element): The selected elements
- def load_selection(self, targets, *,
- selection=PipelineSelection.NONE,
- except_targets=(),
- use_artifact_config=False,
- load_refs=False):
- with PROFILER.profile(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, "-") for t in targets)):
- target_objects, _ = self._load(targets, (),
- selection=selection,
- except_targets=except_targets,
- use_artifact_config=use_artifact_config,
- load_refs=load_refs)
+ def load_selection(
+ self,
+ targets,
+ *,
+ selection=PipelineSelection.NONE,
+ except_targets=(),
+ use_artifact_config=False,
+ load_refs=False
+ ):
+ with PROFILER.profile(
+ Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, "-") for t in targets)
+ ):
+ target_objects, _ = self._load(
+ targets,
+ (),
+ selection=selection,
+ except_targets=except_targets,
+ use_artifact_config=use_artifact_config,
+ load_refs=load_refs,
+ )
return target_objects
@@ -171,14 +210,20 @@ class Stream():
# Returns:
# (int): The exit code of the launched shell
#
- def shell(self, element, scope, prompt, *,
- directory=None,
- mounts=None,
- isolate=False,
- command=None,
- usebuildtree=None,
- pull_dependencies=None,
- unique_id=None):
+ def shell(
+ self,
+ element,
+ scope,
+ prompt,
+ *,
+ directory=None,
+ mounts=None,
+ isolate=False,
+ command=None,
+ usebuildtree=None,
+ pull_dependencies=None,
+ unique_id=None
+ ):
# Load the Element via the unique_id if given
if unique_id and element is None:
@@ -189,16 +234,22 @@ class Stream():
# definitions to control the execution environment only.
if directory is None:
missing_deps = [
- dep for dep in self._pipeline.dependencies([element], scope)
+ dep
+ for dep in self._pipeline.dependencies([element], scope)
if not dep._cached()
]
if missing_deps:
if not pull_dependencies:
raise StreamError(
"Elements need to be built or downloaded before staging a shell environment",
- detail="\n"
- .join(list(map(lambda x: x._get_full_name(), missing_deps))))
- self._message(MessageType.INFO, "Attempting to fetch missing or incomplete artifacts")
+ detail="\n".join(
+ list(map(lambda x: x._get_full_name(), missing_deps))
+ ),
+ )
+ self._message(
+ MessageType.INFO,
+ "Attempting to fetch missing or incomplete artifacts",
+ )
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
plan = self._pipeline.add_elements([element], missing_deps)
@@ -212,7 +263,10 @@ class Stream():
require_buildtree = self._buildtree_pull_required([element])
# Attempt a pull queue for the given element if remote and context allow it
if require_buildtree:
- self._message(MessageType.INFO, "Attempting to fetch missing artifact buildtree")
+ self._message(
+ MessageType.INFO,
+ "Attempting to fetch missing artifact buildtree",
+ )
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(require_buildtree)
@@ -226,12 +280,21 @@ class Stream():
if usebuildtree == "always":
raise StreamError(message)
- self._message(MessageType.INFO, message + ", shell will be loaded without it")
+ self._message(
+ MessageType.INFO, message + ", shell will be loaded without it"
+ )
else:
buildtree = True
- return element._shell(scope, directory, mounts=mounts, isolate=isolate, prompt=prompt, command=command,
- usebuildtree=buildtree)
+ return element._shell(
+ scope,
+ directory,
+ mounts=mounts,
+ isolate=isolate,
+ prompt=prompt,
+ command=command,
+ usebuildtree=buildtree,
+ )
# build()
#
@@ -246,28 +309,36 @@ class Stream():
# If `remote` specified as None, then regular configuration will be used
# to determine where to push artifacts to.
#
- def build(self, targets, *,
- selection=PipelineSelection.PLAN,
- ignore_junction_targets=False,
- remote=None):
+ def build(
+ self,
+ targets,
+ *,
+ selection=PipelineSelection.PLAN,
+ ignore_junction_targets=False,
+ remote=None
+ ):
use_config = True
if remote:
use_config = False
- elements, _ = \
- self._load(targets, [],
- selection=selection,
- ignore_junction_targets=ignore_junction_targets,
- use_artifact_config=use_config,
- artifact_remote_url=remote,
- use_source_config=True,
- dynamic_plan=True)
+ elements, _ = self._load(
+ targets,
+ [],
+ selection=selection,
+ ignore_junction_targets=ignore_junction_targets,
+ use_artifact_config=use_config,
+ artifact_remote_url=remote,
+ use_source_config=True,
+ dynamic_plan=True,
+ )
# Assert that the elements are consistent
self._pipeline.assert_consistent(elements)
- if all(project.remote_execution_specs for project in self._context.get_projects()):
+ if all(
+ project.remote_execution_specs for project in self._context.get_projects()
+ ):
# Remote execution is configured for all projects.
# Require artifact files only for target elements and their runtime dependencies.
self._context.set_artifact_files_optional()
@@ -311,12 +382,16 @@ class Stream():
# track_cross_junctions (bool): Whether tracking should cross junction boundaries
# remote (str|None): The URL of a specific remote server to pull from.
#
- def fetch(self, targets, *,
- selection=PipelineSelection.PLAN,
- except_targets=None,
- track_targets=False,
- track_cross_junctions=False,
- remote=None):
+ def fetch(
+ self,
+ targets,
+ *,
+ selection=PipelineSelection.PLAN,
+ except_targets=None,
+ track_targets=False,
+ track_cross_junctions=False,
+ remote=None
+ ):
if track_targets:
track_targets = targets
@@ -331,14 +406,17 @@ class Stream():
if remote:
use_source_config = False
- elements, track_elements = \
- self._load(targets, track_targets,
- selection=selection, track_selection=track_selection,
- except_targets=except_targets,
- track_except_targets=track_except_targets,
- track_cross_junctions=track_cross_junctions,
- use_source_config=use_source_config,
- source_remote_url=remote)
+ elements, track_elements = self._load(
+ targets,
+ track_targets,
+ selection=selection,
+ track_selection=track_selection,
+ except_targets=except_targets,
+ track_except_targets=track_except_targets,
+ track_cross_junctions=track_cross_junctions,
+ use_source_config=use_source_config,
+ source_remote_url=remote,
+ )
# Delegated to a shared fetch method
self._fetch(elements, track_elements=track_elements)
@@ -356,20 +434,27 @@ class Stream():
# If no error is encountered while tracking, then the project files
# are rewritten inline.
#
- def track(self, targets, *,
- selection=PipelineSelection.REDIRECT,
- except_targets=None,
- cross_junctions=False):
+ def track(
+ self,
+ targets,
+ *,
+ selection=PipelineSelection.REDIRECT,
+ except_targets=None,
+ cross_junctions=False
+ ):
# We pass no target to build. Only to track. Passing build targets
# would fully load project configuration which might not be
# possible before tracking is done.
- _, elements = \
- self._load([], targets,
- selection=selection, track_selection=selection,
- except_targets=except_targets,
- track_except_targets=except_targets,
- track_cross_junctions=cross_junctions)
+ _, elements = self._load(
+ [],
+ targets,
+ selection=selection,
+ track_selection=selection,
+ except_targets=except_targets,
+ track_except_targets=except_targets,
+ track_cross_junctions=cross_junctions,
+ )
self._scheduler.clear_queues()
track_queue = TrackQueue(self._scheduler)
@@ -390,21 +475,28 @@ class Stream():
# If `remote` specified as None, then regular configuration will be used
# to determine where to pull artifacts from.
#
- def pull(self, targets, *,
- selection=PipelineSelection.NONE,
- ignore_junction_targets=False,
- remote=None):
+ def pull(
+ self,
+ targets,
+ *,
+ selection=PipelineSelection.NONE,
+ ignore_junction_targets=False,
+ remote=None
+ ):
use_config = True
if remote:
use_config = False
- elements, _ = self._load(targets, (),
- selection=selection,
- ignore_junction_targets=ignore_junction_targets,
- use_artifact_config=use_config,
- artifact_remote_url=remote,
- load_refs=True)
+ elements, _ = self._load(
+ targets,
+ (),
+ selection=selection,
+ ignore_junction_targets=ignore_junction_targets,
+ use_artifact_config=use_config,
+ artifact_remote_url=remote,
+ load_refs=True,
+ )
if not self._artifacts.has_fetch_remotes():
raise StreamError("No artifact caches available for pulling artifacts")
@@ -432,21 +524,28 @@ class Stream():
# a pull queue will be created if user context and available remotes allow for
# attempting to fetch them.
#
- def push(self, targets, *,
- selection=PipelineSelection.NONE,
- ignore_junction_targets=False,
- remote=None):
+ def push(
+ self,
+ targets,
+ *,
+ selection=PipelineSelection.NONE,
+ ignore_junction_targets=False,
+ remote=None
+ ):
use_config = True
if remote:
use_config = False
- elements, _ = self._load(targets, (),
- selection=selection,
- ignore_junction_targets=ignore_junction_targets,
- use_artifact_config=use_config,
- artifact_remote_url=remote,
- load_refs=True)
+ elements, _ = self._load(
+ targets,
+ (),
+ selection=selection,
+ ignore_junction_targets=ignore_junction_targets,
+ use_artifact_config=use_config,
+ artifact_remote_url=remote,
+ load_refs=True,
+ )
if not self._artifacts.has_push_remotes():
raise StreamError("No artifact caches available for pushing artifacts")
@@ -456,7 +555,9 @@ class Stream():
# Check if we require a pull queue, with given artifact state and context
require_buildtrees = self._buildtree_pull_required(elements)
if require_buildtrees:
- self._message(MessageType.INFO, "Attempting to fetch missing artifact buildtrees")
+ self._message(
+ MessageType.INFO, "Attempting to fetch missing artifact buildtrees"
+ )
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(require_buildtrees)
@@ -488,10 +589,15 @@ class Stream():
# NOTE: Usually we check the _SchedulerErrorAction when a *job* has failed.
# However, we cannot create a PushQueue job unless we intentionally
# ready an uncached element in the PushQueue.
- if self._context.sched_error_action == _SchedulerErrorAction.CONTINUE and uncached_elements:
+ if (
+ self._context.sched_error_action == _SchedulerErrorAction.CONTINUE
+ and uncached_elements
+ ):
names = [element.name for element in uncached_elements]
- fail_str = "Error while pushing. The following elements were not pushed as they are " \
+ fail_str = (
+ "Error while pushing. The following elements were not pushed as they are "
"not yet cached:\n\n\t{}\n".format("\n\t".join(names))
+ )
raise StreamError(fail_str)
@@ -515,17 +621,23 @@ class Stream():
# pull (bool): If true will attempt to pull any missing or incomplete
# artifacts.
#
- def checkout(self, target, *,
- location=None,
- force=False,
- selection=PipelineSelection.RUN,
- integrate=True,
- hardlinks=False,
- compression='',
- pull=False,
- tar=False):
-
- elements, _ = self._load((target,), (), selection=selection, use_artifact_config=True, load_refs=True)
+ def checkout(
+ self,
+ target,
+ *,
+ location=None,
+ force=False,
+ selection=PipelineSelection.RUN,
+ integrate=True,
+ hardlinks=False,
+ compression="",
+ pull=False,
+ tar=False
+ ):
+
+ elements, _ = self._load(
+ (target,), (), selection=selection, use_artifact_config=True, load_refs=True
+ )
# self.targets contains a list of the loaded target objects
# if we specify --deps build, Stream._load() will return a list
@@ -537,22 +649,35 @@ class Stream():
uncached_elts = [elt for elt in elements if not elt._cached()]
if uncached_elts and pull:
- self._message(MessageType.INFO, "Attempting to fetch missing or incomplete artifact")
+ self._message(
+ MessageType.INFO, "Attempting to fetch missing or incomplete artifact"
+ )
self._scheduler.clear_queues()
self._add_queue(PullQueue(self._scheduler))
self._enqueue_plan(uncached_elts)
self._run()
try:
- scope = {'run': Scope.RUN, 'build': Scope.BUILD, 'none': Scope.NONE, 'all': Scope.ALL}
- with target._prepare_sandbox(scope=scope[selection], directory=None,
- integrate=integrate) as sandbox:
+ scope = {
+ "run": Scope.RUN,
+ "build": Scope.BUILD,
+ "none": Scope.NONE,
+ "all": Scope.ALL,
+ }
+ with target._prepare_sandbox(
+ scope=scope[selection], directory=None, integrate=integrate
+ ) as sandbox:
# Copy or move the sandbox to the target directory
virdir = sandbox.get_virtual_directory()
- self._export_artifact(tar, location, compression, target, hardlinks, virdir)
+ self._export_artifact(
+ tar, location, compression, target, hardlinks, virdir
+ )
except BstError as e:
- raise StreamError("Error while staging dependencies into a sandbox"
- ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+ raise StreamError(
+ "Error while staging dependencies into a sandbox" ": '{}'".format(e),
+ detail=e.detail,
+ reason=e.reason,
+ ) from e
# _export_artifact()
#
@@ -568,34 +693,32 @@ class Stream():
#
def _export_artifact(self, tar, location, compression, target, hardlinks, virdir):
if not tar:
- with target.timed_activity("Checking out files in '{}'"
- .format(location)):
+ with target.timed_activity("Checking out files in '{}'".format(location)):
try:
if hardlinks:
self._checkout_hardlinks(virdir, location)
else:
virdir.export_files(location)
except OSError as e:
- raise StreamError("Failed to checkout files: '{}'"
- .format(e)) from e
+ raise StreamError("Failed to checkout files: '{}'".format(e)) from e
else:
- to_stdout = location == '-'
+ to_stdout = location == "-"
mode = _handle_compression(compression, to_stream=to_stdout)
with target.timed_activity("Creating tarball"):
if to_stdout:
# Save the stdout FD to restore later
saved_fd = os.dup(sys.stdout.fileno())
try:
- with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
+ with os.fdopen(sys.stdout.fileno(), "wb") as fo:
with tarfile.open(fileobj=fo, mode=mode) as tf:
- virdir.export_to_tar(tf, '.')
+ virdir.export_to_tar(tf, ".")
finally:
# No matter what, restore stdout for further use
os.dup2(saved_fd, sys.stdout.fileno())
os.close(saved_fd)
else:
with tarfile.open(location, mode=mode) as tf:
- virdir.export_to_tar(tf, '.')
+ virdir.export_to_tar(tf, ".")
# artifact_show()
#
@@ -604,13 +727,11 @@ class Stream():
# Args:
# targets (str): Targets to show the cached state of
#
- def artifact_show(self, targets, *,
- selection=PipelineSelection.NONE):
+ def artifact_show(self, targets, *, selection=PipelineSelection.NONE):
# Obtain list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(targets,
- selection=selection,
- use_artifact_config=True,
- load_refs=True)
+ target_objects = self.load_selection(
+ targets, selection=selection, use_artifact_config=True, load_refs=True
+ )
if self._artifacts.has_fetch_remotes():
self._pipeline.check_remotes(target_objects)
@@ -635,7 +756,9 @@ class Stream():
#
def artifact_log(self, targets):
# Return list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
+ target_objects = self.load_selection(
+ targets, selection=PipelineSelection.NONE, load_refs=True
+ )
artifact_logs = {}
for obj in target_objects:
@@ -644,7 +767,9 @@ class Stream():
self._message(MessageType.WARN, "{} is not cached".format(ref))
continue
elif not obj._cached_logs():
- self._message(MessageType.WARN, "{} is cached without log files".format(ref))
+ self._message(
+ MessageType.WARN, "{} is cached without log files".format(ref)
+ )
continue
artifact_logs[obj.name] = obj.get_logs()
@@ -663,7 +788,9 @@ class Stream():
#
def artifact_list_contents(self, targets):
# Return list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
+ target_objects = self.load_selection(
+ targets, selection=PipelineSelection.NONE, load_refs=True
+ )
elements_to_files = {}
for obj in target_objects:
@@ -685,10 +812,11 @@ class Stream():
# Args:
# targets (str): Targets to remove
#
- def artifact_delete(self, targets, *,
- selection=PipelineSelection.NONE):
+ def artifact_delete(self, targets, *, selection=PipelineSelection.NONE):
# Return list of Element and/or ArtifactElement objects
- target_objects = self.load_selection(targets, selection=selection, load_refs=True)
+ target_objects = self.load_selection(
+ targets, selection=selection, load_refs=True
+ )
# Some of the targets may refer to the same key, so first obtain a
# set of the refs to be removed.
@@ -726,20 +854,24 @@ class Stream():
# compression (str): The type of compression for tarball
# include_build_scripts (bool): Whether to include build scripts in the checkout
#
- def source_checkout(self, target, *,
- location=None,
- force=False,
- deps='none',
- except_targets=(),
- tar=False,
- compression=None,
- include_build_scripts=False):
+ def source_checkout(
+ self,
+ target,
+ *,
+ location=None,
+ force=False,
+ deps="none",
+ except_targets=(),
+ tar=False,
+ compression=None,
+ include_build_scripts=False
+ ):
self._check_location_writable(location, force=force, tar=tar)
- elements, _ = self._load((target,), (),
- selection=deps,
- except_targets=except_targets)
+ elements, _ = self._load(
+ (target,), (), selection=deps, except_targets=except_targets
+ )
# Assert all sources are cached in the source dir
self._fetch(elements)
@@ -747,11 +879,15 @@ class Stream():
# Stage all sources determined by scope
try:
- self._source_checkout(elements, location, force, deps,
- tar, compression, include_build_scripts)
+ self._source_checkout(
+ elements, location, force, deps, tar, compression, include_build_scripts
+ )
except BstError as e:
- raise StreamError("Error while writing sources"
- ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+ raise StreamError(
+ "Error while writing sources" ": '{}'".format(e),
+ detail=e.detail,
+ reason=e.reason,
+ ) from e
self._message(MessageType.INFO, "Checked out sources to '{}'".format(location))
@@ -766,11 +902,7 @@ class Stream():
# force (bool): Whether to ignore contents in an existing directory
# custom_dir (str): Custom location to create a workspace or false to use default location.
#
- def workspace_open(self, targets, *,
- no_checkout,
- track_first,
- force,
- custom_dir):
+ def workspace_open(self, targets, *, no_checkout, track_first, force, custom_dir):
# This function is a little funny but it is trying to be as atomic as possible.
if track_first:
@@ -778,10 +910,13 @@ class Stream():
else:
track_targets = ()
- elements, track_elements = self._load(targets, track_targets,
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT,
- ignore_workspaces=True)
+ elements, track_elements = self._load(
+ targets,
+ track_targets,
+ selection=PipelineSelection.REDIRECT,
+ track_selection=PipelineSelection.REDIRECT,
+ ignore_workspaces=True,
+ )
workspaces = self._context.get_workspaces()
@@ -799,44 +934,69 @@ class Stream():
for target in elements:
if not list(target.sources()):
- build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
+ build_depends = [
+ x.name for x in target.dependencies(Scope.BUILD, recurse=False)
+ ]
if not build_depends:
- raise StreamError("The element {} has no sources".format(target.name))
+ raise StreamError(
+ "The element {} has no sources".format(target.name)
+ )
detail = "Try opening a workspace on one of its dependencies instead:\n"
detail += " \n".join(build_depends)
- raise StreamError("The element {} has no sources".format(target.name), detail=detail)
+ raise StreamError(
+ "The element {} has no sources".format(target.name), detail=detail
+ )
# Check for workspace config
workspace = workspaces.get_workspace(target._get_full_name())
if workspace:
if not force:
- raise StreamError("Element '{}' already has workspace defined at: {}"
- .format(target.name, workspace.get_absolute_path()))
+ raise StreamError(
+ "Element '{}' already has workspace defined at: {}".format(
+ target.name, workspace.get_absolute_path()
+ )
+ )
if not no_checkout:
- target.warn("Replacing existing workspace for element '{}' defined at: {}"
- .format(target.name, workspace.get_absolute_path()))
- self.workspace_close(target._get_full_name(), remove_dir=not no_checkout)
+ target.warn(
+ "Replacing existing workspace for element '{}' defined at: {}".format(
+ target.name, workspace.get_absolute_path()
+ )
+ )
+ self.workspace_close(
+ target._get_full_name(), remove_dir=not no_checkout
+ )
target_consistency = target._get_consistency()
- if not no_checkout and target_consistency < Consistency.CACHED and \
- target_consistency._source_cached():
- raise StreamError("Could not stage uncached source. For {} ".format(target.name) +
- "Use `--track` to track and " +
- "fetch the latest version of the " +
- "source.")
+ if (
+ not no_checkout
+ and target_consistency < Consistency.CACHED
+ and target_consistency._source_cached()
+ ):
+ raise StreamError(
+ "Could not stage uncached source. For {} ".format(target.name)
+ + "Use `--track` to track and "
+ + "fetch the latest version of the "
+ + "source."
+ )
if not custom_dir:
- directory = os.path.abspath(os.path.join(self._context.workspacedir, target.name))
- if directory[-4:] == '.bst':
+ directory = os.path.abspath(
+ os.path.join(self._context.workspacedir, target.name)
+ )
+ if directory[-4:] == ".bst":
directory = directory[:-4]
expanded_directories.append(directory)
if custom_dir:
if len(elements) != 1:
- raise StreamError("Exactly one element can be given if --directory is used",
- reason='directory-with-multiple-elements')
+ raise StreamError(
+ "Exactly one element can be given if --directory is used",
+ reason="directory-with-multiple-elements",
+ )
directory = os.path.abspath(custom_dir)
- expanded_directories = [directory, ]
+ expanded_directories = [
+ directory,
+ ]
else:
                # If this fails it is a bug in whatever calls this, usually cli.py, and so cannot be tested for via the
                # run bst test mechanism.
@@ -845,12 +1005,20 @@ class Stream():
for target, directory in zip(elements, expanded_directories):
if os.path.exists(directory):
if not os.path.isdir(directory):
- raise StreamError("For element '{}', Directory path is not a directory: {}"
- .format(target.name, directory), reason='bad-directory')
+ raise StreamError(
+ "For element '{}', Directory path is not a directory: {}".format(
+ target.name, directory
+ ),
+ reason="bad-directory",
+ )
if not (no_checkout or force) and os.listdir(directory):
- raise StreamError("For element '{}', Directory path is not empty: {}"
- .format(target.name, directory), reason='bad-directory')
+ raise StreamError(
+ "For element '{}', Directory path is not empty: {}".format(
+ target.name, directory
+ ),
+ reason="bad-directory",
+ )
if os.listdir(directory):
if force and not no_checkout:
utils._force_rmtree(directory)
@@ -859,8 +1027,10 @@ class Stream():
# Now it does the bits that can not be made atomic.
targetGenerator = zip(elements, expanded_directories)
for target, directory in targetGenerator:
- self._message(MessageType.INFO, "Creating workspace for element {}"
- .format(target.name))
+ self._message(
+ MessageType.INFO,
+ "Creating workspace for element {}".format(target.name),
+ )
workspace = workspaces.get_workspace(target._get_full_name())
if workspace and not no_checkout:
@@ -870,15 +1040,23 @@ class Stream():
try:
os.makedirs(directory, exist_ok=True)
except OSError as e:
- todo_elements = " ".join([str(target.name) for target, directory_dict in targetGenerator])
+ todo_elements = " ".join(
+ [str(target.name) for target, directory_dict in targetGenerator]
+ )
if todo_elements:
# This output should make creating the remaining workspaces as easy as possible.
- todo_elements = "\nDid not try to create workspaces for " + todo_elements
- raise StreamError("Failed to create workspace directory: {}".format(e) + todo_elements) from e
+ todo_elements = (
+ "\nDid not try to create workspaces for " + todo_elements
+ )
+ raise StreamError(
+ "Failed to create workspace directory: {}".format(e) + todo_elements
+ ) from e
workspaces.create_workspace(target, directory, checkout=not no_checkout)
- self._message(MessageType.INFO, "Created a workspace for element: {}"
- .format(target._get_full_name()))
+ self._message(
+ MessageType.INFO,
+ "Created a workspace for element: {}".format(target._get_full_name()),
+ )
# workspace_close
#
@@ -894,13 +1072,17 @@ class Stream():
# Remove workspace directory if prompted
if remove_dir:
- with self._context.messenger.timed_activity("Removing workspace directory {}"
- .format(workspace.get_absolute_path())):
+ with self._context.messenger.timed_activity(
+ "Removing workspace directory {}".format(workspace.get_absolute_path())
+ ):
try:
shutil.rmtree(workspace.get_absolute_path())
except OSError as e:
- raise StreamError("Could not remove '{}': {}"
- .format(workspace.get_absolute_path(), e)) from e
+ raise StreamError(
+ "Could not remove '{}': {}".format(
+ workspace.get_absolute_path(), e
+ )
+ ) from e
# Delete the workspace and save the configuration
workspaces.delete_workspace(element_name)
@@ -919,9 +1101,12 @@ class Stream():
#
def workspace_reset(self, targets, *, soft, track_first):
- elements, _ = self._load(targets, [],
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT)
+ elements, _ = self._load(
+ targets,
+ [],
+ selection=PipelineSelection.REDIRECT,
+ track_selection=PipelineSelection.REDIRECT,
+ )
nonexisting = []
for element in elements:
@@ -937,14 +1122,23 @@ class Stream():
if soft:
workspace.prepared = False
- self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
- .format(element.name, workspace_path))
+ self._message(
+ MessageType.INFO,
+ "Reset workspace state for {} at: {}".format(
+ element.name, workspace_path
+ ),
+ )
continue
self.workspace_close(element._get_full_name(), remove_dir=True)
workspaces.save_config()
- self.workspace_open([element._get_full_name()],
- no_checkout=False, track_first=track_first, force=True, custom_dir=workspace_path)
+ self.workspace_open(
+ [element._get_full_name()],
+ no_checkout=False,
+ track_first=track_first,
+ force=True,
+ custom_dir=workspace_path,
+ )
# workspace_exists
#
@@ -992,14 +1186,12 @@ class Stream():
workspaces = []
for element_name, workspace_ in self._context.get_workspaces().list():
workspace_detail = {
- 'element': element_name,
- 'directory': workspace_.get_absolute_path(),
+ "element": element_name,
+ "directory": workspace_.get_absolute_path(),
}
workspaces.append(workspace_detail)
- _yaml.roundtrip_dump({
- 'workspaces': workspaces
- })
+ _yaml.roundtrip_dump({"workspaces": workspaces})
# redirect_element_names()
#
@@ -1025,9 +1217,12 @@ class Stream():
else:
output_elements.add(e)
if load_elements:
- loaded_elements, _ = self._load(load_elements, (),
- selection=PipelineSelection.REDIRECT,
- track_selection=PipelineSelection.REDIRECT)
+ loaded_elements, _ = self._load(
+ load_elements,
+ (),
+ selection=PipelineSelection.REDIRECT,
+ track_selection=PipelineSelection.REDIRECT,
+ )
for e in loaded_elements:
output_elements.add(e.name)
@@ -1158,30 +1353,41 @@ class Stream():
# (list of Element): The primary element selection
# (list of Element): The tracking element selection
#
- def _load(self, targets, track_targets, *,
- selection=PipelineSelection.NONE,
- track_selection=PipelineSelection.NONE,
- except_targets=(),
- track_except_targets=(),
- track_cross_junctions=False,
- ignore_junction_targets=False,
- use_artifact_config=False,
- use_source_config=False,
- artifact_remote_url=None,
- source_remote_url=None,
- dynamic_plan=False,
- load_refs=False,
- ignore_workspaces=False):
+ def _load(
+ self,
+ targets,
+ track_targets,
+ *,
+ selection=PipelineSelection.NONE,
+ track_selection=PipelineSelection.NONE,
+ except_targets=(),
+ track_except_targets=(),
+ track_cross_junctions=False,
+ ignore_junction_targets=False,
+ use_artifact_config=False,
+ use_source_config=False,
+ artifact_remote_url=None,
+ source_remote_url=None,
+ dynamic_plan=False,
+ load_refs=False,
+ ignore_workspaces=False
+ ):
# Classify element and artifact strings
target_elements, target_artifacts = self._classify_artifacts(targets)
if target_artifacts:
if not load_refs:
- detail = '\n'.join(target_artifacts)
- raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)
+ detail = "\n".join(target_artifacts)
+ raise ArtifactElementError(
+ "Cannot perform this operation with artifact refs:", detail=detail
+ )
if selection in (PipelineSelection.ALL, PipelineSelection.RUN):
- raise StreamError("Error: '--deps {}' is not supported for artifact refs".format(selection))
+ raise StreamError(
+ "Error: '--deps {}' is not supported for artifact refs".format(
+ selection
+ )
+ )
# Load rewritable if we have any tracking selection to make
rewritable = False
@@ -1189,19 +1395,37 @@ class Stream():
rewritable = True
# Load all target elements
- loadable = [target_elements, except_targets, track_targets, track_except_targets]
+ loadable = [
+ target_elements,
+ except_targets,
+ track_targets,
+ track_except_targets,
+ ]
if any(loadable):
- elements, except_elements, track_elements, track_except_elements = \
- self._pipeline.load(loadable, rewritable=rewritable, ignore_workspaces=ignore_workspaces)
+ (
+ elements,
+ except_elements,
+ track_elements,
+ track_except_elements,
+ ) = self._pipeline.load(
+ loadable, rewritable=rewritable, ignore_workspaces=ignore_workspaces
+ )
else:
- elements, except_elements, track_elements, track_except_elements = [], [], [], []
+ elements, except_elements, track_elements, track_except_elements = (
+ [],
+ [],
+ [],
+ [],
+ )
# Load all target artifacts
- artifacts = self._pipeline.load_artifacts(target_artifacts) if target_artifacts else []
+ artifacts = (
+ self._pipeline.load_artifacts(target_artifacts) if target_artifacts else []
+ )
# Optionally filter out junction elements
if ignore_junction_targets:
- elements = [e for e in elements if e.get_kind() != 'junction']
+ elements = [e for e in elements if e.get_kind() != "junction"]
# Hold on to the targets
self.targets = elements + artifacts
@@ -1213,8 +1437,10 @@ class Stream():
# This can happen with `bst build --track`
#
if targets and not self._pipeline.targets_include(elements, track_elements):
- raise StreamError("Specified tracking targets that are not "
- "within the scope of primary targets")
+ raise StreamError(
+ "Specified tracking targets that are not "
+ "within the scope of primary targets"
+ )
# First take care of marking tracking elements, this must be
# done before resolving element states.
@@ -1236,14 +1462,14 @@ class Stream():
for project, project_elements in track_projects.items():
selected = self._pipeline.get_selection(project_elements, track_selection)
- selected = self._pipeline.track_cross_junction_filter(project,
- selected,
- track_cross_junctions)
+ selected = self._pipeline.track_cross_junction_filter(
+ project, selected, track_cross_junctions
+ )
track_selected.extend(selected)
- track_selected = self._pipeline.except_elements(track_elements,
- track_selected,
- track_except_elements)
+ track_selected = self._pipeline.except_elements(
+ track_elements, track_selected, track_except_elements
+ )
for element in track_selected:
element._schedule_tracking()
@@ -1257,16 +1483,20 @@ class Stream():
project.ensure_fully_loaded()
# Connect to remote caches, this needs to be done before resolving element state
- self._artifacts.setup_remotes(use_config=use_artifact_config, remote_url=artifact_remote_url)
- self._sourcecache.setup_remotes(use_config=use_source_config, remote_url=source_remote_url)
+ self._artifacts.setup_remotes(
+ use_config=use_artifact_config, remote_url=artifact_remote_url
+ )
+ self._sourcecache.setup_remotes(
+ use_config=use_source_config, remote_url=source_remote_url
+ )
# Now move on to loading primary selection.
#
self._pipeline.resolve_elements(self.targets)
selected = self._pipeline.get_selection(self.targets, selection, silent=False)
- selected = self._pipeline.except_elements(self.targets,
- selected,
- except_elements)
+ selected = self._pipeline.except_elements(
+ self.targets, selected, except_elements
+ )
if selection == PipelineSelection.PLAN and dynamic_plan:
# We use a dynamic build plan, only request artifacts of top-level targets,
@@ -1286,8 +1516,7 @@ class Stream():
#
def _message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self._context.messenger.message(
- Message(message_type, message, **args))
+ self._context.messenger.message(Message(message_type, message, **args))
# _add_queue()
#
@@ -1328,9 +1557,9 @@ class Stream():
# unique_id (str): A unique_id to load an Element instance
#
def _failure_retry(self, action_name, unique_id):
- notification = Notification(NotificationType.RETRY,
- job_action=action_name,
- element=unique_id)
+ notification = Notification(
+ NotificationType.RETRY, job_action=action_name, element=unique_id
+ )
self._notify(notification)
# _run()
@@ -1347,7 +1576,9 @@ class Stream():
if self._session_start_callback is not None:
self._session_start_callback()
- status = self._scheduler.run(self.queues, self._context.get_cascache().get_casd_process())
+ status = self._scheduler.run(
+ self.queues, self._context.get_cascache().get_casd_process()
+ )
if status == SchedStatus.ERROR:
raise StreamError()
@@ -1377,8 +1608,7 @@ class Stream():
# Filter out elements with cached sources, only from the fetch plan
# let the track plan resolve new refs.
- cached = [elt for elt in fetch_plan
- if not elt._should_fetch(fetch_original)]
+ cached = [elt for elt in fetch_plan if not elt._should_fetch(fetch_original)]
fetch_plan = self._pipeline.subtract_elements(fetch_plan, cached)
# Construct queues, enqueue and run
@@ -1413,21 +1643,22 @@ class Stream():
try:
os.makedirs(location, exist_ok=True)
except OSError as e:
- raise StreamError("Failed to create destination directory: '{}'"
- .format(e)) from e
+ raise StreamError(
+ "Failed to create destination directory: '{}'".format(e)
+ ) from e
if not os.access(location, os.W_OK):
- raise StreamError("Destination directory '{}' not writable"
- .format(location))
+ raise StreamError(
+ "Destination directory '{}' not writable".format(location)
+ )
if not force and os.listdir(location):
- raise StreamError("Destination directory '{}' not empty"
- .format(location))
- elif os.path.exists(location) and location != '-':
+ raise StreamError(
+ "Destination directory '{}' not empty".format(location)
+ )
+ elif os.path.exists(location) and location != "-":
if not os.access(location, os.W_OK):
- raise StreamError("Output file '{}' not writable"
- .format(location))
+ raise StreamError("Output file '{}' not writable".format(location))
if not force and os.path.exists(location):
- raise StreamError("Output file '{}' already exists"
- .format(location))
+ raise StreamError("Output file '{}' already exists".format(location))
# Helper function for checkout()
#
@@ -1435,18 +1666,23 @@ class Stream():
try:
utils.safe_remove(directory)
except OSError as e:
- raise StreamError("Failed to remove checkout directory: {}".format(e)) from e
+ raise StreamError(
+ "Failed to remove checkout directory: {}".format(e)
+ ) from e
sandbox_vroot.export_files(directory, can_link=True, can_destroy=True)
# Helper function for source_checkout()
- def _source_checkout(self, elements,
- location=None,
- force=False,
- deps='none',
- tar=False,
- compression=None,
- include_build_scripts=False):
+ def _source_checkout(
+ self,
+ elements,
+ location=None,
+ force=False,
+ deps="none",
+ tar=False,
+ compression=None,
+ include_build_scripts=False,
+ ):
location = os.path.abspath(location)
# Stage all our sources in a temporary directory. The this
@@ -1462,8 +1698,9 @@ class Stream():
else:
self._move_directory(temp_source_dir.name, location, force)
except OSError as e:
- raise StreamError("Failed to checkout sources to {}: {}"
- .format(location, e)) from e
+ raise StreamError(
+ "Failed to checkout sources to {}: {}".format(location, e)
+ ) from e
finally:
with suppress(FileNotFoundError):
temp_source_dir.cleanup()
@@ -1505,10 +1742,10 @@ class Stream():
# Create a tarball from the content of directory
def _create_tarball(self, directory, tar_name, compression):
if compression is None:
- compression = ''
+ compression = ""
mode = _handle_compression(compression)
try:
- with utils.save_file_atomic(tar_name, mode='wb') as f:
+ with utils.save_file_atomic(tar_name, mode="wb") as f:
tarball = tarfile.open(fileobj=f, mode=mode)
for item in os.listdir(str(directory)):
file_to_add = os.path.join(directory, item)
@@ -1582,7 +1819,11 @@ class Stream():
for element in elements:
# Check if element is partially cached without its buildtree, as the element
# artifact may not be cached at all
- if element._cached() and not element._cached_buildtree() and element._buildtree_exists():
+ if (
+ element._cached()
+ and not element._cached_buildtree()
+ and element._buildtree_exists()
+ ):
required_list.append(element)
return required_list
@@ -1605,7 +1846,7 @@ class Stream():
artifact_globs = []
for target in targets:
- if target.endswith('.bst'):
+ if target.endswith(".bst"):
if any(c in "*?[" for c in target):
element_globs.append(target)
else:
@@ -1635,7 +1876,12 @@ class Stream():
for glob in artifact_globs:
artifact_refs.extend(self._artifacts.list_artifacts(glob=glob))
if not artifact_refs:
- self._message(MessageType.WARN, "No artifacts found for globs: {}".format(', '.join(artifact_globs)))
+ self._message(
+ MessageType.WARN,
+ "No artifacts found for globs: {}".format(
+ ", ".join(artifact_globs)
+ ),
+ )
return element_targets, artifact_refs
@@ -1651,12 +1897,17 @@ class Stream():
elif notification.notification_type == NotificationType.TICK:
self._ticker_callback()
elif notification.notification_type == NotificationType.JOB_START:
- self._state.add_task(notification.job_action, notification.full_name, notification.time)
+ self._state.add_task(
+ notification.job_action, notification.full_name, notification.time
+ )
elif notification.notification_type == NotificationType.JOB_COMPLETE:
self._state.remove_task(notification.job_action, notification.full_name)
if notification.job_status == JobStatus.FAIL:
- self._state.fail_task(notification.job_action, notification.full_name,
- notification.element)
+ self._state.fail_task(
+ notification.job_action,
+ notification.full_name,
+ notification.element,
+ )
elif notification.notification_type == NotificationType.SCHED_START_TIME:
self._starttime = notification.time
elif notification.notification_type == NotificationType.RUNNING:
@@ -1701,5 +1952,5 @@ class Stream():
# (str): The tarfile mode string
#
def _handle_compression(compression, *, to_stream=False):
- mode_prefix = 'w|' if to_stream else 'w:'
+ mode_prefix = "w|" if to_stream else "w:"
return mode_prefix + compression
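Two other patterns recur throughout the _stream.py hunks: quote normalisation (every single-quoted literal on a '-' line reappears double-quoted) and the trailing-comma behaviour that turns `[directory, ]` into a one-element-per-line list. A short sketch of both, again assuming a stock black installation:

    # Quote normalisation and trailing-comma explosion, sketched with Black's
    # API; 'location' and 'directory' are never executed, only parsed.
    import black

    BEFORE = (
        "to_stdout = location == '-'\n"
        "expanded_directories = [directory, ]\n"
    )

    # The output should match the shape of the corresponding '+' lines above:
    # double quotes, and the list kept exploded because of its trailing comma.
    print(black.format_str(BEFORE, mode=black.FileMode()))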
diff --git a/src/buildstream/_version.py b/src/buildstream/_version.py
index 03f946cb8..12dde1df8 100644
--- a/src/buildstream/_version.py
+++ b/src/buildstream/_version.py
@@ -60,17 +60,18 @@ HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
+
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
+
return decorate
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
- env=None):
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
@@ -78,10 +79,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ p = subprocess.Popen(
+ [c] + args,
+ cwd=cwd,
+ env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr else None),
+ )
break
except EnvironmentError:
e = sys.exc_info()[1]
@@ -118,16 +122,22 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
- return {"version": dirname[len(parentdir_prefix):],
- "full-revisionid": None,
- "dirty": False, "error": None, "date": None}
+ return {
+ "version": dirname[len(parentdir_prefix) :],
+ "full-revisionid": None,
+ "dirty": False,
+ "error": None,
+ "date": None,
+ }
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
- print("Tried directories %s but none started with prefix %s" %
- (str(rootdirs), parentdir_prefix))
+ print(
+ "Tried directories %s but none started with prefix %s"
+ % (str(rootdirs), parentdir_prefix)
+ )
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@@ -183,7 +193,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
@@ -192,7 +202,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = set([r for r in refs if re.search(r"\d", r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
@@ -200,19 +210,26 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
+ r = ref[len(tag_prefix) :]
if verbose:
print("picking %s" % r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
+ return {
+ "version": r,
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False,
+ "error": None,
+ "date": date,
+ }
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
- return {"version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": "no suitable tags", "date": None}
+ return {
+ "version": "0+unknown",
+ "full-revisionid": keywords["full"].strip(),
+ "dirty": False,
+ "error": "no suitable tags",
+ "date": None,
+ }
@register_vcs_handler("git", "pieces_from_vcs")
@@ -227,8 +244,7 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
@@ -236,10 +252,19 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%s%s" % (tag_prefix, tag_regex)],
- cwd=root)
+ describe_out, rc = run_command(
+ GITS,
+ [
+ "describe",
+ "--tags",
+ "--dirty",
+ "--always",
+ "--long",
+ "--match",
+ "%s%s" % (tag_prefix, tag_regex),
+ ],
+ cwd=root,
+ )
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
@@ -262,17 +287,16 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
- git_describe = git_describe[:git_describe.rindex("-dirty")]
+ git_describe = git_describe[: git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
- mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+ mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
- pieces["error"] = ("unable to parse git-describe output: '%s'"
- % describe_out)
+ pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
return pieces
# tag
@@ -281,10 +305,12 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
- pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
- % (full_tag, tag_prefix))
+ pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
+ full_tag,
+ tag_prefix,
+ )
return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix):]
+ pieces["closest-tag"] = full_tag[len(tag_prefix) :]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
@@ -295,13 +321,13 @@ def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_co
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
+ count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
+ 0
+ ].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
@@ -332,8 +358,7 @@ def render_pep440(pieces):
rendered += ".dirty"
else:
# exception #1
- rendered = "0+untagged.%d.g%s" % (pieces["distance"],
- pieces["short"])
+ rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
@@ -447,11 +472,13 @@ def render_git_describe_long(pieces):
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
- return {"version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None}
+ return {
+ "version": "unknown",
+ "full-revisionid": pieces.get("long"),
+ "dirty": None,
+ "error": pieces["error"],
+ "date": None,
+ }
if not style or style == "default":
style = "pep440" # the default
@@ -471,9 +498,13 @@ def render(pieces, style):
else:
raise ValueError("unknown style '%s'" % style)
- return {"version": rendered, "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"], "error": None,
- "date": pieces.get("date")}
+ return {
+ "version": rendered,
+ "full-revisionid": pieces["long"],
+ "dirty": pieces["dirty"],
+ "error": None,
+ "date": pieces.get("date"),
+ }
def get_versions():
@@ -487,8 +518,7 @@ def get_versions():
verbose = cfg.verbose
try:
- return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
- verbose)
+ return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
except NotThisMethod:
pass
@@ -497,13 +527,16 @@ def get_versions():
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
- for i in cfg.versionfile_source.split('/'):
+ for i in cfg.versionfile_source.split("/"):
root = os.path.dirname(root)
except NameError:
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to find root of source tree",
- "date": None}
+ return {
+ "version": "0+unknown",
+ "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to find root of source tree",
+ "date": None,
+ }
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, cfg.tag_regex, root, verbose)
@@ -517,6 +550,10 @@ def get_versions():
except NotThisMethod:
pass
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version", "date": None}
+ return {
+ "version": "0+unknown",
+ "full-revisionid": None,
+ "dirty": None,
+ "error": "unable to compute version",
+ "date": None,
+ }
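Editor's note on the hunk above: render() collapses the pieces gathered by git_pieces_from_vcs() into the dictionary shape shown in this diff. A minimal sketch of the untagged case (the piece values below are invented for illustration, and render() is assumed to be in scope from this versioneer module):

    pieces = {
        "error": None,
        "closest-tag": None,                # no tag reachable from HEAD
        "distance": 7,                      # from `git rev-list HEAD --count`
        "short": "abc1234",
        "long": "abc1234" + "0" * 33,       # hypothetical 40-char revision id
        "dirty": False,
        "date": "2019-11-05T13:00:30+0000",
    }
    result = render(pieces, "pep440")
    # result["version"] == "0+untagged.7.gabc1234"  (the "exception #1" branch above)
    # "full-revisionid", "dirty" and "date" are carried through unchanged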
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index f9023dc54..488630634 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -38,7 +38,7 @@ WORKSPACE_PROJECT_FILE = ".bstproject.yaml"
# Args:
# directory (str): The directory that the workspace exists in.
#
-class WorkspaceProject():
+class WorkspaceProject:
def __init__(self, directory):
self._projects = []
self._directory = directory
@@ -51,7 +51,7 @@ class WorkspaceProject():
# (str): The path to a project
#
def get_default_project_path(self):
- return self._projects[0]['project-path']
+ return self._projects[0]["project-path"]
# get_default_element()
#
@@ -61,7 +61,7 @@ class WorkspaceProject():
# (str): The name of an element
#
def get_default_element(self):
- return self._projects[0]['element-name']
+ return self._projects[0]["element-name"]
# to_dict()
#
@@ -72,8 +72,8 @@ class WorkspaceProject():
#
def to_dict(self):
ret = {
- 'projects': self._projects,
- 'format-version': BST_WORKSPACE_PROJECT_FORMAT_VERSION,
+ "projects": self._projects,
+ "format-version": BST_WORKSPACE_PROJECT_FORMAT_VERSION,
}
return ret
@@ -91,13 +91,16 @@ class WorkspaceProject():
@classmethod
def from_dict(cls, directory, dictionary):
# Only know how to handle one format-version at the moment.
- format_version = int(dictionary['format-version'])
- assert format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION, \
- "Format version {} not found in {}".format(BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary)
+ format_version = int(dictionary["format-version"])
+ assert (
+ format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION
+ ), "Format version {} not found in {}".format(
+ BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary
+ )
workspace_project = cls(directory)
- for item in dictionary['projects']:
- workspace_project.add_project(item['project-path'], item['element-name'])
+ for item in dictionary["projects"]:
+ workspace_project.add_project(item["project-path"], item["element-name"])
return workspace_project
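A minimal sketch of the round trip that to_dict()/from_dict() implement for the .bstproject.yaml payload; the directory, project path and element name here are made up for illustration:

    data = {
        "format-version": BST_WORKSPACE_PROJECT_FORMAT_VERSION,
        "projects": [
            {"project-path": "/home/user/project", "element-name": "hello.bst"},
        ],
    }
    workspace_project = WorkspaceProject.from_dict("/home/user/workspace", data)
    assert workspace_project.to_dict() == data  # the two methods are inverses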
@@ -145,15 +148,17 @@ class WorkspaceProject():
# element_name (str): The name of the element that the workspace belongs to.
#
def add_project(self, project_path, element_name):
- assert (project_path and element_name)
- self._projects.append({'project-path': project_path, 'element-name': element_name})
+ assert project_path and element_name
+ self._projects.append(
+ {"project-path": project_path, "element-name": element_name}
+ )
# WorkspaceProjectCache()
#
# A class to manage workspace project data for multiple workspaces.
#
-class WorkspaceProjectCache():
+class WorkspaceProjectCache:
def __init__(self):
self._projects = {} # Mapping of a workspace directory to its WorkspaceProject
@@ -216,8 +221,10 @@ class WorkspaceProjectCache():
def remove(self, directory):
workspace_project = self.get(directory)
if not workspace_project:
- raise LoadError("Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE),
- LoadErrorReason.MISSING_FILE)
+ raise LoadError(
+ "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE),
+ LoadErrorReason.MISSING_FILE,
+ )
path = workspace_project.get_filename()
try:
os.unlink(path)
@@ -242,8 +249,16 @@ class WorkspaceProjectCache():
# changed between failed builds. Should be
# made obsolete with failed build artifacts.
#
-class Workspace():
- def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
+class Workspace:
+ def __init__(
+ self,
+ toplevel_project,
+ *,
+ last_successful=None,
+ path=None,
+ prepared=False,
+ running_files=None
+ ):
self.prepared = prepared
self.last_successful = last_successful
self._path = path
@@ -261,9 +276,9 @@ class Workspace():
#
def to_dict(self):
ret = {
- 'prepared': self.prepared,
- 'path': self._path,
- 'running_files': self.running_files
+ "prepared": self.prepared,
+ "path": self._path,
+ "running_files": self.running_files,
}
if self.last_successful is not None:
ret["last_successful"] = self.last_successful
@@ -321,7 +336,9 @@ class Workspace():
if os.path.isdir(fullpath):
utils.copy_files(fullpath, directory)
else:
- destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
+ destfile = os.path.join(
+ directory, os.path.basename(self.get_absolute_path())
+ )
utils.safe_copy(fullpath, destfile)
# add_running_files()
@@ -363,8 +380,10 @@ class Workspace():
try:
stat = os.lstat(filename)
except OSError as e:
- raise LoadError("Failed to stat file in workspace: {}".format(e),
- LoadErrorReason.MISSING_FILE)
+ raise LoadError(
+ "Failed to stat file in workspace: {}".format(e),
+ LoadErrorReason.MISSING_FILE,
+ )
# Use the mtime of any file with sub second precision
return stat.st_mtime_ns
@@ -378,13 +397,16 @@ class Workspace():
if os.path.isdir(fullpath):
filelist = utils.list_relative_paths(fullpath)
filelist = [
- (relpath, os.path.join(fullpath, relpath)) for relpath in filelist
+ (relpath, os.path.join(fullpath, relpath))
+ for relpath in filelist
if relpath not in excluded_files
]
else:
filelist = [(self.get_absolute_path(), fullpath)]
- self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]
+ self._key = [
+ (relpath, unique_key(fullpath)) for relpath, fullpath in filelist
+ ]
return self._key
@@ -404,7 +426,7 @@ class Workspace():
# toplevel_project (Project): Top project used to resolve paths.
# workspace_project_cache (WorkspaceProjectCache): The cache of WorkspaceProjects
#
-class Workspaces():
+class Workspaces:
def __init__(self, toplevel_project, workspace_project_cache):
self._toplevel_project = toplevel_project
self._bst_directory = os.path.join(toplevel_project.directory, ".bst")
@@ -440,17 +462,25 @@ class Workspaces():
else:
workspace_path = path
- self._workspaces[element_name] = Workspace(self._toplevel_project, path=workspace_path)
+ self._workspaces[element_name] = Workspace(
+ self._toplevel_project, path=workspace_path
+ )
if checkout:
with target.timed_activity("Staging sources to {}".format(path)):
target._open_workspace()
- workspace_project = self._workspace_project_cache.add(path, project_dir, element_name)
+ workspace_project = self._workspace_project_cache.add(
+ path, project_dir, element_name
+ )
project_file_path = workspace_project.get_filename()
if os.path.exists(project_file_path):
- target.warn("{} was staged from this element's sources".format(WORKSPACE_PROJECT_FILE))
+ target.warn(
+ "{} was staged from this element's sources".format(
+ WORKSPACE_PROJECT_FILE
+ )
+ )
workspace_project.write()
self.save_config()
@@ -525,11 +555,11 @@ class Workspaces():
assert utils._is_main_process()
config = {
- 'format-version': BST_WORKSPACE_FORMAT_VERSION,
- 'workspaces': {
+ "format-version": BST_WORKSPACE_FORMAT_VERSION,
+ "workspaces": {
element: workspace.to_dict()
for element, workspace in self._workspaces.items()
- }
+ },
}
os.makedirs(self._bst_directory, exist_ok=True)
_yaml.roundtrip_dump(config, self._get_filename())
@@ -572,10 +602,12 @@ class Workspaces():
#
def _parse_workspace_config(self, workspaces):
try:
- version = workspaces.get_int('format-version', default=0)
+ version = workspaces.get_int("format-version", default=0)
except ValueError:
- raise LoadError("Format version is not an integer in workspace configuration",
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Format version is not an integer in workspace configuration",
+ LoadErrorReason.INVALID_DATA,
+ )
if version == 0:
# Pre-versioning format can be of two forms
@@ -588,17 +620,23 @@ class Workspaces():
elif config_type is MappingNode:
sources = list(config.values())
if len(sources) > 1:
- detail = "There are multiple workspaces open for '{}'.\n" + \
- "This is not supported anymore.\n" + \
- "Please remove this element from '{}'."
- raise LoadError(detail.format(element, self._get_filename()),
- LoadErrorReason.INVALID_DATA)
+ detail = (
+ "There are multiple workspaces open for '{}'.\n"
+ + "This is not supported anymore.\n"
+ + "Please remove this element from '{}'."
+ )
+ raise LoadError(
+ detail.format(element, self._get_filename()),
+ LoadErrorReason.INVALID_DATA,
+ )
workspaces[element] = sources[0]
else:
- raise LoadError("Workspace config is in unexpected format.",
- LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Workspace config is in unexpected format.",
+ LoadErrorReason.INVALID_DATA,
+ )
res = {
element: Workspace(self._toplevel_project, path=config.as_str())
@@ -607,13 +645,19 @@ class Workspaces():
elif 1 <= version <= BST_WORKSPACE_FORMAT_VERSION:
workspaces = workspaces.get_mapping("workspaces", default={})
- res = {element: self._load_workspace(node)
- for element, node in workspaces.items()}
+ res = {
+ element: self._load_workspace(node)
+ for element, node in workspaces.items()
+ }
else:
- raise LoadError("Workspace configuration format version {} not supported."
- "Your version of buildstream may be too old. Max supported version: {}"
- .format(version, BST_WORKSPACE_FORMAT_VERSION), LoadErrorReason.INVALID_DATA)
+ raise LoadError(
+ "Workspace configuration format version {} not supported."
+ "Your version of buildstream may be too old. Max supported version: {}".format(
+ version, BST_WORKSPACE_FORMAT_VERSION
+ ),
+ LoadErrorReason.INVALID_DATA,
+ )
return res
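For reference, the versioned configuration that _parse_workspace_config() accepts (and that save_config() writes above, under the project's .bst directory) has roughly this shape. It is shown here as a plain Python dict, although in the code it arrives wrapped in a _yaml MappingNode; element names and paths are invented:

    config = {
        "format-version": BST_WORKSPACE_FORMAT_VERSION,
        "workspaces": {
            "hello.bst": {
                "prepared": False,
                "path": "../workspaces/hello",
                "last_successful": None,
                "running_files": None,
            },
        },
    }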
@@ -628,15 +672,15 @@ class Workspaces():
# (Workspace): A newly instantiated Workspace
#
def _load_workspace(self, node):
- running_files = node.get_mapping('running_files', default=None)
+ running_files = node.get_mapping("running_files", default=None)
if running_files:
running_files = running_files.strip_node_info()
dictionary = {
- 'prepared': node.get_bool('prepared', default=False),
- 'path': node.get_str('path'),
- 'last_successful': node.get_str('last_successful', default=None),
- 'running_files': running_files,
+ "prepared": node.get_bool("prepared", default=False),
+ "path": node.get_str("path"),
+ "last_successful": node.get_str("last_successful", default=None),
+ "running_files": running_files,
}
return Workspace.from_dict(self._toplevel_project, dictionary)
diff --git a/src/buildstream/buildelement.py b/src/buildstream/buildelement.py
index 7fe97c168..55f0dc0c3 100644
--- a/src/buildstream/buildelement.py
+++ b/src/buildstream/buildelement.py
@@ -144,17 +144,21 @@ from .types import Scope
# This list is preserved because of an unfortunate situation, we
# need to remove these older commands which were secret and never
# documented, but without breaking the cache keys.
-_legacy_command_steps = ['bootstrap-commands',
- 'configure-commands',
- 'build-commands',
- 'test-commands',
- 'install-commands',
- 'strip-commands']
-
-_command_steps = ['configure-commands',
- 'build-commands',
- 'install-commands',
- 'strip-commands']
+_legacy_command_steps = [
+ "bootstrap-commands",
+ "configure-commands",
+ "build-commands",
+ "test-commands",
+ "install-commands",
+ "strip-commands",
+]
+
+_command_steps = [
+ "configure-commands",
+ "build-commands",
+ "install-commands",
+ "strip-commands",
+]
class BuildElement(Element):
@@ -190,21 +194,21 @@ class BuildElement(Element):
# cache key, while having the side effect of setting max-jobs to 1,
# which is normally automatically resolved and does not affect
# the cache key.
- if self.get_variable('notparallel'):
- dictionary['notparallel'] = True
+ if self.get_variable("notparallel"):
+ dictionary["notparallel"] = True
return dictionary
def configure_sandbox(self, sandbox):
- build_root = self.get_variable('build-root')
- install_root = self.get_variable('install-root')
+ build_root = self.get_variable("build-root")
+ install_root = self.get_variable("install-root")
# Tell the sandbox to mount the build root and install root
sandbox.mark_directory(build_root)
sandbox.mark_directory(install_root)
# Allow running all commands in a specified subdirectory
- command_subdir = self.get_variable('command-subdir')
+ command_subdir = self.get_variable("command-subdir")
if command_subdir:
command_dir = os.path.join(build_root, command_subdir)
else:
@@ -230,16 +234,18 @@ class BuildElement(Element):
dep.integrate(sandbox)
# Stage sources in the build root
- self.stage_sources(sandbox, self.get_variable('build-root'))
+ self.stage_sources(sandbox, self.get_variable("build-root"))
def assemble(self, sandbox):
# Run commands
for command_name in _command_steps:
commands = self.__commands[command_name]
- if not commands or command_name == 'configure-commands':
+ if not commands or command_name == "configure-commands":
continue
- with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running {}".format(command_name)):
+ with sandbox.batch(
+ SandboxFlags.ROOT_READ_ONLY, label="Running {}".format(command_name)
+ ):
for cmd in commands:
self.__run_command(sandbox, cmd)
@@ -247,23 +253,28 @@ class BuildElement(Element):
# to - if an element later attempts to stage to a location
# that is not empty, we abort the build - in this case this
# will almost certainly happen.
- staged_build = os.path.join(self.get_variable('install-root'),
- self.get_variable('build-root'))
+ staged_build = os.path.join(
+ self.get_variable("install-root"), self.get_variable("build-root")
+ )
if os.path.isdir(staged_build) and os.listdir(staged_build):
- self.warn("Writing to %{install-root}/%{build-root}.",
- detail="Writing to this directory will almost " +
- "certainly cause an error, since later elements " +
- "will not be allowed to stage to %{build-root}.")
+ self.warn(
+ "Writing to %{install-root}/%{build-root}.",
+ detail="Writing to this directory will almost "
+ + "certainly cause an error, since later elements "
+ + "will not be allowed to stage to %{build-root}.",
+ )
# Return the payload, this is configurable but is generally
# always the /buildstream-install directory
- return self.get_variable('install-root')
+ return self.get_variable("install-root")
def prepare(self, sandbox):
- commands = self.__commands['configure-commands']
+ commands = self.__commands["configure-commands"]
if commands:
- with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"):
+ with sandbox.batch(
+ SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"
+ ):
for cmd in commands:
self.__run_command(sandbox, cmd)
@@ -282,15 +293,12 @@ class BuildElement(Element):
#############################################################
def __get_commands(self, node, name):
raw_commands = node.get_sequence(name, [])
- return [
- self.node_subst_vars(command)
- for command in raw_commands
- ]
+ return [self.node_subst_vars(command) for command in raw_commands]
def __run_command(self, sandbox, cmd):
# Note the -e switch to 'sh' means to exit with an error
# if any untested command fails.
#
- sandbox.run(['sh', '-c', '-e', cmd + '\n'],
- SandboxFlags.ROOT_READ_ONLY,
- label=cmd)
+ sandbox.run(
+ ["sh", "-c", "-e", cmd + "\n"], SandboxFlags.ROOT_READ_ONLY, label=cmd
+ )
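The `-e` switch that __run_command() passes to `sh` makes the shell abort on the first failing command in a batch. A stand-alone illustration of that behaviour outside the BuildStream sandbox, using plain subprocess and a hypothetical command string:

    import subprocess

    cmd = "false\necho 'this line is never reached'"
    result = subprocess.run(["sh", "-c", "-e", cmd + "\n"])
    print(result.returncode)  # non-zero: `false` failed, so `sh -e` stopped there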
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 9a0a71a97..fe9993d41 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -90,8 +90,14 @@ from pyroaring import BitMap # pylint: disable=no-name-in-module
from . import _yaml
from ._variables import Variables
from ._versions import BST_CORE_ARTIFACT_VERSION
-from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
- ErrorDomain, SourceCacheError
+from ._exceptions import (
+ BstError,
+ LoadError,
+ LoadErrorReason,
+ ImplError,
+ ErrorDomain,
+ SourceCacheError,
+)
from .utils import FileListResult
from . import utils
from . import _cachekey
@@ -122,6 +128,7 @@ if TYPE_CHECKING:
from ._context import Context
from ._loader.metaelement import MetaElement
from ._project import Project
+
# pylint: enable=cyclic-import
@@ -136,14 +143,23 @@ class ElementError(BstError):
collect: An optional directory containing partial install contents
temporary: An indicator of whether the error may occur if the operation was run again. (*Since: 1.2*)
"""
- def __init__(self,
- message: str,
- *,
- detail: str = None,
- reason: str = None,
- collect: str = None,
- temporary: bool = False):
- super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary)
+
+ def __init__(
+ self,
+ message: str,
+ *,
+ detail: str = None,
+ reason: str = None,
+ collect: str = None,
+ temporary: bool = False
+ ):
+ super().__init__(
+ message,
+ detail=detail,
+ domain=ErrorDomain.ELEMENT,
+ reason=reason,
+ temporary=temporary,
+ )
self.collect = collect
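A hedged usage sketch of the keyword-only arguments ElementError takes after this change; the message, detail and reason strings are invented for illustration:

    from buildstream.element import ElementError

    raise ElementError(
        "Failed to stage sources",
        detail="The staging directory was not empty",
        reason="staging-dir-not-empty",
        temporary=False,
    )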
@@ -156,12 +172,13 @@ class Element(Plugin):
All elements derive from this class, this interface defines how
the core will be interacting with Elements.
"""
+
# The defaults from the yaml file and project
__defaults = None
# A hash of Element by MetaElement
- __instantiated_elements = {} # type: Dict[MetaElement, Element]
+ __instantiated_elements = {} # type: Dict[MetaElement, Element]
# A list of (source, ref) tuples which were redundantly specified
- __redundant_source_refs = [] # type: List[Tuple[Source, SourceRef]]
+ __redundant_source_refs = [] # type: List[Tuple[Source, SourceRef]]
BST_ARTIFACT_VERSION = 0
"""The element plugin's artifact version
@@ -215,10 +232,16 @@ class Element(Plugin):
*Since: 1.90*
"""
- def __init__(self, context: 'Context', project: 'Project', meta: 'MetaElement', plugin_conf: Dict[str, Any]):
+ def __init__(
+ self,
+ context: "Context",
+ project: "Project",
+ meta: "MetaElement",
+ plugin_conf: Dict[str, Any],
+ ):
- self.__cache_key_dict = None # Dict for cache key calculation
- self.__cache_key = None # Our cached cache key
+ self.__cache_key_dict = None # Dict for cache key calculation
+ self.__cache_key = None # Our cached cache key
super().__init__(meta.name, context, project, meta.provenance, "element")
@@ -236,75 +259,113 @@ class Element(Plugin):
"""
# Direct runtime dependency Elements
- self.__runtime_dependencies = [] # type: List[Element]
+ self.__runtime_dependencies = [] # type: List[Element]
# Direct build dependency Elements
- self.__build_dependencies = [] # type: List[Element]
+ self.__build_dependencies = [] # type: List[Element]
# Direct build dependency subset which require strict rebuilds
- self.__strict_dependencies = [] # type: List[Element]
+ self.__strict_dependencies = [] # type: List[Element]
# Direct reverse build dependency Elements
- self.__reverse_build_deps = set() # type: Set[Element]
+ self.__reverse_build_deps = set() # type: Set[Element]
# Direct reverse runtime dependency Elements
- self.__reverse_runtime_deps = set() # type: Set[Element]
- self.__build_deps_without_strict_cache_key = None # Number of build dependencies without a strict key
- self.__runtime_deps_without_strict_cache_key = None # Number of runtime dependencies without a strict key
- self.__build_deps_without_cache_key = None # Number of build dependencies without a cache key
- self.__runtime_deps_without_cache_key = None # Number of runtime dependencies without a cache key
- self.__build_deps_uncached = None # Build dependencies which are not yet cached
- self.__runtime_deps_uncached = None # Runtime dependencies which are not yet cached
- self.__updated_strict_cache_keys_of_rdeps = False # Whether we've updated strict cache keys of rdeps
- self.__ready_for_runtime = False # Whether the element and its runtime dependencies have cache keys
- self.__ready_for_runtime_and_cached = False # Whether all runtime deps are cached, as well as the element
- self.__cached_remotely = None # Whether the element is cached remotely
+ self.__reverse_runtime_deps = set() # type: Set[Element]
+ self.__build_deps_without_strict_cache_key = (
+ None # Number of build dependencies without a strict key
+ )
+ self.__runtime_deps_without_strict_cache_key = (
+ None # Number of runtime dependencies without a strict key
+ )
+ self.__build_deps_without_cache_key = (
+ None # Number of build dependencies without a cache key
+ )
+ self.__runtime_deps_without_cache_key = (
+ None # Number of runtime dependencies without a cache key
+ )
+ self.__build_deps_uncached = None # Build dependencies which are not yet cached
+ self.__runtime_deps_uncached = (
+ None # Runtime dependencies which are not yet cached
+ )
+ self.__updated_strict_cache_keys_of_rdeps = (
+ False # Whether we've updated strict cache keys of rdeps
+ )
+ self.__ready_for_runtime = (
+ False # Whether the element and its runtime dependencies have cache keys
+ )
+ self.__ready_for_runtime_and_cached = (
+ False # Whether all runtime deps are cached, as well as the element
+ )
+ self.__cached_remotely = None # Whether the element is cached remotely
# List of Sources
- self.__sources = [] # type: List[Source]
- self.__weak_cache_key = None # Our cached weak cache key
- self.__strict_cache_key = None # Our cached cache key for strict builds
+ self.__sources = [] # type: List[Source]
+ self.__weak_cache_key = None # Our cached weak cache key
+ self.__strict_cache_key = None # Our cached cache key for strict builds
self.__artifacts = context.artifactcache # Artifact cache
self.__sourcecache = context.sourcecache # Source cache
- self.__consistency = Consistency.INCONSISTENT # Cached overall consistency state
- self.__assemble_scheduled = False # Element is scheduled to be assembled
- self.__assemble_done = False # Element is assembled
- self.__tracking_scheduled = False # Sources are scheduled to be tracked
- self.__pull_done = False # Whether pull was attempted
- self.__cached_successfully = None # If the Element is known to be successfully cached
- self.__source_cached = None # If the sources are known to be successfully cached
- self.__splits = None # Resolved regex objects for computing split domains
- self.__whitelist_regex = None # Resolved regex object to check if file is allowed to overlap
+ self.__consistency = (
+ Consistency.INCONSISTENT
+ ) # Cached overall consistency state
+ self.__assemble_scheduled = False # Element is scheduled to be assembled
+ self.__assemble_done = False # Element is assembled
+ self.__tracking_scheduled = False # Sources are scheduled to be tracked
+ self.__pull_done = False # Whether pull was attempted
+ self.__cached_successfully = (
+ None # If the Element is known to be successfully cached
+ )
+ self.__source_cached = (
+ None # If the sources are known to be successfully cached
+ )
+ self.__splits = None # Resolved regex objects for computing split domains
+ self.__whitelist_regex = (
+ None # Resolved regex object to check if file is allowed to overlap
+ )
# Location where Element.stage_sources() was called
self.__staged_sources_directory = None # type: Optional[str]
- self.__tainted = None # Whether the artifact is tainted and should not be shared
- self.__required = False # Whether the artifact is required in the current session
- self.__artifact_files_required = False # Whether artifact files are required in the local cache
- self.__build_result = None # The result of assembling this Element (success, description, detail)
- self._build_log_path = None # The path of the build log for this Element
+ self.__tainted = (
+ None # Whether the artifact is tainted and should not be shared
+ )
+ self.__required = (
+ False # Whether the artifact is required in the current session
+ )
+ self.__artifact_files_required = (
+ False # Whether artifact files are required in the local cache
+ )
+ self.__build_result = (
+ None # The result of assembling this Element (success, description, detail)
+ )
+ self._build_log_path = None # The path of the build log for this Element
# Artifact class for direct artifact composite interaction
- self.__artifact = None # type: Optional[Artifact]
- self.__strict_artifact = None # Artifact for strict cache key
- self.__meta_kind = meta.kind # The kind of this source, required for unpickling
+ self.__artifact = None # type: Optional[Artifact]
+ self.__strict_artifact = None # Artifact for strict cache key
+ self.__meta_kind = meta.kind # The kind of this source, required for unpickling
# the index of the last source in this element that requires previous
# sources for staging
self.__last_source_requires_previous_ix = None
- self.__batch_prepare_assemble = False # Whether batching across prepare()/assemble() is configured
- self.__batch_prepare_assemble_flags = 0 # Sandbox flags for batching across prepare()/assemble()
+ self.__batch_prepare_assemble = (
+ False # Whether batching across prepare()/assemble() is configured
+ )
+ self.__batch_prepare_assemble_flags = (
+ 0 # Sandbox flags for batching across prepare()/assemble()
+ )
# Collect dir for batching across prepare()/assemble()
self.__batch_prepare_assemble_collect = None # type: Optional[str]
# Callbacks
- self.__required_callback = None # Callback to Queues
- self.__can_query_cache_callback = None # Callback to PullQueue/FetchQueue
- self.__buildable_callback = None # Callback to BuildQueue
+ self.__required_callback = None # Callback to Queues
+ self.__can_query_cache_callback = None # Callback to PullQueue/FetchQueue
+ self.__buildable_callback = None # Callback to BuildQueue
- self._depth = None # Depth of Element in its current dependency graph
- self._resolved_initial_state = False # Whether the initial state of the Element has been resolved
+ self._depth = None # Depth of Element in its current dependency graph
+ self._resolved_initial_state = (
+ False # Whether the initial state of the Element has been resolved
+ )
# Ensure we have loaded this class's defaults
self.__init_defaults(project, plugin_conf, meta.kind, meta.is_junction)
# Collect the composited variables and resolve them
variables = self.__extract_variables(project, meta)
- variables['element-name'] = self.name
+ variables["element-name"] = self.name
self.__variables = Variables(variables)
# Collect the composited environment now that we have variables
@@ -348,7 +409,7 @@ class Element(Plugin):
#############################################################
# Abstract Methods #
#############################################################
- def configure_sandbox(self, sandbox: 'Sandbox') -> None:
+ def configure_sandbox(self, sandbox: "Sandbox") -> None:
"""Configures the the sandbox for execution
Args:
@@ -360,10 +421,13 @@ class Element(Plugin):
Elements must implement this method to configure the sandbox object
for execution.
"""
- raise ImplError("element plugin '{kind}' does not implement configure_sandbox()".format(
- kind=self.get_kind()))
+ raise ImplError(
+ "element plugin '{kind}' does not implement configure_sandbox()".format(
+ kind=self.get_kind()
+ )
+ )
- def stage(self, sandbox: 'Sandbox') -> None:
+ def stage(self, sandbox: "Sandbox") -> None:
"""Stage inputs into the sandbox directories
Args:
@@ -377,10 +441,13 @@ class Element(Plugin):
objects, by staging the artifacts of the elements this element depends
on, or both.
"""
- raise ImplError("element plugin '{kind}' does not implement stage()".format(
- kind=self.get_kind()))
+ raise ImplError(
+ "element plugin '{kind}' does not implement stage()".format(
+ kind=self.get_kind()
+ )
+ )
- def prepare(self, sandbox: 'Sandbox') -> None:
+ def prepare(self, sandbox: "Sandbox") -> None:
"""Run one-off preparation commands.
This is run before assemble(), but is guaranteed to run only
@@ -400,7 +467,7 @@ class Element(Plugin):
*Since: 1.2*
"""
- def assemble(self, sandbox: 'Sandbox') -> str:
+ def assemble(self, sandbox: "Sandbox") -> str:
"""Assemble the output artifact
Args:
@@ -415,8 +482,11 @@ class Element(Plugin):
Elements must implement this method to create an output
artifact from its sources and dependencies.
"""
- raise ImplError("element plugin '{kind}' does not implement assemble()".format(
- kind=self.get_kind()))
+ raise ImplError(
+ "element plugin '{kind}' does not implement assemble()".format(
+ kind=self.get_kind()
+ )
+ )
def generate_script(self) -> str:
"""Generate a build (sh) script to build this element
@@ -437,13 +507,16 @@ class Element(Plugin):
If the script fails, it is expected to return with an exit
code != 0.
"""
- raise ImplError("element plugin '{kind}' does not implement write_script()".format(
- kind=self.get_kind()))
+ raise ImplError(
+ "element plugin '{kind}' does not implement write_script()".format(
+ kind=self.get_kind()
+ )
+ )
#############################################################
# Public Methods #
#############################################################
- def sources(self) -> Iterator['Source']:
+ def sources(self) -> Iterator["Source"]:
"""A generator function to enumerate the element sources
Yields:
@@ -452,7 +525,9 @@ class Element(Plugin):
for source in self.__sources:
yield source
- def dependencies(self, scope: Scope, *, recurse: bool = True, visited=None) -> Iterator['Element']:
+ def dependencies(
+ self, scope: Scope, *, recurse: bool = True, visited=None
+ ) -> Iterator["Element"]:
"""dependencies(scope, *, recurse=True)
A generator function which yields the dependencies of the given element.
@@ -479,13 +554,19 @@ class Element(Plugin):
if scope in (Scope.RUN, Scope.ALL):
yield from self.__runtime_dependencies
else:
+
def visit(element, scope, visited):
if scope == Scope.ALL:
visited[0].add(element._unique_id)
visited[1].add(element._unique_id)
- for dep in chain(element.__build_dependencies, element.__runtime_dependencies):
- if dep._unique_id not in visited[0] and dep._unique_id not in visited[1]:
+ for dep in chain(
+ element.__build_dependencies, element.__runtime_dependencies
+ ):
+ if (
+ dep._unique_id not in visited[0]
+ and dep._unique_id not in visited[1]
+ ):
yield from visit(dep, Scope.ALL, visited)
yield element
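As a usage note, this generator is what element plugins walk when staging and integrating their inputs. A minimal sketch, assuming it runs inside a plugin method such as stage() where `self` and `sandbox` are available:

    from buildstream.types import Scope

    for dep in self.dependencies(Scope.BUILD, recurse=False):
        dep.integrate(sandbox)  # run the dependency's integration commands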
@@ -519,7 +600,7 @@ class Element(Plugin):
yield from visit(self, scope, visited)
- def search(self, scope: Scope, name: str) -> Optional['Element']:
+ def search(self, scope: Scope, name: str) -> Optional["Element"]:
"""Search for a dependency by name
Args:
@@ -535,7 +616,7 @@ class Element(Plugin):
return None
- def node_subst_vars(self, node: 'ScalarNode') -> str:
+ def node_subst_vars(self, node: "ScalarNode") -> str:
"""Replace any variables in the string contained in the node and returns it.
Args:
@@ -559,9 +640,11 @@ class Element(Plugin):
return self.__variables.subst(node.as_str())
except LoadError as e:
provenance = node.get_provenance()
- raise LoadError('{}: {}'.format(provenance, e), e.reason, detail=e.detail) from e
+ raise LoadError(
+ "{}: {}".format(provenance, e), e.reason, detail=e.detail
+ ) from e
- def node_subst_sequence_vars(self, node: 'SequenceNode[ScalarNode]') -> List[str]:
+ def node_subst_sequence_vars(self, node: "SequenceNode[ScalarNode]") -> List[str]:
"""Substitute any variables in the given sequence
Args:
@@ -580,14 +663,18 @@ class Element(Plugin):
ret.append(self.__variables.subst(value.as_str()))
except LoadError as e:
provenance = value.get_provenance()
- raise LoadError('{}: {}'.format(provenance, e), e.reason, detail=e.detail) from e
+ raise LoadError(
+ "{}: {}".format(provenance, e), e.reason, detail=e.detail
+ ) from e
return ret
- def compute_manifest(self,
- *,
- include: Optional[List[str]] = None,
- exclude: Optional[List[str]] = None,
- orphans: bool = True) -> str:
+ def compute_manifest(
+ self,
+ *,
+ include: Optional[List[str]] = None,
+ exclude: Optional[List[str]] = None,
+ orphans: bool = True
+ ) -> str:
"""Compute and return this element's selective manifest
The manifest consists of the list of file paths in the
@@ -630,14 +717,16 @@ class Element(Plugin):
return _compose_artifact_name(self.project_name, self.normal_name, key)
- def stage_artifact(self,
- sandbox: 'Sandbox',
- *,
- path: str = None,
- include: Optional[List[str]] = None,
- exclude: Optional[List[str]] = None,
- orphans: bool = True,
- update_mtimes: Optional[List[str]] = None) -> FileListResult:
+ def stage_artifact(
+ self,
+ sandbox: "Sandbox",
+ *,
+ path: str = None,
+ include: Optional[List[str]] = None,
+ exclude: Optional[List[str]] = None,
+ orphans: bool = True,
+ update_mtimes: Optional[List[str]] = None
+ ) -> FileListResult:
"""Stage this element's output artifact in the sandbox
This will stage the files from the artifact to the sandbox at specified location.
@@ -675,10 +764,15 @@ class Element(Plugin):
"""
if not self._cached():
- detail = "No artifacts have been cached yet for that element\n" + \
- "Try building the element first with `bst build`\n"
- raise ElementError("No artifacts to stage",
- detail=detail, reason="uncached-checkout-attempt")
+ detail = (
+ "No artifacts have been cached yet for that element\n"
+ + "Try building the element first with `bst build`\n"
+ )
+ raise ElementError(
+ "No artifacts to stage",
+ detail=detail,
+ reason="uncached-checkout-attempt",
+ )
if update_mtimes is None:
update_mtimes = []
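A sketch of calling stage_artifact() from a depending element; the split-domain names "runtime" and "devel" are assumptions used only for illustration:

    # Stage only a dependency's runtime files into /usr of the sandbox
    result = dep.stage_artifact(
        sandbox,
        path="/usr",
        include=["runtime"],
        exclude=["devel"],
    )
    for f in result.overwritten:
        self.warn("Overwrote {}".format(f))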
@@ -686,50 +780,68 @@ class Element(Plugin):
# Time to use the artifact, check once more that it's there
self.__assert_cached()
- with self.timed_activity("Staging {}/{}".format(self.name, self._get_brief_display_key())):
+ with self.timed_activity(
+ "Staging {}/{}".format(self.name, self._get_brief_display_key())
+ ):
# Disable type checking since we can't easily tell mypy that
# `self.__artifact` can't be None at this stage.
- files_vdir = self.__artifact.get_files() # type: ignore
+ files_vdir = self.__artifact.get_files() # type: ignore
# Hard link it into the staging area
#
vbasedir = sandbox.get_virtual_directory()
- vstagedir = vbasedir \
- if path is None \
+ vstagedir = (
+ vbasedir
+ if path is None
else vbasedir.descend(*path.lstrip(os.sep).split(os.sep))
+ )
split_filter = self.__split_filter_func(include, exclude, orphans)
# We must not hardlink files whose mtimes we want to update
if update_mtimes:
+
def link_filter(path):
- return ((split_filter is None or split_filter(path)) and
- path not in update_mtimes)
+ return (
+ split_filter is None or split_filter(path)
+ ) and path not in update_mtimes
def copy_filter(path):
- return ((split_filter is None or split_filter(path)) and
- path in update_mtimes)
+ return (
+ split_filter is None or split_filter(path)
+ ) and path in update_mtimes
+
else:
link_filter = split_filter
- result = vstagedir.import_files(files_vdir, filter_callback=link_filter,
- report_written=True, can_link=True)
+ result = vstagedir.import_files(
+ files_vdir,
+ filter_callback=link_filter,
+ report_written=True,
+ can_link=True,
+ )
if update_mtimes:
- copy_result = vstagedir.import_files(files_vdir, filter_callback=copy_filter,
- report_written=True, update_mtime=True)
+ copy_result = vstagedir.import_files(
+ files_vdir,
+ filter_callback=copy_filter,
+ report_written=True,
+ update_mtime=True,
+ )
result = result.combine(copy_result)
return result
- def stage_dependency_artifacts(self,
- sandbox: 'Sandbox',
- scope: Scope,
- *,
- path: str = None,
- include: Optional[List[str]] = None,
- exclude: Optional[List[str]] = None,
- orphans: bool = True) -> None:
+ def stage_dependency_artifacts(
+ self,
+ sandbox: "Sandbox",
+ scope: Scope,
+ *,
+ path: str = None,
+ include: Optional[List[str]] = None,
+ exclude: Optional[List[str]] = None,
+ orphans: bool = True
+ ) -> None:
"""Stage element dependencies in scope
This is primarily a convenience wrapper around
@@ -751,8 +863,8 @@ class Element(Plugin):
occur.
"""
ignored = {}
- overlaps = OrderedDict() # type: OrderedDict[str, List[str]]
- files_written = {} # type: Dict[str, List[str]]
+ overlaps = OrderedDict() # type: OrderedDict[str, List[str]]
+ files_written = {} # type: Dict[str, List[str]]
old_dep_keys = None
workspace = self._get_workspace()
context = self._get_context()
@@ -763,7 +875,9 @@ class Element(Plugin):
# build is still in the artifact cache
#
if self.__artifacts.contains(self, workspace.last_successful):
- last_successful = Artifact(self, context, strong_key=workspace.last_successful)
+ last_successful = Artifact(
+ self, context, strong_key=workspace.last_successful
+ )
# Get a dict of dependency strong keys
old_dep_keys = last_successful.get_metadata_dependencies()
else:
@@ -772,7 +886,9 @@ class Element(Plugin):
workspace.prepared = False
workspace.last_successful = None
- self.info("Resetting workspace state, last successful build is no longer in the cache")
+ self.info(
+ "Resetting workspace state, last successful build is no longer in the cache"
+ )
# In case we are staging in the main process
if utils._is_main_process():
@@ -803,12 +919,14 @@ class Element(Plugin):
if utils._is_main_process():
context.get_workspaces().save_config()
- result = dep.stage_artifact(sandbox,
- path=path,
- include=include,
- exclude=exclude,
- orphans=orphans,
- update_mtimes=to_update)
+ result = dep.stage_artifact(
+ sandbox,
+ path=path,
+ include=include,
+ exclude=exclude,
+ orphans=orphans,
+ update_mtimes=to_update,
+ )
if result.overwritten:
for overwrite in result.overwritten:
# Completely new overwrite
@@ -838,11 +956,16 @@ class Element(Plugin):
overlap_warning_elements.append(elm)
overlap_warning = True
- warning_detail += _overlap_error_detail(f, overlap_warning_elements, elements)
+ warning_detail += _overlap_error_detail(
+ f, overlap_warning_elements, elements
+ )
if overlap_warning:
- self.warn("Non-whitelisted overlaps detected", detail=warning_detail,
- warning_token=CoreWarnings.OVERLAPS)
+ self.warn(
+ "Non-whitelisted overlaps detected",
+ detail=warning_detail,
+ warning_token=CoreWarnings.OVERLAPS,
+ )
if ignored:
detail = "Not staging files which would replace non-empty directories:\n"
@@ -851,7 +974,7 @@ class Element(Plugin):
detail += " " + " ".join(["/" + f + "\n" for f in value])
self.warn("Ignored files", detail=detail)
- def integrate(self, sandbox: 'Sandbox') -> None:
+ def integrate(self, sandbox: "Sandbox") -> None:
"""Integrate currently staged filesystem against this artifact.
Args:
@@ -863,19 +986,20 @@ class Element(Plugin):
commands will create and update important system cache files
required for running the installed software (such as the ld.so.cache).
"""
- bstdata = self.get_public_data('bst')
+ bstdata = self.get_public_data("bst")
environment = self.get_environment()
if bstdata is not None:
with sandbox.batch(SandboxFlags.NONE):
- commands = bstdata.get_sequence('integration-commands', [])
+ commands = bstdata.get_sequence("integration-commands", [])
for command in commands:
cmd = self.node_subst_vars(command)
- sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/',
- label=cmd)
+ sandbox.run(
+ ["sh", "-e", "-c", cmd], 0, env=environment, cwd="/", label=cmd
+ )
- def stage_sources(self, sandbox: 'Sandbox', directory: str) -> None:
+ def stage_sources(self, sandbox: "Sandbox", directory: str) -> None:
"""Stage this element's sources to a directory in the sandbox
Args:
@@ -892,7 +1016,7 @@ class Element(Plugin):
self._stage_sources_in_sandbox(sandbox, directory)
- def get_public_data(self, domain: str) -> 'MappingNode[Any, Any]':
+ def get_public_data(self, domain: str) -> "MappingNode[Any, Any]":
"""Fetch public data on this element
Args:
@@ -911,13 +1035,13 @@ class Element(Plugin):
# Disable type-checking since we can't easily tell mypy that
# `self.__dynamic_public` can't be None here.
- data = self.__dynamic_public.get_mapping(domain, default=None) # type: ignore
+ data = self.__dynamic_public.get_mapping(domain, default=None) # type: ignore
if data is not None:
data = data.clone()
return data
- def set_public_data(self, domain: str, data: 'MappingNode[Any, Any]') -> None:
+ def set_public_data(self, domain: str, data: "MappingNode[Any, Any]") -> None:
"""Set public data on this element
Args:
@@ -935,7 +1059,7 @@ class Element(Plugin):
if data is not None:
data = data.clone()
- self.__dynamic_public[domain] = data # type: ignore
+ self.__dynamic_public[domain] = data # type: ignore
def get_environment(self) -> Dict[str, str]:
"""Fetch the environment suitable for running in the sandbox
@@ -959,7 +1083,9 @@ class Element(Plugin):
# Flat is not recognized correctly by Pylint as being a dictionary
return self.__variables.flat.get(varname) # pylint: disable=no-member
- def batch_prepare_assemble(self, flags: int, *, collect: Optional[str] = None) -> None:
+ def batch_prepare_assemble(
+ self, flags: int, *, collect: Optional[str] = None
+ ) -> None:
""" Configure command batching across prepare() and assemble()
Args:
@@ -971,7 +1097,11 @@ class Element(Plugin):
to enable batching of all sandbox commands issued in prepare() and assemble().
"""
if self.__batch_prepare_assemble:
- raise ElementError("{}: Command batching for prepare/assemble is already configured".format(self))
+ raise ElementError(
+ "{}: Command batching for prepare/assemble is already configured".format(
+ self
+ )
+ )
self.__batch_prepare_assemble = True
self.__batch_prepare_assemble_flags = flags
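A sketch of how a plugin might opt in from configure_sandbox(); the choice of flag and collect directory mirrors what BuildElement-style plugins typically pass, but is an assumption here, and SandboxFlags is assumed to be imported by the surrounding plugin module:

    def configure_sandbox(self, sandbox):
        install_root = self.get_variable("install-root")
        sandbox.mark_directory(install_root)
        # Batch every command issued in prepare() and assemble() into one
        # sandbox invocation, collecting the install root even on failure.
        self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY, collect=install_root)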
@@ -1016,8 +1146,7 @@ class Element(Plugin):
# Instantiate sources and generate their keys
for meta_source in meta.sources:
meta_source.first_pass = meta.is_junction
- source = meta.project.create_source(meta_source,
- first_pass=meta.first_pass)
+ source = meta.project.create_source(meta_source, first_pass=meta.first_pass)
redundant_ref = source._load_ref()
@@ -1190,8 +1319,7 @@ class Element(Plugin):
# (bool): Whether this element can currently be built
#
def _buildable(self):
- if self._get_consistency() < Consistency.CACHED and \
- not self._source_cached():
+ if self._get_consistency() < Consistency.CACHED and not self._source_cached():
return False
if not self.__assemble_scheduled:
@@ -1261,11 +1389,14 @@ class Element(Plugin):
# If the element wasn't assembled and isn't scheduled to be assemble,
# or cached, or waiting to be pulled but has an artifact then schedule
# the assembly.
- if (not self.__assemble_scheduled and not self.__assemble_done and
- self.__artifact and
- self._is_required() and
- not self._cached() and
- not self._pull_pending()):
+ if (
+ not self.__assemble_scheduled
+ and not self.__assemble_done
+ and self.__artifact
+ and self._is_required()
+ and not self._cached()
+ and not self._pull_pending()
+ ):
self._schedule_assemble()
# If a build has been scheduled, we know that the element
@@ -1298,7 +1429,7 @@ class Element(Plugin):
cache_key = self._get_cache_key()
if not cache_key:
- cache_key = "{:?<64}".format('')
+ cache_key = "{:?<64}".format("")
elif cache_key == self.__strict_cache_key:
# Strong cache key used in this session matches cache key
# that would be used in strict build mode
@@ -1364,9 +1495,14 @@ class Element(Plugin):
# Complementary warning that the new ref will be unused.
if old_ref != new_ref and self._get_workspace():
- detail = "This source has an open workspace.\n" \
+ detail = (
+ "This source has an open workspace.\n"
+ "To start using the new reference, please close the existing workspace."
- source.warn("Updated reference will be ignored as source has open workspace", detail=detail)
+ )
+ source.warn(
+ "Updated reference will be ignored as source has open workspace",
+ detail=detail,
+ )
return refs
@@ -1376,11 +1512,17 @@ class Element(Plugin):
# is used to stage things by the `bst artifact checkout` codepath
#
@contextmanager
- def _prepare_sandbox(self, scope, directory, shell=False, integrate=True, usebuildtree=False):
+ def _prepare_sandbox(
+ self, scope, directory, shell=False, integrate=True, usebuildtree=False
+ ):
# bst shell and bst artifact checkout require a local sandbox.
bare_directory = bool(directory)
- with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
- bare_directory=bare_directory) as sandbox:
+ with self.__sandbox(
+ directory,
+ config=self.__sandbox_config,
+ allow_remote=False,
+ bare_directory=bare_directory,
+ ) as sandbox:
sandbox._usebuildtree = usebuildtree
# Configure always comes first, and we need it.
@@ -1392,7 +1534,9 @@ class Element(Plugin):
self.stage(sandbox)
else:
# Stage deps in the sandbox root
- with self.timed_activity("Staging dependencies", silent_nested=True):
+ with self.timed_activity(
+ "Staging dependencies", silent_nested=True
+ ):
self.stage_dependency_artifacts(sandbox, scope)
# Run any integration commands provided by the dependencies
@@ -1421,7 +1565,9 @@ class Element(Plugin):
# Stage all sources that need to be copied
sandbox_vroot = sandbox.get_virtual_directory()
- host_vdirectory = sandbox_vroot.descend(*directory.lstrip(os.sep).split(os.sep), create=True)
+ host_vdirectory = sandbox_vroot.descend(
+ *directory.lstrip(os.sep).split(os.sep), create=True
+ )
self._stage_sources_at(host_vdirectory, usebuildtree=sandbox._usebuildtree)
# _stage_sources_at():
@@ -1438,22 +1584,30 @@ class Element(Plugin):
# It's advantageous to have this temporary directory on
# the same file system as the rest of our cache.
- with self.timed_activity("Staging sources", silent_nested=True), \
- utils._tempdir(dir=context.tmpdir, prefix='staging-temp') as temp_staging_directory:
+ with self.timed_activity("Staging sources", silent_nested=True), utils._tempdir(
+ dir=context.tmpdir, prefix="staging-temp"
+ ) as temp_staging_directory:
import_dir = temp_staging_directory
if not isinstance(vdirectory, Directory):
vdirectory = FileBasedDirectory(vdirectory)
if not vdirectory.is_empty():
- raise ElementError("Staging directory '{}' is not empty".format(vdirectory))
+ raise ElementError(
+ "Staging directory '{}' is not empty".format(vdirectory)
+ )
# Check if we have a cached buildtree to use
if usebuildtree:
import_dir = self.__artifact.get_buildtree()
if import_dir.is_empty():
detail = "Element type either does not expect a buildtree or it was explictily cached without one."
- self.warn("WARNING: {} Artifact contains an empty buildtree".format(self.name), detail=detail)
+ self.warn(
+ "WARNING: {} Artifact contains an empty buildtree".format(
+ self.name
+ ),
+ detail=detail,
+ )
# No cached buildtree, stage source from source cache
else:
@@ -1474,12 +1628,18 @@ class Element(Plugin):
import_dir.import_files(source_dir)
except SourceCacheError as e:
- raise ElementError("Error trying to export source for {}: {}"
- .format(self.name, e))
+ raise ElementError(
+ "Error trying to export source for {}: {}".format(
+ self.name, e
+ )
+ )
except VirtualDirectoryError as e:
- raise ElementError("Error trying to import sources together for {}: {}"
- .format(self.name, e),
- reason="import-source-files-fail")
+ raise ElementError(
+ "Error trying to import sources together for {}: {}".format(
+ self.name, e
+ ),
+ reason="import-source-files-fail",
+ )
with utils._deterministic_umask():
vdirectory.import_files(import_dir)
@@ -1587,8 +1747,9 @@ class Element(Plugin):
self._update_ready_for_runtime_and_cached()
if self._get_workspace() and self._cached_success():
- assert utils._is_main_process(), \
- "Attempted to save workspace configuration from child process"
+ assert (
+ utils._is_main_process()
+ ), "Attempted to save workspace configuration from child process"
#
# Note that this block can only happen in the
# main process, since `self._cached_success()` cannot
@@ -1624,29 +1785,39 @@ class Element(Plugin):
with self._output_file() as output_file:
if not self.__sandbox_config_supported:
- self.warn("Sandbox configuration is not supported by the platform.",
- detail="Falling back to UID {} GID {}. Artifact will not be pushed."
- .format(self.__sandbox_config.build_uid, self.__sandbox_config.build_gid))
+ self.warn(
+ "Sandbox configuration is not supported by the platform.",
+ detail="Falling back to UID {} GID {}. Artifact will not be pushed.".format(
+ self.__sandbox_config.build_uid, self.__sandbox_config.build_gid
+ ),
+ )
# Explicitly clean it up, keep the build dir around if exceptions are raised
os.makedirs(context.builddir, exist_ok=True)
- rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
+ rootdir = tempfile.mkdtemp(
+ prefix="{}-".format(self.normal_name), dir=context.builddir
+ )
# Cleanup the build directory on explicit SIGTERM
def cleanup_rootdir():
utils._force_rmtree(rootdir)
- with _signals.terminator(cleanup_rootdir), \
- self.__sandbox(rootdir, output_file, output_file, self.__sandbox_config) as sandbox: # noqa
+ with _signals.terminator(cleanup_rootdir), self.__sandbox(
+ rootdir, output_file, output_file, self.__sandbox_config
+ ) as sandbox: # noqa
# Let the sandbox know whether the buildtree will be required.
# This allows the remote execution sandbox to skip buildtree
# download when it's not needed.
- buildroot = self.get_variable('build-root')
+ buildroot = self.get_variable("build-root")
cache_buildtrees = context.cache_buildtrees
if cache_buildtrees != _CacheBuildTrees.NEVER:
- always_cache_buildtrees = cache_buildtrees == _CacheBuildTrees.ALWAYS
- sandbox._set_build_directory(buildroot, always=always_cache_buildtrees)
+ always_cache_buildtrees = (
+ cache_buildtrees == _CacheBuildTrees.ALWAYS
+ )
+ sandbox._set_build_directory(
+ buildroot, always=always_cache_buildtrees
+ )
if not self.BST_RUN_COMMANDS:
# Element doesn't need to run any commands in the sandbox.
@@ -1667,8 +1838,10 @@ class Element(Plugin):
self.stage(sandbox)
try:
if self.__batch_prepare_assemble:
- cm = sandbox.batch(self.__batch_prepare_assemble_flags,
- collect=self.__batch_prepare_assemble_collect)
+ cm = sandbox.batch(
+ self.__batch_prepare_assemble_flags,
+ collect=self.__batch_prepare_assemble_collect,
+ )
else:
cm = contextlib.suppress()
@@ -1676,14 +1849,18 @@ class Element(Plugin):
# Step 3 - Prepare
self.__prepare(sandbox)
# Step 4 - Assemble
- collect = self.assemble(sandbox) # pylint: disable=assignment-from-no-return
+ collect = self.assemble(
+ sandbox
+ ) # pylint: disable=assignment-from-no-return
self.__set_build_result(success=True, description="succeeded")
except (ElementError, SandboxCommandError) as e:
# Shelling into a sandbox is useful to debug this error
e.sandbox = True
- self.__set_build_result(success=False, description=str(e), detail=e.detail)
+ self.__set_build_result(
+ success=False, description=str(e), detail=e.detail
+ )
self._cache_artifact(rootdir, sandbox, e.collect)
raise
@@ -1710,11 +1887,13 @@ class Element(Plugin):
# result. Element types without a build-root dir will be cached
# with an empty buildtreedir regardless of this configuration.
- if cache_buildtrees == _CacheBuildTrees.ALWAYS or \
- (cache_buildtrees == _CacheBuildTrees.AUTO and not build_success):
+ if cache_buildtrees == _CacheBuildTrees.ALWAYS or (
+ cache_buildtrees == _CacheBuildTrees.AUTO and not build_success
+ ):
try:
sandbox_build_dir = sandbox_vroot.descend(
- *self.get_variable('build-root').lstrip(os.sep).split(os.sep))
+ *self.get_variable("build-root").lstrip(os.sep).split(os.sep)
+ )
sandbox._fetch_missing_blobs(sandbox_build_dir)
except VirtualDirectoryError:
# Directory could not be found. Pre-virtual
@@ -1724,7 +1903,9 @@ class Element(Plugin):
if collect is not None:
try:
- collectvdir = sandbox_vroot.descend(*collect.lstrip(os.sep).split(os.sep))
+ collectvdir = sandbox_vroot.descend(
+ *collect.lstrip(os.sep).split(os.sep)
+ )
sandbox._fetch_missing_blobs(collectvdir)
except VirtualDirectoryError:
pass
@@ -1733,14 +1914,15 @@ class Element(Plugin):
self._assemble_done()
with self.timed_activity("Caching artifact"):
- artifact_size = self.__artifact.cache(rootdir, sandbox_build_dir, collectvdir,
- buildresult, publicdata)
+ artifact_size = self.__artifact.cache(
+ rootdir, sandbox_build_dir, collectvdir, buildresult, publicdata
+ )
if collect is not None and collectvdir is None:
raise ElementError(
"Directory '{}' was not found inside the sandbox, "
- "unable to collect artifact contents"
- .format(collect))
+ "unable to collect artifact contents".format(collect)
+ )
return artifact_size
@@ -1841,8 +2023,9 @@ class Element(Plugin):
def _skip_source_push(self):
if not self.__sources or self._get_workspace():
return True
- return not (self.__sourcecache.has_push_remotes(plugin=self) and
- self._source_cached())
+ return not (
+ self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached()
+ )
def _source_push(self):
# try and push sources if we've got them
@@ -1917,10 +2100,21 @@ class Element(Plugin):
# Returns: Exit code
#
# If directory is not specified, one will be staged using scope
- def _shell(self, scope=None, directory=None, *, mounts=None, isolate=False, prompt=None, command=None,
- usebuildtree=False):
-
- with self._prepare_sandbox(scope, directory, shell=True, usebuildtree=usebuildtree) as sandbox:
+ def _shell(
+ self,
+ scope=None,
+ directory=None,
+ *,
+ mounts=None,
+ isolate=False,
+ prompt=None,
+ command=None,
+ usebuildtree=False
+ ):
+
+ with self._prepare_sandbox(
+ scope, directory, shell=True, usebuildtree=usebuildtree
+ ) as sandbox:
environment = self.get_environment()
environment = copy.copy(environment)
flags = SandboxFlags.INTERACTIVE | SandboxFlags.ROOT_READ_ONLY
@@ -1929,10 +2123,14 @@ class Element(Plugin):
# subproject, we want to use the rules defined by the main one.
context = self._get_context()
project = context.get_toplevel_project()
- shell_command, shell_environment, shell_host_files = project.get_shell_config()
+ (
+ shell_command,
+ shell_environment,
+ shell_host_files,
+ ) = project.get_shell_config()
if prompt is not None:
- environment['PS1'] = prompt
+ environment["PS1"] = prompt
# Special configurations for non-isolated sandboxes
if not isolate:
@@ -1952,7 +2150,11 @@ class Element(Plugin):
for mount in shell_host_files + mounts:
if not os.path.exists(mount.host_path):
if not mount.optional:
- self.warn("Not mounting non-existing host file: {}".format(mount.host_path))
+ self.warn(
+ "Not mounting non-existing host file: {}".format(
+ mount.host_path
+ )
+ )
else:
sandbox.mark_directory(mount.path)
sandbox._set_mount_source(mount.path, mount.host_path)
@@ -1988,8 +2190,9 @@ class Element(Plugin):
# additional support from Source implementations.
#
os.makedirs(context.builddir, exist_ok=True)
- with utils._tempdir(dir=context.builddir, prefix='workspace-{}'
- .format(self.normal_name)) as temp:
+ with utils._tempdir(
+ dir=context.builddir, prefix="workspace-{}".format(self.normal_name)
+ ) as temp:
for source in self.sources():
source._init_workspace(temp)
@@ -2018,10 +2221,10 @@ class Element(Plugin):
script = script_template.format(
name=self.normal_name,
- build_root=self.get_variable('build-root'),
- install_root=self.get_variable('install-root'),
+ build_root=self.get_variable("build-root"),
+ install_root=self.get_variable("install-root"),
variables=variable_string,
- commands=self.generate_script()
+ commands=self.generate_script(),
)
os.makedirs(directory, exist_ok=True)
@@ -2106,8 +2309,10 @@ class Element(Plugin):
continue
# try and fetch from source cache
- if source._get_consistency() < Consistency.CACHED and \
- self.__sourcecache.has_fetch_remotes():
+ if (
+ source._get_consistency() < Consistency.CACHED
+ and self.__sourcecache.has_fetch_remotes()
+ ):
if self.__sourcecache.pull(source):
continue
@@ -2149,26 +2354,26 @@ class Element(Plugin):
project = self._get_project()
self.__cache_key_dict = {
- 'core-artifact-version': BST_CORE_ARTIFACT_VERSION,
- 'element-plugin-key': self.get_unique_key(),
- 'element-plugin-name': self.get_kind(),
- 'element-plugin-version': self.BST_ARTIFACT_VERSION,
- 'sandbox': self.__sandbox_config.get_unique_key(),
- 'environment': cache_env,
- 'public': self.__public.strip_node_info()
+ "core-artifact-version": BST_CORE_ARTIFACT_VERSION,
+ "element-plugin-key": self.get_unique_key(),
+ "element-plugin-name": self.get_kind(),
+ "element-plugin-version": self.BST_ARTIFACT_VERSION,
+ "sandbox": self.__sandbox_config.get_unique_key(),
+ "environment": cache_env,
+ "public": self.__public.strip_node_info(),
}
- self.__cache_key_dict['sources'] = []
+ self.__cache_key_dict["sources"] = []
for source in self.__sources:
- self.__cache_key_dict['sources'].append(
- {'key': source._get_unique_key(),
- 'name': source._get_source_name()})
+ self.__cache_key_dict["sources"].append(
+ {"key": source._get_unique_key(), "name": source._get_source_name()}
+ )
- self.__cache_key_dict['fatal-warnings'] = sorted(project._fatal_warnings)
+ self.__cache_key_dict["fatal-warnings"] = sorted(project._fatal_warnings)
cache_key_dict = self.__cache_key_dict.copy()
- cache_key_dict['dependencies'] = dependencies
+ cache_key_dict["dependencies"] = dependencies
return _cachekey.generate_key(cache_key_dict)
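Putting the hunk above together, the mapping handed to _cachekey.generate_key() looks roughly like the following; every value is an invented placeholder standing in for what the element actually computes:

    cache_key_dict = {
        "core-artifact-version": 0,
        "element-plugin-key": {"kind": "manual"},    # from get_unique_key()
        "element-plugin-name": "manual",
        "element-plugin-version": 0,
        "sandbox": {"build-uid": 0, "build-gid": 0}, # hypothetical sandbox key
        "environment": {"PATH": "/usr/bin:/bin"},
        "public": {},
        "sources": [{"key": {"ref": "abc123"}, "name": "hello/source/0"}],
        "fatal-warnings": [],
        "dependencies": ["..."],                     # dependency cache keys
    }
    key = _cachekey.generate_key(cache_key_dict)     # deterministic key string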
@@ -2202,8 +2407,9 @@ class Element(Plugin):
Args:
        fetch_original (bool): whether we need the original unstaged source
"""
- if (self._get_consistency() == Consistency.CACHED and fetch_original) or \
- (self._source_cached() and not fetch_original):
+ if (self._get_consistency() == Consistency.CACHED and fetch_original) or (
+ self._source_cached() and not fetch_original
+ ):
return False
else:
return True
@@ -2285,8 +2491,11 @@ class Element(Plugin):
#
def _update_ready_for_runtime_and_cached(self):
if not self.__ready_for_runtime_and_cached:
- if self.__runtime_deps_uncached == 0 and self._cached_success() and \
- self.__cache_key:
+ if (
+ self.__runtime_deps_uncached == 0
+ and self._cached_success()
+ and self.__cache_key
+ ):
self.__ready_for_runtime_and_cached = True
# Notify reverse dependencies
@@ -2436,6 +2645,7 @@ class Element(Plugin):
self.prepare(sandbox)
if workspace:
+
def mark_workspace_prepared():
workspace.prepared = True
@@ -2451,36 +2661,59 @@ class Element(Plugin):
def __preflight(self):
if self.BST_FORBID_RDEPENDS and self.BST_FORBID_BDEPENDS:
- if any(self.dependencies(Scope.RUN, recurse=False)) or any(self.dependencies(Scope.BUILD, recurse=False)):
- raise ElementError("{}: Dependencies are forbidden for '{}' elements"
- .format(self, self.get_kind()), reason="element-forbidden-depends")
+ if any(self.dependencies(Scope.RUN, recurse=False)) or any(
+ self.dependencies(Scope.BUILD, recurse=False)
+ ):
+ raise ElementError(
+ "{}: Dependencies are forbidden for '{}' elements".format(
+ self, self.get_kind()
+ ),
+ reason="element-forbidden-depends",
+ )
if self.BST_FORBID_RDEPENDS:
if any(self.dependencies(Scope.RUN, recurse=False)):
- raise ElementError("{}: Runtime dependencies are forbidden for '{}' elements"
- .format(self, self.get_kind()), reason="element-forbidden-rdepends")
+ raise ElementError(
+ "{}: Runtime dependencies are forbidden for '{}' elements".format(
+ self, self.get_kind()
+ ),
+ reason="element-forbidden-rdepends",
+ )
if self.BST_FORBID_BDEPENDS:
if any(self.dependencies(Scope.BUILD, recurse=False)):
- raise ElementError("{}: Build dependencies are forbidden for '{}' elements"
- .format(self, self.get_kind()), reason="element-forbidden-bdepends")
+ raise ElementError(
+ "{}: Build dependencies are forbidden for '{}' elements".format(
+ self, self.get_kind()
+ ),
+ reason="element-forbidden-bdepends",
+ )
if self.BST_FORBID_SOURCES:
if any(self.sources()):
- raise ElementError("{}: Sources are forbidden for '{}' elements"
- .format(self, self.get_kind()), reason="element-forbidden-sources")
+ raise ElementError(
+ "{}: Sources are forbidden for '{}' elements".format(
+ self, self.get_kind()
+ ),
+ reason="element-forbidden-sources",
+ )
try:
self.preflight()
except BstError as e:
# Prepend provenance to the error
- raise ElementError("{}: {}".format(self, e), reason=e.reason, detail=e.detail) from e
+ raise ElementError(
+ "{}: {}".format(self, e), reason=e.reason, detail=e.detail
+ ) from e
        # Ensure that the first source does not need access to previous sources
if self.__sources and self.__sources[0]._requires_previous_sources():
- raise ElementError("{}: {} cannot be the first source of an element "
- "as it requires access to previous sources"
- .format(self, self.__sources[0]))
+ raise ElementError(
+ "{}: {} cannot be the first source of an element "
+ "as it requires access to previous sources".format(
+ self, self.__sources[0]
+ )
+ )
# Preflight the sources
for source in self.sources():
@@ -2492,7 +2725,8 @@ class Element(Plugin):
#
def __assert_cached(self):
assert self._cached(), "{}: Missing artifact {}".format(
- self, self._get_brief_display_key())
+ self, self._get_brief_display_key()
+ )
# __get_tainted():
#
@@ -2515,11 +2749,16 @@ class Element(Plugin):
workspaced = self.__artifact.get_metadata_workspaced()
# Whether this artifact's dependencies have workspaces
- workspaced_dependencies = self.__artifact.get_metadata_workspaced_dependencies()
+ workspaced_dependencies = (
+ self.__artifact.get_metadata_workspaced_dependencies()
+ )
# Other conditions should be or-ed
- self.__tainted = (workspaced or workspaced_dependencies or
- not self.__sandbox_config_supported)
+ self.__tainted = (
+ workspaced
+ or workspaced_dependencies
+ or not self.__sandbox_config_supported
+ )
return self.__tainted
@@ -2550,7 +2789,15 @@ class Element(Plugin):
# (Sandbox): A usable sandbox
#
@contextmanager
- def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True, bare_directory=False):
+ def __sandbox(
+ self,
+ directory,
+ stdout=None,
+ stderr=None,
+ config=None,
+ allow_remote=True,
+ bare_directory=False,
+ ):
context = self._get_context()
project = self._get_project()
platform = context.platform
@@ -2558,45 +2805,70 @@ class Element(Plugin):
if directory is not None and allow_remote and self.__use_remote_execution():
if not self.BST_VIRTUAL_DIRECTORY:
- raise ElementError("Element {} is configured to use remote execution but plugin does not support it."
- .format(self.name), detail="Plugin '{kind}' does not support virtual directories."
- .format(kind=self.get_kind()))
-
- self.info("Using a remote sandbox for artifact {} with directory '{}'".format(self.name, directory))
-
- output_files_required = context.require_artifact_files or self._artifact_files_required()
-
- sandbox = SandboxRemote(context, project,
- directory,
- plugin=self,
- stdout=stdout,
- stderr=stderr,
- config=config,
- specs=self.__remote_execution_specs,
- bare_directory=bare_directory,
- allow_real_directory=False,
- output_files_required=output_files_required)
+ raise ElementError(
+ "Element {} is configured to use remote execution but plugin does not support it.".format(
+ self.name
+ ),
+ detail="Plugin '{kind}' does not support virtual directories.".format(
+ kind=self.get_kind()
+ ),
+ )
+
+ self.info(
+ "Using a remote sandbox for artifact {} with directory '{}'".format(
+ self.name, directory
+ )
+ )
+
+ output_files_required = (
+ context.require_artifact_files or self._artifact_files_required()
+ )
+
+ sandbox = SandboxRemote(
+ context,
+ project,
+ directory,
+ plugin=self,
+ stdout=stdout,
+ stderr=stderr,
+ config=config,
+ specs=self.__remote_execution_specs,
+ bare_directory=bare_directory,
+ allow_real_directory=False,
+ output_files_required=output_files_required,
+ )
yield sandbox
elif directory is not None and os.path.exists(directory):
- sandbox = platform.create_sandbox(context, project,
- directory,
- plugin=self,
- stdout=stdout,
- stderr=stderr,
- config=config,
- bare_directory=bare_directory,
- allow_real_directory=not self.BST_VIRTUAL_DIRECTORY)
+ sandbox = platform.create_sandbox(
+ context,
+ project,
+ directory,
+ plugin=self,
+ stdout=stdout,
+ stderr=stderr,
+ config=config,
+ bare_directory=bare_directory,
+ allow_real_directory=not self.BST_VIRTUAL_DIRECTORY,
+ )
yield sandbox
else:
os.makedirs(context.builddir, exist_ok=True)
- rootdir = tempfile.mkdtemp(prefix="{}-".format(self.normal_name), dir=context.builddir)
+ rootdir = tempfile.mkdtemp(
+ prefix="{}-".format(self.normal_name), dir=context.builddir
+ )
# Recursive contextmanager...
- with self.__sandbox(rootdir, stdout=stdout, stderr=stderr, config=config,
- allow_remote=allow_remote, bare_directory=False) as sandbox:
+ with self.__sandbox(
+ rootdir,
+ stdout=stdout,
+ stderr=stderr,
+ config=config,
+ allow_remote=allow_remote,
+ bare_directory=False,
+ ) as sandbox:
yield sandbox
# Cleanup the build dir
@@ -2618,9 +2890,9 @@ class Element(Plugin):
# Extend project wide split rules with any split rules defined by the element
element_splits._composite(splits)
- element_bst['split-rules'] = splits
- element_public['bst'] = element_bst
- defaults['public'] = element_public
+ element_bst["split-rules"] = splits
+ element_public["bst"] = element_bst
+ defaults["public"] = element_public
@classmethod
def __init_defaults(cls, project, plugin_conf, kind, is_junction):
@@ -2690,7 +2962,7 @@ class Element(Plugin):
else:
project_nocache = project.base_env_nocache
- default_nocache = cls.__defaults.get_str_list('environment-nocache', default=[])
+ default_nocache = cls.__defaults.get_str_list("environment-nocache", default=[])
element_nocache = meta.env_nocache
# Accumulate values from the element default, the project and the element
@@ -2705,7 +2977,7 @@ class Element(Plugin):
#
@classmethod
def __extract_variables(cls, project, meta):
- default_vars = cls.__defaults.get_mapping('variables', default={})
+ default_vars = cls.__defaults.get_mapping("variables", default={})
if meta.is_junction:
variables = project.first_pass_config.base_variables.clone()
@@ -2716,7 +2988,7 @@ class Element(Plugin):
meta.variables._composite(variables)
variables._assert_fully_composited()
- for var in ('project-name', 'element-name', 'max-jobs'):
+ for var in ("project-name", "element-name", "max-jobs"):
node = variables.get_node(var, allow_none=True)
if node is None:
@@ -2724,8 +2996,12 @@ class Element(Plugin):
provenance = node.get_provenance()
if not provenance._is_synthetic:
- raise LoadError("{}: invalid redefinition of protected variable '{}'"
- .format(provenance, var), LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
+ raise LoadError(
+ "{}: invalid redefinition of protected variable '{}'".format(
+ provenance, var
+ ),
+ LoadErrorReason.PROTECTED_VARIABLE_REDEFINED,
+ )
return variables
@@ -2736,7 +3012,7 @@ class Element(Plugin):
def __extract_config(cls, meta):
# The default config is already composited with the project overrides
- config = cls.__defaults.get_mapping('config', default={})
+ config = cls.__defaults.get_mapping("config", default={})
config = config.clone()
meta.config._composite(config)
@@ -2749,10 +3025,7 @@ class Element(Plugin):
@classmethod
def __extract_sandbox_config(cls, context, project, meta):
if meta.is_junction:
- sandbox_config = Node.from_dict({
- 'build-uid': 0,
- 'build-gid': 0
- })
+ sandbox_config = Node.from_dict({"build-uid": 0, "build-gid": 0})
else:
sandbox_config = project._sandbox.clone()
@@ -2762,7 +3035,7 @@ class Element(Plugin):
host_os = platform.get_host_os()
# The default config is already composited with the project overrides
- sandbox_defaults = cls.__defaults.get_mapping('sandbox', default={})
+ sandbox_defaults = cls.__defaults.get_mapping("sandbox", default={})
sandbox_defaults = sandbox_defaults.clone()
sandbox_defaults._composite(sandbox_config)
@@ -2770,41 +3043,44 @@ class Element(Plugin):
sandbox_config._assert_fully_composited()
# Sandbox config, unlike others, has fixed members so we should validate them
- sandbox_config.validate_keys(['build-uid', 'build-gid', 'build-os', 'build-arch'])
+ sandbox_config.validate_keys(
+ ["build-uid", "build-gid", "build-os", "build-arch"]
+ )
- build_arch = sandbox_config.get_str('build-arch', default=None)
+ build_arch = sandbox_config.get_str("build-arch", default=None)
if build_arch:
build_arch = Platform.canonicalize_arch(build_arch)
else:
build_arch = host_arch
return SandboxConfig(
- sandbox_config.get_int('build-uid'),
- sandbox_config.get_int('build-gid'),
- sandbox_config.get_str('build-os', default=host_os),
- build_arch)
+ sandbox_config.get_int("build-uid"),
+ sandbox_config.get_int("build-gid"),
+ sandbox_config.get_str("build-os", default=host_os),
+ build_arch,
+ )
# This makes a special exception for the split rules, which
    # elements may extend but whose defaults are defined in the project.
#
@classmethod
def __extract_public(cls, meta):
- base_public = cls.__defaults.get_mapping('public', default={})
+ base_public = cls.__defaults.get_mapping("public", default={})
base_public = base_public.clone()
- base_bst = base_public.get_mapping('bst', default={})
- base_splits = base_bst.get_mapping('split-rules', default={})
+ base_bst = base_public.get_mapping("bst", default={})
+ base_splits = base_bst.get_mapping("split-rules", default={})
element_public = meta.public.clone()
- element_bst = element_public.get_mapping('bst', default={})
- element_splits = element_bst.get_mapping('split-rules', default={})
+ element_bst = element_public.get_mapping("bst", default={})
+ element_splits = element_bst.get_mapping("split-rules", default={})
# Allow elements to extend the default splits defined in their project or
# element specific defaults
element_splits._composite(base_splits)
- element_bst['split-rules'] = base_splits
- element_public['bst'] = element_bst
+ element_bst["split-rules"] = base_splits
+ element_public["bst"] = element_bst
element_public._assert_fully_composited()
@@ -2812,24 +3088,27 @@ class Element(Plugin):
# Expand the splits in the public data using the Variables in the element
def __expand_splits(self, element_public):
- element_bst = element_public.get_mapping('bst', default={})
- element_splits = element_bst.get_mapping('split-rules', default={})
+ element_bst = element_public.get_mapping("bst", default={})
+ element_splits = element_bst.get_mapping("split-rules", default={})
# Resolve any variables in the public split rules directly
for domain, splits in element_splits.items():
splits = [
- self.__variables.subst(split.strip())
- for split in splits.as_str_list()
+ self.__variables.subst(split.strip()) for split in splits.as_str_list()
]
element_splits[domain] = splits
return element_public
def __init_splits(self):
- bstdata = self.get_public_data('bst')
- splits = bstdata.get_mapping('split-rules')
+ bstdata = self.get_public_data("bst")
+ splits = bstdata.get_mapping("split-rules")
self.__splits = {
- domain: re.compile('^(?:' + '|'.join([utils._glob2re(r) for r in rules.as_str_list()]) + ')$')
+ domain: re.compile(
+ "^(?:"
+ + "|".join([utils._glob2re(r) for r in rules.as_str_list()])
+ + ")$"
+ )
for domain, rules in splits.items()
}
@@ -2909,7 +3188,9 @@ class Element(Plugin):
return partial(self.__split_filter, element_domains, include, exclude, orphans)
def __compute_splits(self, include=None, exclude=None, orphans=True):
- filter_func = self.__split_filter_func(include=include, exclude=exclude, orphans=orphans)
+ filter_func = self.__split_filter_func(
+ include=include, exclude=exclude, orphans=orphans
+ )
files_vdir = self.__artifact.get_files()
@@ -2930,10 +3211,12 @@ class Element(Plugin):
# the build, but I can think of no reason to change it mid-build.
# If this ever changes, things will go wrong unexpectedly.
if not self.__whitelist_regex:
- bstdata = self.get_public_data('bst')
- whitelist = bstdata.get_str_list('overlap-whitelist', default=[])
- whitelist_expressions = [utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist]
- expression = ('^(?:' + '|'.join(whitelist_expressions) + ')$')
+ bstdata = self.get_public_data("bst")
+ whitelist = bstdata.get_str_list("overlap-whitelist", default=[])
+ whitelist_expressions = [
+ utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist
+ ]
+ expression = "^(?:" + "|".join(whitelist_expressions) + ")$"
self.__whitelist_regex = re.compile(expression)
return self.__whitelist_regex.match(os.path.join(os.sep, path))
@@ -2991,8 +3274,7 @@ class Element(Plugin):
#
def __pull_weak(self, *, pull_buildtrees):
weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
- if not self.__artifacts.pull(self, weak_key,
- pull_buildtrees=pull_buildtrees):
+ if not self.__artifacts.pull(self, weak_key, pull_buildtrees=pull_buildtrees):
return False
# extract strong cache key from this newly fetched artifact
@@ -3014,7 +3296,9 @@ class Element(Plugin):
# commit all other sources by themselves
for ix, source in enumerate(self.__sources):
if source.BST_REQUIRES_PREVIOUS_SOURCES_STAGE:
- self.__sourcecache.commit(source, self.__sources[last_requires_previous:ix])
+ self.__sourcecache.commit(
+ source, self.__sources[last_requires_previous:ix]
+ )
last_requires_previous = ix
else:
self.__sourcecache.commit(source, [])
@@ -3102,7 +3386,9 @@ class Element(Plugin):
if self.__strict_cache_key is None:
dependencies = [
- [e.project_name, e.name, e.__strict_cache_key] if e.__strict_cache_key is not None else None
+ [e.project_name, e.name, e.__strict_cache_key]
+ if e.__strict_cache_key is not None
+ else None
for e in self.dependencies(Scope.BUILD)
]
self.__strict_cache_key = self._calculate_cache_key(dependencies)
@@ -3118,7 +3404,10 @@ class Element(Plugin):
else:
self.__update_strict_cache_key_of_rdeps()
- if self.__strict_cache_key is not None and self.__can_query_cache_callback is not None:
+ if (
+ self.__strict_cache_key is not None
+ and self.__can_query_cache_callback is not None
+ ):
self.__can_query_cache_callback(self)
self.__can_query_cache_callback = None
@@ -3145,8 +3434,12 @@ class Element(Plugin):
return
if not self.__strict_artifact:
- self.__strict_artifact = Artifact(self, context, strong_key=self.__strict_cache_key,
- weak_key=self.__weak_cache_key)
+ self.__strict_artifact = Artifact(
+ self,
+ context,
+ strong_key=self.__strict_cache_key,
+ weak_key=self.__weak_cache_key,
+ )
if context.get_strict():
self.__artifact = self.__strict_artifact
@@ -3202,8 +3495,10 @@ class Element(Plugin):
#
def __update_strict_cache_key_of_rdeps(self):
if not self.__updated_strict_cache_keys_of_rdeps:
- if self.__runtime_deps_without_strict_cache_key == 0 and \
- self.__strict_cache_key is not None:
+ if (
+ self.__runtime_deps_without_strict_cache_key == 0
+ and self.__strict_cache_key is not None
+ ):
self.__updated_strict_cache_keys_of_rdeps = True
# Notify reverse dependencies
@@ -3237,8 +3532,10 @@ class Element(Plugin):
#
def __update_ready_for_runtime(self):
if not self.__ready_for_runtime:
- if self.__runtime_deps_without_cache_key == 0 and \
- self.__cache_key is not None:
+ if (
+ self.__runtime_deps_without_cache_key == 0
+ and self.__cache_key is not None
+ ):
self.__ready_for_runtime = True
# Notify reverse dependencies
@@ -3265,10 +3562,12 @@ class Element(Plugin):
def _overlap_error_detail(f, forbidden_overlap_elements, elements):
if forbidden_overlap_elements:
- return ("/{}: {} {} not permitted to overlap other elements, order {} \n"
- .format(f, " and ".join(forbidden_overlap_elements),
- "is" if len(forbidden_overlap_elements) == 1 else "are",
- " above ".join(reversed(elements))))
+ return "/{}: {} {} not permitted to overlap other elements, order {} \n".format(
+ f,
+ " and ".join(forbidden_overlap_elements),
+ "is" if len(forbidden_overlap_elements) == 1 else "are",
+ " above ".join(reversed(elements)),
+ )
else:
return ""
@@ -3285,7 +3584,7 @@ def _overlap_error_detail(f, forbidden_overlap_elements, elements):
# (str): The normalised element name
#
def _get_normal_name(element_name):
- return os.path.splitext(element_name.replace(os.sep, '-'))[0]
+ return os.path.splitext(element_name.replace(os.sep, "-"))[0]
# _compose_artifact_name():
@@ -3301,12 +3600,9 @@ def _get_normal_name(element_name):
# (str): The constructed artifact name path
#
def _compose_artifact_name(project_name, normal_name, cache_key):
- valid_chars = string.digits + string.ascii_letters + '-._'
- normal_name = ''.join([
- x if x in valid_chars else '_'
- for x in normal_name
- ])
+ valid_chars = string.digits + string.ascii_letters + "-._"
+ normal_name = "".join([x if x in valid_chars else "_" for x in normal_name])
# Note that project names are not allowed to contain slashes. Element names containing
# a '/' will have this replaced with a '-' upon Element object instantiation.
- return '{0}/{1}/{2}'.format(project_name, normal_name, cache_key)
+ return "{0}/{1}/{2}".format(project_name, normal_name, cache_key)
diff --git a/src/buildstream/plugin.py b/src/buildstream/plugin.py
index c1ee333f7..2e34106de 100644
--- a/src/buildstream/plugin.py
+++ b/src/buildstream/plugin.py
@@ -127,10 +127,11 @@ if TYPE_CHECKING:
# pylint: disable=cyclic-import
from ._context import Context
from ._project import Project
+
# pylint: enable=cyclic-import
-class Plugin():
+class Plugin:
"""Plugin()
Base Plugin class.
@@ -210,15 +211,17 @@ class Plugin():
#
# Note that Plugins can only be instantiated in the main process before
# scheduling tasks.
- __TABLE = WeakValueDictionary() # type: WeakValueDictionary[int, Plugin]
-
- def __init__(self,
- name: str,
- context: 'Context',
- project: 'Project',
- provenance: ProvenanceInformation,
- type_tag: str,
- unique_id: Optional[int] = None):
+ __TABLE = WeakValueDictionary() # type: WeakValueDictionary[int, Plugin]
+
+ def __init__(
+ self,
+ name: str,
+ context: "Context",
+ project: "Project",
+ provenance: ProvenanceInformation,
+ type_tag: str,
+ unique_id: Optional[int] = None,
+ ):
self.name = name
"""The plugin name
@@ -248,30 +251,31 @@ class Plugin():
# plugin in a subprocess and should use the same ID.
self._unique_id = unique_id
- self.__context = context # The Context object
+ self.__context = context # The Context object
# Note that when pickling jobs over to a child process, we rely on this
# reference to the Project, it keeps the plugin factory alive. If the
# factory were to be GC'd then we would see undefined behaviour. Make
# sure to test plugin pickling if this reference is to be removed.
- self.__project = project # The Project object
+ self.__project = project # The Project object
self.__provenance = provenance # The Provenance information
- self.__type_tag = type_tag # The type of plugin (element or source)
- self.__configuring = False # Whether we are currently configuring
+ self.__type_tag = type_tag # The type of plugin (element or source)
+ self.__configuring = False # Whether we are currently configuring
# Get the full_name as project & type_tag are resolved
self.__full_name = self.__get_full_name()
# Infer the kind identifier
modulename = type(self).__module__
- self.__kind = modulename.split('.')[-1]
+ self.__kind = modulename.split(".")[-1]
self.debug("Created: {}".format(self))
# If this plugin has been deprecated, emit a warning.
if self.BST_PLUGIN_DEPRECATED and not self.__deprecation_warning_silenced():
- detail = "Using deprecated plugin {}: {}".format(self.__kind,
- self.BST_PLUGIN_DEPRECATION_MESSAGE)
+ detail = "Using deprecated plugin {}: {}".format(
+ self.__kind, self.BST_PLUGIN_DEPRECATION_MESSAGE
+ )
self.__message(MessageType.WARN, detail)
def __del__(self):
@@ -282,9 +286,8 @@ class Plugin():
def __str__(self):
return "{kind} {typetag} at {provenance}".format(
- kind=self.__kind,
- typetag=self.__type_tag,
- provenance=self.__provenance)
+ kind=self.__kind, typetag=self.__type_tag, provenance=self.__provenance
+ )
#############################################################
# Abstract Methods #
@@ -312,8 +315,11 @@ class Plugin():
:func:`Element.node_subst_member() <buildstream.element.Element.node_subst_member>`
method can be used.
"""
- raise ImplError("{tag} plugin '{kind}' does not implement configure()".format(
- tag=self.__type_tag, kind=self.get_kind()))
+ raise ImplError(
+ "{tag} plugin '{kind}' does not implement configure()".format(
+ tag=self.__type_tag, kind=self.get_kind()
+ )
+ )
def preflight(self) -> None:
"""Preflight Check
@@ -333,8 +339,11 @@ class Plugin():
them with :func:`utils.get_host_tool() <buildstream.utils.get_host_tool>` which
will raise an error automatically informing the user that a host tool is needed.
"""
- raise ImplError("{tag} plugin '{kind}' does not implement preflight()".format(
- tag=self.__type_tag, kind=self.get_kind()))
+ raise ImplError(
+ "{tag} plugin '{kind}' does not implement preflight()".format(
+ tag=self.__type_tag, kind=self.get_kind()
+ )
+ )
def get_unique_key(self) -> SourceRef:
"""Return something which uniquely identifies the plugin input
@@ -355,8 +364,11 @@ class Plugin():
which is to say that the Source is expected to have an exact *ref* indicating
exactly what source is going to be staged.
"""
- raise ImplError("{tag} plugin '{kind}' does not implement get_unique_key()".format(
- tag=self.__type_tag, kind=self.get_kind()))
+ raise ImplError(
+ "{tag} plugin '{kind}' does not implement get_unique_key()".format(
+ tag=self.__type_tag, kind=self.get_kind()
+ )
+ )
#############################################################
# Public Methods #
@@ -369,8 +381,7 @@ class Plugin():
"""
return self.__kind
- def node_get_project_path(self, node, *,
- check_is_file=False, check_is_dir=False):
+ def node_get_project_path(self, node, *, check_is_file=False, check_is_dir=False):
"""Fetches a project path from a dictionary node and validates it
Paths are asserted to never lead to a directory outside of the
@@ -408,9 +419,9 @@ class Plugin():
"""
- return self.__project.get_path_from_node(node,
- check_is_file=check_is_file,
- check_is_dir=check_is_dir)
+ return self.__project.get_path_from_node(
+ node, check_is_file=check_is_file, check_is_dir=check_is_dir
+ )
def debug(self, brief: str, *, detail: Optional[str] = None) -> None:
"""Print a debugging message
@@ -448,7 +459,13 @@ class Plugin():
"""
self.__message(MessageType.INFO, brief, detail=detail)
- def warn(self, brief: str, *, detail: Optional[str] = None, warning_token: Optional[str] = None) -> None:
+ def warn(
+ self,
+ brief: str,
+ *,
+ detail: Optional[str] = None,
+ warning_token: Optional[str] = None
+ ) -> None:
"""Print a warning message, checks warning_token against project configuration
Args:
@@ -468,7 +485,9 @@ class Plugin():
if project._warning_is_fatal(warning_token):
detail = detail if detail else ""
- raise PluginError(message="{}\n{}".format(brief, detail), reason=warning_token)
+ raise PluginError(
+ message="{}\n{}".format(brief, detail), reason=warning_token
+ )
self.__message(MessageType.WARN, brief=brief, detail=detail)
@@ -485,11 +504,13 @@ class Plugin():
self.__message(MessageType.LOG, brief, detail=detail)
@contextmanager
- def timed_activity(self,
- activity_name: str,
- *,
- detail: Optional[str] = None,
- silent_nested: bool = False) -> Generator[None, None, None]:
+ def timed_activity(
+ self,
+ activity_name: str,
+ *,
+ detail: Optional[str] = None,
+ silent_nested: bool = False
+ ) -> Generator[None, None, None]:
"""Context manager for performing timed activities in plugins
Args:
@@ -511,13 +532,21 @@ class Plugin():
# This will raise SourceError on its own
self.call(... command which takes time ...)
"""
- with self.__context.messenger.timed_activity(activity_name,
- element_name=self._get_full_name(),
- detail=detail,
- silent_nested=silent_nested):
+ with self.__context.messenger.timed_activity(
+ activity_name,
+ element_name=self._get_full_name(),
+ detail=detail,
+ silent_nested=silent_nested,
+ ):
yield
- def call(self, *popenargs, fail: Optional[str] = None, fail_temporarily: bool = False, **kwargs) -> int:
+ def call(
+ self,
+ *popenargs,
+ fail: Optional[str] = None,
+ fail_temporarily: bool = False,
+ **kwargs
+ ) -> int:
"""A wrapper for subprocess.call()
Args:
@@ -548,10 +577,14 @@ class Plugin():
"Failed to download ponies from {}".format(
self.mirror_directory))
"""
- exit_code, _ = self.__call(*popenargs, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
+ exit_code, _ = self.__call(
+ *popenargs, fail=fail, fail_temporarily=fail_temporarily, **kwargs
+ )
return exit_code
- def check_output(self, *popenargs, fail=None, fail_temporarily=False, **kwargs) -> Tuple[int, str]:
+ def check_output(
+ self, *popenargs, fail=None, fail_temporarily=False, **kwargs
+ ) -> Tuple[int, str]:
"""A wrapper for subprocess.check_output()
Args:
@@ -597,7 +630,13 @@ class Plugin():
raise SourceError(
fmt.format(plugin=self, track=tracking)) from e
"""
- return self.__call(*popenargs, collect_stdout=True, fail=fail, fail_temporarily=fail_temporarily, **kwargs)
+ return self.__call(
+ *popenargs,
+ collect_stdout=True,
+ fail=fail,
+ fail_temporarily=fail_temporarily,
+ **kwargs
+ )
#############################################################
# Private Methods used in BuildStream #
@@ -722,8 +761,11 @@ class Plugin():
# so it's not an ImplError - those apply to custom plugins. Direct
# descendants of Plugin must implement this, e.g. Element and Source.
# Raise NotImplementedError as this would be an internal bug.
- raise NotImplementedError("{tag} plugin '{kind}' does not implement _get_args_for_child_job_pickling()".format(
- tag=self.__type_tag, kind=self.get_kind()))
+ raise NotImplementedError(
+ "{tag} plugin '{kind}' does not implement _get_args_for_child_job_pickling()".format(
+ tag=self.__type_tag, kind=self.get_kind()
+ )
+ )
#############################################################
# Local Private Methods #
@@ -731,36 +773,47 @@ class Plugin():
# Internal subprocess implementation for the call() and check_output() APIs
#
- def __call(self, *popenargs, collect_stdout=False, fail=None, fail_temporarily=False, **kwargs):
+ def __call(
+ self,
+ *popenargs,
+ collect_stdout=False,
+ fail=None,
+ fail_temporarily=False,
+ **kwargs
+ ):
with self._output_file() as output_file:
- if 'stdout' not in kwargs:
- kwargs['stdout'] = output_file
- if 'stderr' not in kwargs:
- kwargs['stderr'] = output_file
+ if "stdout" not in kwargs:
+ kwargs["stdout"] = output_file
+ if "stderr" not in kwargs:
+ kwargs["stderr"] = output_file
if collect_stdout:
- kwargs['stdout'] = subprocess.PIPE
+ kwargs["stdout"] = subprocess.PIPE
self.__note_command(output_file, *popenargs, **kwargs)
exit_code, output = utils._call(*popenargs, **kwargs)
if fail and exit_code:
- raise PluginError("{plugin}: {message}".format(plugin=self, message=fail),
- temporary=fail_temporarily)
+ raise PluginError(
+ "{plugin}: {message}".format(plugin=self, message=fail),
+ temporary=fail_temporarily,
+ )
return (exit_code, output)
def __message(self, message_type, brief, **kwargs):
- message = Message(message_type, brief, element_name=self._get_full_name(), **kwargs)
+ message = Message(
+ message_type, brief, element_name=self._get_full_name(), **kwargs
+ )
self.__context.messenger.message(message)
def __note_command(self, output, *popenargs, **kwargs):
- workdir = kwargs.get('cwd', os.getcwd())
+ workdir = kwargs.get("cwd", os.getcwd())
command = " ".join(popenargs[0])
- output.write('Running host command {}: {}\n'.format(workdir, command))
+ output.write("Running host command {}: {}\n".format(workdir, command))
output.flush()
- self.status('Running host command', detail=command)
+ self.status("Running host command", detail=command)
def __deprecation_warning_silenced(self):
if not self.BST_PLUGIN_DEPRECATED:
@@ -770,10 +823,10 @@ class Plugin():
project = self.__project
for key, value in project.element_overrides.items():
- if value.get_bool('suppress-deprecation-warnings', default=False):
+ if value.get_bool("suppress-deprecation-warnings", default=False):
silenced_warnings.add(key)
for key, value in project.source_overrides.items():
- if value.get_bool('suppress-deprecation-warnings', default=False):
+ if value.get_bool("suppress-deprecation-warnings", default=False):
silenced_warnings.add(key)
return self.get_kind() in silenced_warnings
@@ -781,9 +834,11 @@ class Plugin():
def __get_full_name(self):
project = self.__project
# Set the name, depending on element or source plugin type
- name = self._element_name if self.__type_tag == "source" else self.name # pylint: disable=no-member
+ name = (
+ self._element_name if self.__type_tag == "source" else self.name
+ ) # pylint: disable=no-member
if project.junction:
- return '{}:{}'.format(project.junction.name, name)
+ return "{}:{}".format(project.junction.name, name)
else:
return name
@@ -791,9 +846,7 @@ class Plugin():
# A local table for _prefix_warning()
#
__CORE_WARNINGS = [
- value
- for name, value in CoreWarnings.__dict__.items()
- if not name.startswith("__")
+ value for name, value in CoreWarnings.__dict__.items() if not name.startswith("__")
]
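
Every hunk in this file, as in the rest of the commit, follows the same mechanical pattern: string quotes are normalised to double quotes, and calls that exceed the formatter's line limit are exploded to one argument per line with a trailing comma. A runnable schematic using made-up names rather than BuildStream APIs:

    def some_function(first, second, keyword_argument=None):
        return (first, second, keyword_argument)

    # Before: hand-wrapped continuation lines and single-quoted strings.
    result = some_function('one', 'two',
                           keyword_argument='value')

    # After: double quotes; a call that no longer fits on one line is
    # exploded to one argument per line, ending with a trailing comma.
    result = some_function(
        "one",
        "two",
        keyword_argument="value",
    )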
diff --git a/src/buildstream/plugins/elements/autotools.py b/src/buildstream/plugins/elements/autotools.py
index 7a05336b7..71b2e5854 100644
--- a/src/buildstream/plugins/elements/autotools.py
+++ b/src/buildstream/plugins/elements/autotools.py
@@ -66,8 +66,9 @@ class AutotoolsElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
- collect=self.get_variable('install-root'))
+ self.batch_prepare_assemble(
+ SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root")
+ )
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/compose.py b/src/buildstream/plugins/elements/compose.py
index 1c523eeb2..461320008 100644
--- a/src/buildstream/plugins/elements/compose.py
+++ b/src/buildstream/plugins/elements/compose.py
@@ -59,27 +59,27 @@ class ComposeElement(Element):
BST_VIRTUAL_DIRECTORY = True
def configure(self, node):
- node.validate_keys([
- 'integrate', 'include', 'exclude', 'include-orphans'
- ])
+ node.validate_keys(["integrate", "include", "exclude", "include-orphans"])
# We name this variable 'integration' only to avoid
# collision with the Element.integrate() method.
- self.integration = node.get_bool('integrate')
- self.include = node.get_str_list('include')
- self.exclude = node.get_str_list('exclude')
- self.include_orphans = node.get_bool('include-orphans')
+ self.integration = node.get_bool("integrate")
+ self.include = node.get_str_list("include")
+ self.exclude = node.get_str_list("exclude")
+ self.include_orphans = node.get_bool("include-orphans")
def preflight(self):
pass
def get_unique_key(self):
- key = {'integrate': self.integration,
- 'include': sorted(self.include),
- 'orphans': self.include_orphans}
+ key = {
+ "integrate": self.integration,
+ "include": sorted(self.include),
+ "orphans": self.include_orphans,
+ }
if self.exclude:
- key['exclude'] = sorted(self.exclude)
+ key["exclude"] = sorted(self.exclude)
return key
@@ -101,9 +101,11 @@ class ComposeElement(Element):
if require_split:
with self.timed_activity("Computing split", silent_nested=True):
for dep in self.dependencies(Scope.BUILD):
- files = dep.compute_manifest(include=self.include,
- exclude=self.exclude,
- orphans=self.include_orphans)
+ files = dep.compute_manifest(
+ include=self.include,
+ exclude=self.exclude,
+ orphans=self.include_orphans,
+ )
manifest.update(files)
# Make a snapshot of all the files.
@@ -138,13 +140,16 @@ class ComposeElement(Element):
for path in basedir_contents:
if path not in snapshot:
added_files.add(path)
- self.info("Integration modified {}, added {} and removed {} files"
- .format(len(modified_files), len(added_files), len(removed_files)))
+ self.info(
+ "Integration modified {}, added {} and removed {} files".format(
+ len(modified_files), len(added_files), len(removed_files)
+ )
+ )
# The remainder of this is expensive, make an early exit if
# we're not being selective about what is to be included.
if not require_split:
- return '/'
+ return "/"
# Do we want to force include files which were modified by
# the integration commands, even if they were not added ?
@@ -156,7 +161,7 @@ class ComposeElement(Element):
# instead of into a subdir. The element assemble() method should
# support this in some way.
#
- installdir = vbasedir.descend('buildstream', 'install', create=True)
+ installdir = vbasedir.descend("buildstream", "install", create=True)
# We already saved the manifest for created files in the integration phase,
# now collect the rest of the manifest.
@@ -181,12 +186,16 @@ class ComposeElement(Element):
def import_filter(path):
return path in manifest
- with self.timed_activity("Creating composition", detail=detail, silent_nested=True):
+ with self.timed_activity(
+ "Creating composition", detail=detail, silent_nested=True
+ ):
self.info("Composing {} files".format(len(manifest)))
- installdir.import_files(vbasedir, filter_callback=import_filter, can_link=True)
+ installdir.import_files(
+ vbasedir, filter_callback=import_filter, can_link=True
+ )
# And we're done
- return os.path.join(os.sep, 'buildstream', 'install')
+ return os.path.join(os.sep, "buildstream", "install")
# Plugin entry point
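
As a side note, a minimal sketch of the snapshot comparison the compose element performs above to report what the integration commands changed; the file sets are invented for illustration and modified-file detection is omitted:

    # Staged files before integration, and the directory contents afterwards.
    snapshot = {"usr/bin/app", "usr/lib/libexample.so", "etc/old.conf"}
    basedir_contents = {"usr/bin/app", "usr/lib/libexample.so", "etc/ld.so.cache"}

    added_files = {path for path in basedir_contents if path not in snapshot}
    removed_files = {path for path in snapshot if path not in basedir_contents}

    print("Integration added {} and removed {} files".format(len(added_files), len(removed_files)))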
diff --git a/src/buildstream/plugins/elements/filter.py b/src/buildstream/plugins/elements/filter.py
index d808c9e5a..17e15c80c 100644
--- a/src/buildstream/plugins/elements/filter.py
+++ b/src/buildstream/plugins/elements/filter.py
@@ -167,17 +167,17 @@ class FilterElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys([
- 'include', 'exclude', 'include-orphans', 'pass-integration'
- ])
+ node.validate_keys(
+ ["include", "exclude", "include-orphans", "pass-integration"]
+ )
- self.include_node = node.get_sequence('include')
- self.exclude_node = node.get_sequence('exclude')
+ self.include_node = node.get_sequence("include")
+ self.exclude_node = node.get_sequence("exclude")
self.include = self.include_node.as_str_list()
self.exclude = self.exclude_node.as_str_list()
- self.include_orphans = node.get_bool('include-orphans')
- self.pass_integration = node.get_bool('pass-integration', False)
+ self.include_orphans = node.get_bool("include-orphans")
+ self.pass_integration = node.get_bool("pass-integration", False)
def preflight(self):
# Exactly one build-depend is permitted
@@ -186,9 +186,13 @@ class FilterElement(Element):
detail = "Full list of build-depends:\n"
deps_list = " \n".join([x.name for x in build_deps])
detail += deps_list
- raise ElementError("{}: {} element must have exactly 1 build-dependency, actually have {}"
- .format(self, type(self).__name__, len(build_deps)),
- detail=detail, reason="filter-bdepend-wrong-count")
+ raise ElementError(
+ "{}: {} element must have exactly 1 build-dependency, actually have {}".format(
+ self, type(self).__name__, len(build_deps)
+ ),
+ detail=detail,
+ reason="filter-bdepend-wrong-count",
+ )
# That build-depend must not also be a runtime-depend
runtime_deps = list(self.dependencies(Scope.RUN, recurse=False))
@@ -196,23 +200,33 @@ class FilterElement(Element):
detail = "Full list of runtime depends:\n"
deps_list = " \n".join([x.name for x in runtime_deps])
detail += deps_list
- raise ElementError("{}: {} element's build dependency must not also be a runtime dependency"
- .format(self, type(self).__name__),
- detail=detail, reason="filter-bdepend-also-rdepend")
+ raise ElementError(
+ "{}: {} element's build dependency must not also be a runtime dependency".format(
+ self, type(self).__name__
+ ),
+ detail=detail,
+ reason="filter-bdepend-also-rdepend",
+ )
        # If a parent does not produce an artifact, fail and inform the user that the
        # dependency must produce artifacts
if not build_deps[0].BST_ELEMENT_HAS_ARTIFACT:
- detail = "{} does not produce an artifact, so there is nothing to filter".format(build_deps[0].name)
- raise ElementError("{}: {} element's build dependency must produce an artifact"
- .format(self, type(self).__name__),
- detail=detail, reason="filter-bdepend-no-artifact")
+ detail = "{} does not produce an artifact, so there is nothing to filter".format(
+ build_deps[0].name
+ )
+ raise ElementError(
+ "{}: {} element's build dependency must produce an artifact".format(
+ self, type(self).__name__
+ ),
+ detail=detail,
+ reason="filter-bdepend-no-artifact",
+ )
def get_unique_key(self):
key = {
- 'include': sorted(self.include),
- 'exclude': sorted(self.exclude),
- 'orphans': self.include_orphans,
+ "include": sorted(self.include),
+ "exclude": sorted(self.exclude),
+ "orphans": self.include_orphans,
}
return key
@@ -226,8 +240,8 @@ class FilterElement(Element):
with self.timed_activity("Staging artifact", silent_nested=True):
for dep in self.dependencies(Scope.BUILD, recurse=False):
# Check that all the included/excluded domains exist
- pub_data = dep.get_public_data('bst')
- split_rules = pub_data.get_mapping('split-rules', {})
+ pub_data = dep.get_public_data("bst")
+ split_rules = pub_data.get_mapping("split-rules", {})
unfound_includes = []
for domain in self.include:
if domain not in split_rules:
@@ -239,19 +253,35 @@ class FilterElement(Element):
detail = []
if unfound_includes:
- detail.append("Unknown domains were used in {}".format(self.include_node.get_provenance()))
- detail.extend([' - {}'.format(domain) for domain in unfound_includes])
+ detail.append(
+ "Unknown domains were used in {}".format(
+ self.include_node.get_provenance()
+ )
+ )
+ detail.extend(
+ [" - {}".format(domain) for domain in unfound_includes]
+ )
if unfound_excludes:
- detail.append("Unknown domains were used in {}".format(self.exclude_node.get_provenance()))
- detail.extend([' - {}'.format(domain) for domain in unfound_excludes])
+ detail.append(
+ "Unknown domains were used in {}".format(
+ self.exclude_node.get_provenance()
+ )
+ )
+ detail.extend(
+ [" - {}".format(domain) for domain in unfound_excludes]
+ )
if detail:
- detail = '\n'.join(detail)
+ detail = "\n".join(detail)
raise ElementError("Unknown domains declared.", detail=detail)
- dep.stage_artifact(sandbox, include=self.include,
- exclude=self.exclude, orphans=self.include_orphans)
+ dep.stage_artifact(
+ sandbox,
+ include=self.include,
+ exclude=self.exclude,
+ orphans=self.include_orphans,
+ )
return ""
def _get_source_element(self):
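
A minimal sketch of the domain check performed in assemble() above: every requested include/exclude domain must exist in the build dependency's public split-rules. The rule set and domain names here are illustrative:

    split_rules = {"runtime": ["/usr/bin/*"], "devel": ["/usr/include/*"]}
    include = ["runtime", "doc"]

    unfound_includes = [domain for domain in include if domain not in split_rules]
    if unfound_includes:
        detail = "\n".join(" - {}".format(domain) for domain in unfound_includes)
        raise ValueError("Unknown domains declared.\n" + detail)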
diff --git a/src/buildstream/plugins/elements/import.py b/src/buildstream/plugins/elements/import.py
index 404a0f4ee..b7318b131 100644
--- a/src/buildstream/plugins/elements/import.py
+++ b/src/buildstream/plugins/elements/import.py
@@ -45,25 +45,22 @@ class ImportElement(Element):
BST_RUN_COMMANDS = False
def configure(self, node):
- node.validate_keys([
- 'source', 'target'
- ])
+ node.validate_keys(["source", "target"])
- self.source = self.node_subst_vars(node.get_scalar('source'))
- self.target = self.node_subst_vars(node.get_scalar('target'))
+ self.source = self.node_subst_vars(node.get_scalar("source"))
+ self.target = self.node_subst_vars(node.get_scalar("target"))
def preflight(self):
# Assert that we have at least one source to fetch.
sources = list(self.sources())
if not sources:
- raise ElementError("{}: An import element must have at least one source.".format(self))
+ raise ElementError(
+ "{}: An import element must have at least one source.".format(self)
+ )
def get_unique_key(self):
- return {
- 'source': self.source,
- 'target': self.target
- }
+ return {"source": self.source, "target": self.target}
def configure_sandbox(self, sandbox):
pass
@@ -74,35 +71,42 @@ class ImportElement(Element):
def assemble(self, sandbox):
# Stage sources into the input directory
- self.stage_sources(sandbox, 'input')
+ self.stage_sources(sandbox, "input")
rootdir = sandbox.get_virtual_directory()
- inputdir = rootdir.descend('input')
- outputdir = rootdir.descend('output', create=True)
+ inputdir = rootdir.descend("input")
+ outputdir = rootdir.descend("output", create=True)
# The directory to grab
inputdir = inputdir.descend(*self.source.strip(os.sep).split(os.sep))
# The output target directory
- outputdir = outputdir.descend(*self.target.strip(os.sep).split(os.sep), create=True)
+ outputdir = outputdir.descend(
+ *self.target.strip(os.sep).split(os.sep), create=True
+ )
if inputdir.is_empty():
- raise ElementError("{}: No files were found inside directory '{}'"
- .format(self, self.source))
+ raise ElementError(
+ "{}: No files were found inside directory '{}'".format(
+ self, self.source
+ )
+ )
# Move it over
outputdir.import_files(inputdir)
# And we're done
- return '/output'
+ return "/output"
def generate_script(self):
- build_root = self.get_variable('build-root')
- install_root = self.get_variable('install-root')
+ build_root = self.get_variable("build-root")
+ install_root = self.get_variable("install-root")
commands = []
# The directory to grab
- inputdir = os.path.join(build_root, self.normal_name, self.source.lstrip(os.sep))
+ inputdir = os.path.join(
+ build_root, self.normal_name, self.source.lstrip(os.sep)
+ )
inputdir = inputdir.rstrip(os.sep)
# The output target directory
@@ -111,7 +115,9 @@ class ImportElement(Element):
# Ensure target directory parent exists but target directory doesn't
commands.append("mkdir -p {}".format(os.path.dirname(outputdir)))
- commands.append("[ ! -e {outputdir} ] || rmdir {outputdir}".format(outputdir=outputdir))
+ commands.append(
+ "[ ! -e {outputdir} ] || rmdir {outputdir}".format(outputdir=outputdir)
+ )
# Move it over
commands.append("mv {} {}".format(inputdir, outputdir))
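
A minimal sketch of the shell command sequence generate_script() builds above, with hypothetical input and output directories standing in for the values derived from the element's variables:

    import os

    inputdir = "/buildstream/build/hello/src"   # hypothetical staged source location
    outputdir = "/buildstream/install"          # hypothetical install target

    commands = [
        # Ensure the parent of the target exists, but the target itself does not.
        "mkdir -p {}".format(os.path.dirname(outputdir)),
        "[ ! -e {outputdir} ] || rmdir {outputdir}".format(outputdir=outputdir),
        # Move the staged input over to the target.
        "mv {} {}".format(inputdir, outputdir),
    ]
    print("\n".join(commands))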
diff --git a/src/buildstream/plugins/elements/junction.py b/src/buildstream/plugins/elements/junction.py
index aec32516b..f9327352e 100644
--- a/src/buildstream/plugins/elements/junction.py
+++ b/src/buildstream/plugins/elements/junction.py
@@ -187,13 +187,17 @@ class JunctionElement(Element):
BST_FORBID_RDEPENDS = True
def configure(self, node):
- self.path = node.get_str('path', default='')
- self.options = node.get_mapping('options', default={})
- self.target = node.get_str('target', default=None)
+ self.path = node.get_str("path", default="")
+ self.options = node.get_mapping("options", default={})
+ self.target = node.get_str("target", default=None)
self.target_element = None
self.target_junction = None
- self.cache_junction_elements = node.get_bool('cache-junction-elements', default=False)
- self.ignore_junction_remotes = node.get_bool('ignore-junction-remotes', default=False)
+ self.cache_junction_elements = node.get_bool(
+ "cache-junction-elements", default=False
+ )
+ self.ignore_junction_remotes = node.get_bool(
+ "ignore-junction-remotes", default=False
+ )
def preflight(self):
# "target" cannot be used in conjunction with:
@@ -201,23 +205,33 @@ class JunctionElement(Element):
# 2. config['options']
# 3. config['path']
if self.target and any(self.sources()):
- raise ElementError("junction elements cannot define both 'sources' and 'target' config option")
+ raise ElementError(
+ "junction elements cannot define both 'sources' and 'target' config option"
+ )
if self.target and any(self.options.items()):
- raise ElementError("junction elements cannot define both 'options' and 'target'")
+ raise ElementError(
+ "junction elements cannot define both 'options' and 'target'"
+ )
if self.target and self.path:
- raise ElementError("junction elements cannot define both 'path' and 'target'")
+ raise ElementError(
+ "junction elements cannot define both 'path' and 'target'"
+ )
# Validate format of target, if defined
if self.target:
try:
self.target_junction, self.target_element = self.target.split(":")
except ValueError:
- raise ElementError("'target' option must be in format '{junction-name}:{element-name}'")
+ raise ElementError(
+ "'target' option must be in format '{junction-name}:{element-name}'"
+ )
# We cannot target a junction that has the same name as us, since that
# will cause an infinite recursion while trying to load it.
if self.name == self.target_element:
- raise ElementError("junction elements cannot target an element with the same name")
+ raise ElementError(
+ "junction elements cannot target an element with the same name"
+ )
def get_unique_key(self):
# Junctions do not produce artifacts. get_unique_key() implementation
diff --git a/src/buildstream/plugins/elements/manual.py b/src/buildstream/plugins/elements/manual.py
index bbda65312..4e9fded17 100644
--- a/src/buildstream/plugins/elements/manual.py
+++ b/src/buildstream/plugins/elements/manual.py
@@ -42,8 +42,9 @@ class ManualElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
- collect=self.get_variable('install-root'))
+ self.batch_prepare_assemble(
+ SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root")
+ )
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/pip.py b/src/buildstream/plugins/elements/pip.py
index 4a9eefde1..175568955 100644
--- a/src/buildstream/plugins/elements/pip.py
+++ b/src/buildstream/plugins/elements/pip.py
@@ -42,8 +42,9 @@ class PipElement(BuildElement):
# Enable command batching across prepare() and assemble()
def configure_sandbox(self, sandbox):
super().configure_sandbox(sandbox)
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
- collect=self.get_variable('install-root'))
+ self.batch_prepare_assemble(
+ SandboxFlags.ROOT_READ_ONLY, collect=self.get_variable("install-root")
+ )
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/script.py b/src/buildstream/plugins/elements/script.py
index f3f0a2f7a..abfb7b3b0 100644
--- a/src/buildstream/plugins/elements/script.py
+++ b/src/buildstream/plugins/elements/script.py
@@ -46,21 +46,19 @@ class ScriptElement(buildstream.ScriptElement):
BST_VIRTUAL_DIRECTORY = True
def configure(self, node):
- for n in node.get_sequence('layout', []):
- dst = self.node_subst_vars(n.get_scalar('destination'))
- elm = self.node_subst_vars(n.get_scalar('element', None))
+ for n in node.get_sequence("layout", []):
+ dst = self.node_subst_vars(n.get_scalar("destination"))
+ elm = self.node_subst_vars(n.get_scalar("element", None))
self.layout_add(elm, dst)
- node.validate_keys([
- 'commands', 'root-read-only', 'layout'
- ])
+ node.validate_keys(["commands", "root-read-only", "layout"])
cmds = self.node_subst_sequence_vars(node.get_sequence("commands"))
self.add_commands("commands", cmds)
self.set_work_dir()
self.set_install_root()
- self.set_root_read_only(node.get_bool('root-read-only', default=False))
+ self.set_root_read_only(node.get_bool("root-read-only", default=False))
# Plugin entry point
diff --git a/src/buildstream/plugins/elements/stack.py b/src/buildstream/plugins/elements/stack.py
index dbb59a43d..e8c1a47d5 100644
--- a/src/buildstream/plugins/elements/stack.py
+++ b/src/buildstream/plugins/elements/stack.py
@@ -58,10 +58,10 @@ class StackElement(Element):
# Just create a dummy empty artifact, its existence is a statement
# that all this stack's dependencies are built.
vrootdir = sandbox.get_virtual_directory()
- vrootdir.descend('output', create=True)
+ vrootdir.descend("output", create=True)
# And we're done
- return '/output'
+ return "/output"
# Plugin entry point
diff --git a/src/buildstream/plugins/sources/_downloadablefilesource.py b/src/buildstream/plugins/sources/_downloadablefilesource.py
index 2d53f8a56..2db4274f2 100644
--- a/src/buildstream/plugins/sources/_downloadablefilesource.py
+++ b/src/buildstream/plugins/sources/_downloadablefilesource.py
@@ -12,7 +12,6 @@ from buildstream import utils
class _NetrcFTPOpener(urllib.request.FTPHandler):
-
def __init__(self, netrc_config):
self.netrc = netrc_config
@@ -28,11 +27,11 @@ class _NetrcFTPOpener(urllib.request.FTPHandler):
def _unsplit(self, host, port, user, passwd):
if port:
- host = '{}:{}'.format(host, port)
+ host = "{}:{}".format(host, port)
if user:
if passwd:
- user = '{}:{}'.format(user, passwd)
- host = '{}@{}'.format(user, host)
+ user = "{}:{}".format(user, passwd)
+ host = "{}@{}".format(user, host)
return host
@@ -50,7 +49,6 @@ class _NetrcFTPOpener(urllib.request.FTPHandler):
class _NetrcPasswordManager:
-
def __init__(self, netrc_config):
self.netrc = netrc_config
@@ -72,17 +70,18 @@ class _NetrcPasswordManager:
class DownloadableFileSource(Source):
# pylint: disable=attribute-defined-outside-init
- COMMON_CONFIG_KEYS = Source.COMMON_CONFIG_KEYS + ['url', 'ref', 'etag']
+ COMMON_CONFIG_KEYS = Source.COMMON_CONFIG_KEYS + ["url", "ref", "etag"]
__urlopener = None
__default_mirror_file = None
def configure(self, node):
- self.original_url = node.get_str('url')
- self.ref = node.get_str('ref', None)
+ self.original_url = node.get_str("url")
+ self.ref = node.get_str("ref", None)
self.url = self.translate_url(self.original_url)
- self._mirror_dir = os.path.join(self.get_mirror_directory(),
- utils.url_directory_name(self.original_url))
+ self._mirror_dir = os.path.join(
+ self.get_mirror_directory(), utils.url_directory_name(self.original_url)
+ )
self._warn_deprecated_etag(node)
def preflight(self):
@@ -102,28 +101,29 @@ class DownloadableFileSource(Source):
return Consistency.RESOLVED
def load_ref(self, node):
- self.ref = node.get_str('ref', None)
+ self.ref = node.get_str("ref", None)
self._warn_deprecated_etag(node)
def get_ref(self):
return self.ref
def set_ref(self, ref, node):
- node['ref'] = self.ref = ref
+ node["ref"] = self.ref = ref
def track(self):
# there is no 'track' field in the source to determine what/whether
# or not to update refs, because tracking a ref is always a conscious
# decision by the user.
- with self.timed_activity("Tracking {}".format(self.url),
- silent_nested=True):
+ with self.timed_activity("Tracking {}".format(self.url), silent_nested=True):
new_ref = self._ensure_mirror()
if self.ref and self.ref != new_ref:
- detail = "When tracking, new ref differs from current ref:\n" \
- + " Tracked URL: {}\n".format(self.url) \
- + " Current ref: {}\n".format(self.ref) \
+ detail = (
+ "When tracking, new ref differs from current ref:\n"
+ + " Tracked URL: {}\n".format(self.url)
+ + " Current ref: {}\n".format(self.ref)
+                + " New ref: {}\n".format(new_ref)
+ )
self.warn("Potential man-in-the-middle attack!", detail=detail)
return new_ref
@@ -142,25 +142,28 @@ class DownloadableFileSource(Source):
with self.timed_activity("Fetching {}".format(self.url), silent_nested=True):
sha256 = self._ensure_mirror()
if sha256 != self.ref:
- raise SourceError("File downloaded from {} has sha256sum '{}', not '{}'!"
- .format(self.url, sha256, self.ref))
+ raise SourceError(
+ "File downloaded from {} has sha256sum '{}', not '{}'!".format(
+ self.url, sha256, self.ref
+ )
+ )
def _warn_deprecated_etag(self, node):
- etag = node.get_str('etag', None)
+ etag = node.get_str("etag", None)
if etag:
provenance = node.get_scalar(etag).get_provenance()
self.warn('{} "etag" is deprecated and ignored.'.format(provenance))
def _get_etag(self, ref):
- etagfilename = os.path.join(self._mirror_dir, '{}.etag'.format(ref))
+ etagfilename = os.path.join(self._mirror_dir, "{}.etag".format(ref))
if os.path.exists(etagfilename):
- with open(etagfilename, 'r') as etagfile:
+ with open(etagfilename, "r") as etagfile:
return etagfile.read()
return None
def _store_etag(self, ref, etag):
- etagfilename = os.path.join(self._mirror_dir, '{}.etag'.format(ref))
+ etagfilename = os.path.join(self._mirror_dir, "{}.etag".format(ref))
with utils.save_file_atomic(etagfilename) as etagfile:
etagfile.write(etag)
@@ -170,7 +173,7 @@ class DownloadableFileSource(Source):
with self.tempdir() as td:
default_name = os.path.basename(self.url)
request = urllib.request.Request(self.url)
- request.add_header('Accept', '*/*')
+ request.add_header("Accept", "*/*")
# We do not use etag in case what we have in cache is
# not matching ref in order to be able to recover from
@@ -180,18 +183,18 @@ class DownloadableFileSource(Source):
# Do not re-download the file if the ETag matches.
if etag and self.get_consistency() == Consistency.CACHED:
- request.add_header('If-None-Match', etag)
+ request.add_header("If-None-Match", etag)
opener = self.__get_urlopener()
with contextlib.closing(opener.open(request)) as response:
info = response.info()
- etag = info['ETag'] if 'ETag' in info else None
+ etag = info["ETag"] if "ETag" in info else None
filename = info.get_filename(default_name)
filename = os.path.basename(filename)
local_file = os.path.join(td, filename)
- with open(local_file, 'wb') as dest:
+ with open(local_file, "wb") as dest:
shutil.copyfileobj(response, dest)
# Make sure url-specific mirror dir exists.
@@ -214,14 +217,21 @@ class DownloadableFileSource(Source):
# Because we use etag only for matching ref, currently specified ref is what
# we would have downloaded.
return self.ref
- raise SourceError("{}: Error mirroring {}: {}"
- .format(self, self.url, e), temporary=True) from e
-
- except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError, ValueError) as e:
+ raise SourceError(
+ "{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True
+ ) from e
+
+ except (
+ urllib.error.URLError,
+ urllib.error.ContentTooShortError,
+ OSError,
+ ValueError,
+ ) as e:
# Note that urllib.request.Request in the try block may throw a
# ValueError for unknown url types, so we handle it here.
- raise SourceError("{}: Error mirroring {}: {}"
- .format(self, self.url, e), temporary=True) from e
+ raise SourceError(
+ "{}: Error mirroring {}: {}".format(self, self.url, e), temporary=True
+ ) from e
def _get_mirror_file(self, sha=None):
if sha is not None:
@@ -245,11 +255,13 @@ class DownloadableFileSource(Source):
#
DownloadableFileSource.__urlopener = urllib.request.build_opener()
except netrc.NetrcParseError as e:
- self.warn('{}: While reading .netrc: {}'.format(self, e))
+ self.warn("{}: While reading .netrc: {}".format(self, e))
return urllib.request.build_opener()
else:
netrc_pw_mgr = _NetrcPasswordManager(netrc_config)
http_auth = urllib.request.HTTPBasicAuthHandler(netrc_pw_mgr)
ftp_handler = _NetrcFTPOpener(netrc_config)
- DownloadableFileSource.__urlopener = urllib.request.build_opener(http_auth, ftp_handler)
+ DownloadableFileSource.__urlopener = urllib.request.build_opener(
+ http_auth, ftp_handler
+ )
return DownloadableFileSource.__urlopener
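
A minimal, self-contained sketch of the conditional download pattern used above: the cached ETag is sent as If-None-Match so an unchanged file is not re-downloaded. The URL is supplied by the caller, and the 304 handling reflects standard urllib behaviour rather than the exact BuildStream code:

    import urllib.error
    import urllib.request

    def fetch_if_changed(url, cached_etag=None):
        request = urllib.request.Request(url)
        request.add_header("Accept", "*/*")
        if cached_etag:
            request.add_header("If-None-Match", cached_etag)
        try:
            with urllib.request.urlopen(request) as response:
                info = response.info()
                etag = info["ETag"] if "ETag" in info else None
                return response.read(), etag
        except urllib.error.HTTPError as e:
            if e.code == 304:  # Not Modified: keep using the mirrored copy
                return None, cached_etag
            raise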
diff --git a/src/buildstream/plugins/sources/bzr.py b/src/buildstream/plugins/sources/bzr.py
index 6fccf1e8b..657a885c2 100644
--- a/src/buildstream/plugins/sources/bzr.py
+++ b/src/buildstream/plugins/sources/bzr.py
@@ -67,16 +67,16 @@ class BzrSource(Source):
# pylint: disable=attribute-defined-outside-init
def configure(self, node):
- node.validate_keys(['url', 'track', 'ref', *Source.COMMON_CONFIG_KEYS])
+ node.validate_keys(["url", "track", "ref", *Source.COMMON_CONFIG_KEYS])
- self.original_url = node.get_str('url')
- self.tracking = node.get_str('track')
- self.ref = node.get_str('ref', None)
+ self.original_url = node.get_str("url")
+ self.tracking = node.get_str("track")
+ self.ref = node.get_str("ref", None)
self.url = self.translate_url(self.original_url)
def preflight(self):
# Check if bzr is installed, get the binary at the same time.
- self.host_bzr = utils.get_host_tool('bzr')
+ self.host_bzr = utils.get_host_tool("bzr")
def get_unique_key(self):
return [self.original_url, self.tracking, self.ref]
@@ -93,56 +93,85 @@ class BzrSource(Source):
return Consistency.RESOLVED
def load_ref(self, node):
- self.ref = node.get_str('ref', None)
+ self.ref = node.get_str("ref", None)
def get_ref(self):
return self.ref
def set_ref(self, ref, node):
- node['ref'] = self.ref = ref
+ node["ref"] = self.ref = ref
def track(self):
- with self.timed_activity("Tracking {}".format(self.url),
- silent_nested=True), self._locked():
+ with self.timed_activity(
+ "Tracking {}".format(self.url), silent_nested=True
+ ), self._locked():
self._ensure_mirror(skip_ref_check=True)
- ret, out = self.check_output([self.host_bzr, "version-info",
- "--custom", "--template={revno}",
- self._get_branch_dir()],
- fail="Failed to read the revision number at '{}'"
- .format(self._get_branch_dir()))
+ ret, out = self.check_output(
+ [
+ self.host_bzr,
+ "version-info",
+ "--custom",
+ "--template={revno}",
+ self._get_branch_dir(),
+ ],
+ fail="Failed to read the revision number at '{}'".format(
+ self._get_branch_dir()
+ ),
+ )
if ret != 0:
- raise SourceError("{}: Failed to get ref for tracking {}".format(self, self.tracking))
+ raise SourceError(
+ "{}: Failed to get ref for tracking {}".format(self, self.tracking)
+ )
return out
def fetch(self):
- with self.timed_activity("Fetching {}".format(self.url),
- silent_nested=True), self._locked():
+ with self.timed_activity(
+ "Fetching {}".format(self.url), silent_nested=True
+ ), self._locked():
self._ensure_mirror()
def stage(self, directory):
- self.call([self.host_bzr, "checkout", "--lightweight",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir(), directory],
- fail="Failed to checkout revision {} from branch {} to {}"
- .format(self.ref, self._get_branch_dir(), directory))
+ self.call(
+ [
+ self.host_bzr,
+ "checkout",
+ "--lightweight",
+ "--revision=revno:{}".format(self.ref),
+ self._get_branch_dir(),
+ directory,
+ ],
+ fail="Failed to checkout revision {} from branch {} to {}".format(
+ self.ref, self._get_branch_dir(), directory
+ ),
+ )
# Remove .bzr dir
shutil.rmtree(os.path.join(directory, ".bzr"))
def init_workspace(self, directory):
url = os.path.join(self.url, self.tracking)
- with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
+ with self.timed_activity(
+ 'Setting up workspace "{}"'.format(directory), silent_nested=True
+ ):
# Checkout from the cache
- self.call([self.host_bzr, "branch",
- "--use-existing-dir",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir(), directory],
- fail="Failed to branch revision {} from branch {} to {}"
- .format(self.ref, self._get_branch_dir(), directory))
+ self.call(
+ [
+ self.host_bzr,
+ "branch",
+ "--use-existing-dir",
+ "--revision=revno:{}".format(self.ref),
+ self._get_branch_dir(),
+ directory,
+ ],
+ fail="Failed to branch revision {} from branch {} to {}".format(
+ self.ref, self._get_branch_dir(), directory
+ ),
+ )
# Switch the parent branch to the source's origin
- self.call([self.host_bzr, "switch",
- "--directory={}".format(directory), url],
- fail="Failed to switch workspace's parent branch to {}".format(url))
+ self.call(
+ [self.host_bzr, "switch", "--directory={}".format(directory), url],
+ fail="Failed to switch workspace's parent branch to {}".format(url),
+ )
# _locked()
#
@@ -151,13 +180,12 @@ class BzrSource(Source):
#
@contextmanager
def _locked(self):
- lockdir = os.path.join(self.get_mirror_directory(), 'locks')
+ lockdir = os.path.join(self.get_mirror_directory(), "locks")
lockfile = os.path.join(
- lockdir,
- utils.url_directory_name(self.original_url) + '.lock'
+ lockdir, utils.url_directory_name(self.original_url) + ".lock"
)
os.makedirs(lockdir, exist_ok=True)
- with open(lockfile, 'w') as lock:
+ with open(lockfile, "w") as lock:
fcntl.flock(lock, fcntl.LOCK_EX)
try:
yield
@@ -169,41 +197,63 @@ class BzrSource(Source):
if not os.path.exists(self._get_branch_dir()):
return False
- return self.call([self.host_bzr, "revno",
- "--revision=revno:{}".format(self.ref),
- self._get_branch_dir()]) == 0
+ return (
+ self.call(
+ [
+ self.host_bzr,
+ "revno",
+ "--revision=revno:{}".format(self.ref),
+ self._get_branch_dir(),
+ ]
+ )
+ == 0
+ )
def _get_branch_dir(self):
return os.path.join(self._get_mirror_dir(), self.tracking)
def _get_mirror_dir(self):
- return os.path.join(self.get_mirror_directory(),
- utils.url_directory_name(self.original_url))
+ return os.path.join(
+ self.get_mirror_directory(), utils.url_directory_name(self.original_url)
+ )
def _ensure_mirror(self, skip_ref_check=False):
mirror_dir = self._get_mirror_dir()
bzr_metadata_dir = os.path.join(mirror_dir, ".bzr")
if not os.path.exists(bzr_metadata_dir):
- self.call([self.host_bzr, "init-repo", "--no-trees", mirror_dir],
- fail="Failed to initialize bzr repository")
+ self.call(
+ [self.host_bzr, "init-repo", "--no-trees", mirror_dir],
+ fail="Failed to initialize bzr repository",
+ )
branch_dir = os.path.join(mirror_dir, self.tracking)
branch_url = self.url + "/" + self.tracking
if not os.path.exists(branch_dir):
# `bzr branch` the branch if it doesn't exist
# to get the upstream code
- self.call([self.host_bzr, "branch", branch_url, branch_dir],
- fail="Failed to branch from {} to {}".format(branch_url, branch_dir))
+ self.call(
+ [self.host_bzr, "branch", branch_url, branch_dir],
+ fail="Failed to branch from {} to {}".format(branch_url, branch_dir),
+ )
else:
# `bzr pull` the branch if it does exist
# to get any changes to the upstream code
- self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
- fail="Failed to pull new changes for {}".format(branch_dir))
+ self.call(
+ [
+ self.host_bzr,
+ "pull",
+ "--directory={}".format(branch_dir),
+ branch_url,
+ ],
+ fail="Failed to pull new changes for {}".format(branch_dir),
+ )
if not skip_ref_check and not self._check_ref():
- raise SourceError("Failed to ensure ref '{}' was mirrored".format(self.ref),
- reason="ref-not-mirrored")
+ raise SourceError(
+ "Failed to ensure ref '{}' was mirrored".format(self.ref),
+ reason="ref-not-mirrored",
+ )
def setup():
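
The _locked() helper above serializes mirror updates between concurrent BuildStream processes by taking an exclusive flock on a per-URL lock file. A generic sketch of that pattern, outside any plugin, with made-up paths for illustration:

import fcntl
import os
from contextlib import contextmanager


@contextmanager
def exclusive_lock(lockdir, name):
    # One lock file per resource; the directory is created on demand.
    os.makedirs(lockdir, exist_ok=True)
    lockfile = os.path.join(lockdir, name + ".lock")
    with open(lockfile, "w") as lock:
        fcntl.flock(lock, fcntl.LOCK_EX)
        try:
            yield
        finally:
            # Explicit unlock; closing the file descriptor would also release it.
            fcntl.flock(lock, fcntl.LOCK_UN)


# Usage: only one process at a time runs the guarded block.
# with exclusive_lock("/tmp/mirror-locks", "example-repo"):
#     update_mirror()
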
diff --git a/src/buildstream/plugins/sources/deb.py b/src/buildstream/plugins/sources/deb.py
index cc88cf53c..407241689 100644
--- a/src/buildstream/plugins/sources/deb.py
+++ b/src/buildstream/plugins/sources/deb.py
@@ -50,7 +50,7 @@ details on common configuration options for sources.
import tarfile
from contextlib import contextmanager
-import arpy # pylint: disable=import-error
+import arpy # pylint: disable=import-error
from .tar import TarSource
@@ -61,17 +61,19 @@ class DebSource(TarSource):
def configure(self, node):
super().configure(node)
- self.base_dir = node.get_str('base-dir', None)
+ self.base_dir = node.get_str("base-dir", None)
def preflight(self):
return
@contextmanager
def _get_tar(self):
- with open(self._get_mirror_file(), 'rb') as deb_file:
+ with open(self._get_mirror_file(), "rb") as deb_file:
arpy_archive = arpy.Archive(fileobj=deb_file)
arpy_archive.read_all_headers()
- data_tar_arpy = [v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k][0]
+ data_tar_arpy = [
+ v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k
+ ][0]
# ArchiveFileData is not enough like a file object for tarfile to use.
# Monkey-patching a seekable method makes it close enough for TarFile to open.
data_tar_arpy.seekable = lambda *args: True
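
As the hunk above shows, a .deb package is an ar archive whose data.tar* member holds the filesystem payload; arpy exposes the members, and tarfile can then read the payload once the member object advertises itself as seekable (arpy's file objects support seek and read, only the seekable() probe is missing, hence the monkey-patch). A rough standalone sketch of the same idea, with a hypothetical path and an assumed "r:*" tarfile mode:

import tarfile

import arpy  # pylint: disable=import-error


def list_deb_payload(deb_path):
    with open(deb_path, "rb") as deb_file:
        archive = arpy.Archive(fileobj=deb_file)
        archive.read_all_headers()
        # Pick the data.tar / data.tar.gz / data.tar.xz member.
        data_tar = [
            v for k, v in archive.archived_files.items() if b"data.tar" in k
        ][0]
        # tarfile expects a seekable() method on the file object.
        data_tar.seekable = lambda *args: True
        # "r:*" lets tarfile pick the compression transparently.
        with tarfile.open(fileobj=data_tar, mode="r:*") as tar:
            return tar.getnames()
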
diff --git a/src/buildstream/plugins/sources/local.py b/src/buildstream/plugins/sources/local.py
index 6114c60c9..471992af9 100644
--- a/src/buildstream/plugins/sources/local.py
+++ b/src/buildstream/plugins/sources/local.py
@@ -55,8 +55,8 @@ class LocalSource(Source):
self.__unique_key = None
def configure(self, node):
- node.validate_keys(['path', *Source.COMMON_CONFIG_KEYS])
- self.path = self.node_get_project_path(node.get_scalar('path'))
+ node.validate_keys(["path", *Source.COMMON_CONFIG_KEYS])
+ self.path = self.node_get_project_path(node.get_scalar("path"))
self.fullpath = os.path.join(self.get_project_directory(), self.path)
def preflight(self):
@@ -91,7 +91,8 @@ class LocalSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
"Failed to stage source: files clash with existing directory",
- reason='ensure-stage-dir-fail')
+ reason="ensure-stage-dir-fail",
+ )
def _get_local_path(self):
return self.fullpath
diff --git a/src/buildstream/plugins/sources/patch.py b/src/buildstream/plugins/sources/patch.py
index 24b5bfe2f..2be4ee2f7 100644
--- a/src/buildstream/plugins/sources/patch.py
+++ b/src/buildstream/plugins/sources/patch.py
@@ -56,8 +56,9 @@ class PatchSource(Source):
def configure(self, node):
node.validate_keys(["path", "strip-level", *Source.COMMON_CONFIG_KEYS])
- self.path = self.node_get_project_path(node.get_scalar('path'),
- check_is_file=True)
+ self.path = self.node_get_project_path(
+ node.get_scalar("path"), check_is_file=True
+ )
self.strip_level = node.get_int("strip-level", default=1)
self.fullpath = os.path.join(self.get_project_directory(), self.path)
@@ -89,12 +90,23 @@ class PatchSource(Source):
# Bail out with a comprehensive message if the target directory is empty
if not os.listdir(directory):
- raise SourceError("Nothing to patch in directory '{}'".format(directory),
- reason="patch-no-files")
+ raise SourceError(
+ "Nothing to patch in directory '{}'".format(directory),
+ reason="patch-no-files",
+ )
strip_level_option = "-p{}".format(self.strip_level)
- self.call([self.host_patch, strip_level_option, "-i", self.fullpath, "-d", directory],
- fail="Failed to apply patch {}".format(self.path))
+ self.call(
+ [
+ self.host_patch,
+ strip_level_option,
+ "-i",
+ self.fullpath,
+ "-d",
+ directory,
+ ],
+ fail="Failed to apply patch {}".format(self.path),
+ )
# Plugin entry point
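
The staging logic above shells out to the host patch tool with a configurable strip level. Roughly the same invocation as a standalone helper, with hypothetical arguments:

import subprocess


def apply_patch(patch_file, target_dir, strip_level=1):
    # Equivalent of: patch -p<strip_level> -i <patch_file> -d <target_dir>
    cmd = [
        "patch",
        "-p{}".format(strip_level),
        "-i",
        patch_file,
        "-d",
        target_dir,
    ]
    subprocess.run(cmd, check=True)
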
diff --git a/src/buildstream/plugins/sources/pip.py b/src/buildstream/plugins/sources/pip.py
index 40ddf8c68..41e414855 100644
--- a/src/buildstream/plugins/sources/pip.py
+++ b/src/buildstream/plugins/sources/pip.py
@@ -74,21 +74,21 @@ import re
from buildstream import Consistency, Source, SourceError, utils
-_OUTPUT_DIRNAME = '.bst_pip_downloads'
-_PYPI_INDEX_URL = 'https://pypi.org/simple/'
+_OUTPUT_DIRNAME = ".bst_pip_downloads"
+_PYPI_INDEX_URL = "https://pypi.org/simple/"
# Used only for finding pip command
_PYTHON_VERSIONS = [
- 'python', # when running in a venv, we might not have the exact version
- 'python2.7',
- 'python3.0',
- 'python3.1',
- 'python3.2',
- 'python3.3',
- 'python3.4',
- 'python3.5',
- 'python3.6',
- 'python3.7',
+ "python", # when running in a venv, we might not have the exact version
+ "python2.7",
+ "python3.0",
+ "python3.1",
+ "python3.2",
+ "python3.3",
+ "python3.4",
+ "python3.5",
+ "python3.6",
+ "python3.7",
]
# List of allowed extensions taken from
@@ -96,8 +96,9 @@ _PYTHON_VERSIONS = [
# Names of source distribution archives must be of the form
# '%{package-name}-%{version}.%{extension}'.
_SDIST_RE = re.compile(
- r'^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$',
- re.IGNORECASE)
+ r"^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$",
+ re.IGNORECASE,
+)
class PipSource(Source):
@@ -109,16 +110,21 @@ class PipSource(Source):
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = True
def configure(self, node):
- node.validate_keys(['url', 'packages', 'ref', 'requirements-files'] +
- Source.COMMON_CONFIG_KEYS)
- self.ref = node.get_str('ref', None)
- self.original_url = node.get_str('url', _PYPI_INDEX_URL)
+ node.validate_keys(
+ ["url", "packages", "ref", "requirements-files"] + Source.COMMON_CONFIG_KEYS
+ )
+ self.ref = node.get_str("ref", None)
+ self.original_url = node.get_str("url", _PYPI_INDEX_URL)
self.index_url = self.translate_url(self.original_url)
- self.packages = node.get_str_list('packages', [])
- self.requirements_files = node.get_str_list('requirements-files', [])
+ self.packages = node.get_str_list("packages", [])
+ self.requirements_files = node.get_str_list("requirements-files", [])
if not (self.packages or self.requirements_files):
- raise SourceError("{}: Either 'packages' or 'requirements-files' must be specified". format(self))
+ raise SourceError(
+ "{}: Either 'packages' or 'requirements-files' must be specified".format(
+ self
+ )
+ )
def preflight(self):
# Try to find a pip version that supports download command
@@ -126,9 +132,9 @@ class PipSource(Source):
for python in reversed(_PYTHON_VERSIONS):
try:
host_python = utils.get_host_tool(python)
- rc = self.call([host_python, '-m', 'pip', 'download', '--help'])
+ rc = self.call([host_python, "-m", "pip", "download", "--help"])
if rc == 0:
- self.host_pip = [host_python, '-m', 'pip']
+ self.host_pip = [host_python, "-m", "pip"]
break
except utils.ProgramNotFoundError:
pass
@@ -150,10 +156,10 @@ class PipSource(Source):
return self.ref
def load_ref(self, node):
- self.ref = node.get_str('ref', None)
+ self.ref = node.get_str("ref", None)
def set_ref(self, ref, node):
- node['ref'] = self.ref = ref
+ node["ref"] = self.ref = ref
def track(self, previous_sources_dir):
# XXX pip does not offer any public API other than the CLI tool so it
@@ -163,32 +169,44 @@ class PipSource(Source):
# for details.
# As a result, we have to wastefully install the packages during track.
with self.tempdir() as tmpdir:
- install_args = self.host_pip + ['download',
- '--no-binary', ':all:',
- '--index-url', self.index_url,
- '--dest', tmpdir]
+ install_args = self.host_pip + [
+ "download",
+ "--no-binary",
+ ":all:",
+ "--index-url",
+ self.index_url,
+ "--dest",
+ tmpdir,
+ ]
for requirement_file in self.requirements_files:
fpath = os.path.join(previous_sources_dir, requirement_file)
- install_args += ['-r', fpath]
+ install_args += ["-r", fpath]
install_args += self.packages
self.call(install_args, fail="Failed to install python packages")
reqs = self._parse_sdist_names(tmpdir)
- return '\n'.join(["{}=={}".format(pkg, ver) for pkg, ver in reqs])
+ return "\n".join(["{}=={}".format(pkg, ver) for pkg, ver in reqs])
def fetch(self):
with self.tempdir() as tmpdir:
- packages = self.ref.strip().split('\n')
- package_dir = os.path.join(tmpdir, 'packages')
+ packages = self.ref.strip().split("\n")
+ package_dir = os.path.join(tmpdir, "packages")
os.makedirs(package_dir)
- self.call([*self.host_pip,
- 'download',
- '--no-binary', ':all:',
- '--index-url', self.index_url,
- '--dest', package_dir,
- *packages],
- fail="Failed to install python packages: {}".format(packages))
+ self.call(
+ [
+ *self.host_pip,
+ "download",
+ "--no-binary",
+ ":all:",
+ "--index-url",
+ self.index_url,
+ "--dest",
+ package_dir,
+ *packages,
+ ],
+ fail="Failed to install python packages: {}".format(packages),
+ )
# If the mirror directory already exists, assume that some other
# process has fetched the sources before us and ensure that we do
@@ -200,8 +218,11 @@ class PipSource(Source):
# before us.
pass
except OSError as e:
- raise SourceError("{}: Failed to move downloaded pip packages from '{}' to '{}': {}"
- .format(self, package_dir, self._mirror, e)) from e
+ raise SourceError(
+ "{}: Failed to move downloaded pip packages from '{}' to '{}': {}".format(
+ self, package_dir, self._mirror, e
+ )
+ ) from e
def stage(self, directory):
with self.timed_activity("Staging Python packages", silent_nested=True):
@@ -213,9 +234,11 @@ class PipSource(Source):
def _mirror(self):
if not self.ref:
return None
- return os.path.join(self.get_mirror_directory(),
- utils.url_directory_name(self.original_url),
- hashlib.sha256(self.ref.encode()).hexdigest())
+ return os.path.join(
+ self.get_mirror_directory(),
+ utils.url_directory_name(self.original_url),
+ hashlib.sha256(self.ref.encode()).hexdigest(),
+ )
# Parse names of downloaded source distributions
#
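
The track and fetch logic above drives `pip download` as a CLI, since pip offers no stable public API for this, and records a newline-separated list of pkg==version pins as the ref. A hedged sketch of the same invocation; the index URL default and package list are placeholders:

import subprocess
import sys


def download_sdists(packages, dest, index_url="https://pypi.org/simple/"):
    # Source distributions only; pip resolves and downloads into dest.
    cmd = [
        sys.executable,
        "-m",
        "pip",
        "download",
        "--no-binary",
        ":all:",
        "--index-url",
        index_url,
        "--dest",
        dest,
        *packages,
    ]
    subprocess.run(cmd, check=True)
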
diff --git a/src/buildstream/plugins/sources/remote.py b/src/buildstream/plugins/sources/remote.py
index 68aa577fc..6705d20e5 100644
--- a/src/buildstream/plugins/sources/remote.py
+++ b/src/buildstream/plugins/sources/remote.py
@@ -62,13 +62,17 @@ class RemoteSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.filename = node.get_str('filename', os.path.basename(self.url))
- self.executable = node.get_bool('executable', default=False)
+ self.filename = node.get_str("filename", os.path.basename(self.url))
+ self.executable = node.get_bool("executable", default=False)
if os.sep in self.filename:
- raise SourceError('{}: filename parameter cannot contain directories'.format(self),
- reason="filename-contains-directory")
- node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename', 'executable'])
+ raise SourceError(
+ "{}: filename parameter cannot contain directories".format(self),
+ reason="filename-contains-directory",
+ )
+ node.validate_keys(
+ DownloadableFileSource.COMMON_CONFIG_KEYS + ["filename", "executable"]
+ )
def get_unique_key(self):
return super().get_unique_key() + [self.filename, self.executable]
diff --git a/src/buildstream/plugins/sources/tar.py b/src/buildstream/plugins/sources/tar.py
index 702b7ba56..7e5868baa 100644
--- a/src/buildstream/plugins/sources/tar.py
+++ b/src/buildstream/plugins/sources/tar.py
@@ -73,6 +73,7 @@ class ReadableTarInfo(tarfile.TarInfo):
`mode` attribute in `TarInfo`, the class that encapsulates the internal meta-data of the tarball,
so that the owner-read bit is always set.
"""
+
@property
def mode(self):
# ensure file is readable by owner
@@ -89,13 +90,13 @@ class TarSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.base_dir = node.get_str('base-dir', '*')
- node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
+ self.base_dir = node.get_str("base-dir", "*")
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["base-dir"])
def preflight(self):
self.host_lzip = None
- if self.url.endswith('.lz'):
- self.host_lzip = utils.get_host_tool('lzip')
+ if self.url.endswith(".lz"):
+ self.host_lzip = utils.get_host_tool("lzip")
def get_unique_key(self):
return super().get_unique_key() + [self.base_dir]
@@ -104,19 +105,19 @@ class TarSource(DownloadableFileSource):
def _run_lzip(self):
assert self.host_lzip
with TemporaryFile() as lzip_stdout:
- with open(self._get_mirror_file(), 'r') as lzip_file:
- self.call([self.host_lzip, '-d'],
- stdin=lzip_file,
- stdout=lzip_stdout)
+ with open(self._get_mirror_file(), "r") as lzip_file:
+ self.call([self.host_lzip, "-d"], stdin=lzip_file, stdout=lzip_stdout)
lzip_stdout.seek(0, 0)
yield lzip_stdout
@contextmanager
def _get_tar(self):
- if self.url.endswith('.lz'):
+ if self.url.endswith(".lz"):
with self._run_lzip() as lzip_dec:
- with tarfile.open(fileobj=lzip_dec, mode='r:', tarinfo=ReadableTarInfo) as tar:
+ with tarfile.open(
+ fileobj=lzip_dec, mode="r:", tarinfo=ReadableTarInfo
+ ) as tar:
yield tar
else:
with tarfile.open(self._get_mirror_file(), tarinfo=ReadableTarInfo) as tar:
@@ -130,7 +131,10 @@ class TarSource(DownloadableFileSource):
base_dir = self._find_base_dir(tar, self.base_dir)
if base_dir:
- tar.extractall(path=directory, members=self._extract_members(tar, base_dir, directory))
+ tar.extractall(
+ path=directory,
+ members=self._extract_members(tar, base_dir, directory),
+ )
else:
tar.extractall(path=directory)
@@ -147,14 +151,18 @@ class TarSource(DownloadableFileSource):
def assert_safe(member):
final_path = os.path.abspath(os.path.join(target_dir, member.path))
if not final_path.startswith(target_dir):
- raise SourceError("{}: Tarfile attempts to extract outside the staging area: "
- "{} -> {}".format(self, member.path, final_path))
+ raise SourceError(
+ "{}: Tarfile attempts to extract outside the staging area: "
+ "{} -> {}".format(self, member.path, final_path)
+ )
if member.islnk():
linked_path = os.path.abspath(os.path.join(target_dir, member.linkname))
if not linked_path.startswith(target_dir):
- raise SourceError("{}: Tarfile attempts to hardlink outside the staging area: "
- "{} -> {}".format(self, member.path, final_path))
+ raise SourceError(
+ "{}: Tarfile attempts to hardlink outside the staging area: "
+ "{} -> {}".format(self, member.path, final_path)
+ )
# Don't need to worry about symlinks because they're just
# files here and won't be able to do much harm once we are
@@ -167,9 +175,9 @@ class TarSource(DownloadableFileSource):
for member in tar.getmembers():
# First, ensure that a member never starts with `./`
- if member.path.startswith('./'):
+ if member.path.startswith("./"):
member.path = member.path[2:]
- if member.islnk() and member.linkname.startswith('./'):
+ if member.islnk() and member.linkname.startswith("./"):
member.linkname = member.linkname[2:]
# Now extract only the paths which match the normalized path
@@ -202,16 +210,16 @@ class TarSource(DownloadableFileSource):
# Remove any possible leading './', offer more consistent behavior
# across tarballs encoded with or without a leading '.'
- member_name = member.name.lstrip('./')
+ member_name = member.name.lstrip("./")
if not member.isdir():
# Loop over the components of a path, for a path of a/b/c/d
# we will first visit 'a', then 'a/b' and then 'a/b/c', excluding
# the final component
- components = member_name.split('/')
+ components = member_name.split("/")
for i in range(len(components) - 1):
- dir_component = '/'.join([components[j] for j in range(i + 1)])
+ dir_component = "/".join([components[j] for j in range(i + 1)])
if dir_component not in visited:
visited.add(dir_component)
try:
@@ -219,7 +227,7 @@ class TarSource(DownloadableFileSource):
# exist in the archive
_ = tar.getmember(dir_component)
except KeyError:
- if dir_component != '.':
+ if dir_component != ".":
yield dir_component
continue
@@ -227,7 +235,7 @@ class TarSource(DownloadableFileSource):
# Avoid considering the '.' directory, if any is included in the archive
# this is to avoid the default 'base-dir: *' value behaving differently
# depending on whether the tarball was encoded with a leading '.' or not
- elif member_name == '.':
+ elif member_name == ".":
continue
yield member_name
@@ -236,7 +244,11 @@ class TarSource(DownloadableFileSource):
paths = self._list_tar_paths(tar)
matches = sorted(list(utils.glob(paths, pattern)))
if not matches:
- raise SourceError("{}: Could not find base directory matching pattern: {}".format(self, pattern))
+ raise SourceError(
+ "{}: Could not find base directory matching pattern: {}".format(
+ self, pattern
+ )
+ )
return matches[0]
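
The assert_safe() guard above defends against path traversal: every member path, and every hardlink target, must resolve inside the staging directory before extraction. A minimal standalone version of that check (a sketch mirroring the plugin's prefix test, not the plugin API itself):

import os
import tarfile


def safe_extract(tar_path, target_dir):
    target_dir = os.path.abspath(target_dir)
    with tarfile.open(tar_path) as tar:
        for member in tar.getmembers():
            final_path = os.path.abspath(os.path.join(target_dir, member.path))
            # Same startswith check as the plugin; a stricter variant would
            # compare against target_dir + os.sep.
            if not final_path.startswith(target_dir):
                raise RuntimeError(
                    "refusing to extract outside {}: {}".format(target_dir, member.path)
                )
            if member.islnk():
                linked = os.path.abspath(os.path.join(target_dir, member.linkname))
                if not linked.startswith(target_dir):
                    raise RuntimeError(
                        "refusing to hardlink outside {}: {}".format(target_dir, member.linkname)
                    )
        tar.extractall(path=target_dir)
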
diff --git a/src/buildstream/plugins/sources/workspace.py b/src/buildstream/plugins/sources/workspace.py
index 1088f07f6..56b4db1a4 100644
--- a/src/buildstream/plugins/sources/workspace.py
+++ b/src/buildstream/plugins/sources/workspace.py
@@ -62,9 +62,9 @@ class WorkspaceSource(Source):
return None
def configure(self, node: MappingNode) -> None:
- node.validate_keys(['path', 'ref', 'kind'])
- self.path = node.get_str('path')
- self.__digest = node.get_str('ref')
+ node.validate_keys(["path", "ref", "kind"])
+ self.path = node.get_str("path")
+ self.__digest = node.get_str("ref")
def preflight(self) -> None:
pass # pragma: nocover
@@ -82,7 +82,7 @@ class WorkspaceSource(Source):
#
# Raises AssertionError: existing workspaces should not be reinitialized
def init_workspace(self, directory: Directory) -> None:
- raise AssertionError('Attempting to re-open an existing workspace')
+ raise AssertionError("Attempting to re-open an existing workspace")
def get_consistency(self):
# always return cached state
@@ -99,7 +99,8 @@ class WorkspaceSource(Source):
if result.overwritten or result.ignored:
raise SourceError(
"Failed to stage source: files clash with existing directory",
- reason='ensure-stage-dir-fail')
+ reason="ensure-stage-dir-fail",
+ )
def _get_local_path(self) -> str:
return self.path
diff --git a/src/buildstream/plugins/sources/zip.py b/src/buildstream/plugins/sources/zip.py
index 322be58d7..69324b29d 100644
--- a/src/buildstream/plugins/sources/zip.py
+++ b/src/buildstream/plugins/sources/zip.py
@@ -72,14 +72,16 @@ class ZipSource(DownloadableFileSource):
def configure(self, node):
super().configure(node)
- self.base_dir = node.get_str('base-dir', '*')
- node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ['base-dir'])
+ self.base_dir = node.get_str("base-dir", "*")
+ node.validate_keys(DownloadableFileSource.COMMON_CONFIG_KEYS + ["base-dir"])
def get_unique_key(self):
return super().get_unique_key() + [self.base_dir]
def stage(self, directory):
- exec_rights = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) & ~(stat.S_IWGRP | stat.S_IWOTH)
+ exec_rights = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) & ~(
+ stat.S_IWGRP | stat.S_IWOTH
+ )
noexec_rights = exec_rights & ~(stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
try:
@@ -139,14 +141,14 @@ class ZipSource(DownloadableFileSource):
# ZipInfo.is_dir() is only available in python >= 3.6, but all
# it does is check for a trailing '/' in the name
#
- if not member.filename.endswith('/'):
+ if not member.filename.endswith("/"):
# Loop over the components of a path, for a path of a/b/c/d
# we will first visit 'a', then 'a/b' and then 'a/b/c', excluding
# the final component
- components = member.filename.split('/')
+ components = member.filename.split("/")
for i in range(len(components) - 1):
- dir_component = '/'.join([components[j] for j in range(i + 1)])
+ dir_component = "/".join([components[j] for j in range(i + 1)])
if dir_component not in visited:
visited[dir_component] = True
try:
@@ -154,7 +156,7 @@ class ZipSource(DownloadableFileSource):
# exist in the archive
_ = archive.getinfo(dir_component)
except KeyError:
- if dir_component != '.':
+ if dir_component != ".":
yield dir_component
continue
@@ -162,7 +164,7 @@ class ZipSource(DownloadableFileSource):
# Avoid considering the '.' directory, if any is included in the archive
# this is to avoid the default 'base-dir: *' value behaving differently
# depending on whether the archive was encoded with a leading '.' or not
- elif member.filename == '.' or member.filename == './':
+ elif member.filename == "." or member.filename == "./":
continue
yield member.filename
@@ -171,7 +173,11 @@ class ZipSource(DownloadableFileSource):
paths = self._list_archive_paths(archive)
matches = sorted(list(utils.glob(paths, pattern)))
if not matches:
- raise SourceError("{}: Could not find base directory matching pattern: {}".format(self, pattern))
+ raise SourceError(
+ "{}: Could not find base directory matching pattern: {}".format(
+ self, pattern
+ )
+ )
return matches[0]
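
For the curious, the two rights masks computed in the zip staging hunk above work out to 0o755 and 0o644: full rwx for everyone, minus group and other write, and then minus all execute bits for plain files. A quick check:

import stat

exec_rights = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) & ~(stat.S_IWGRP | stat.S_IWOTH)
noexec_rights = exec_rights & ~(stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

assert oct(exec_rights) == "0o755"
assert oct(noexec_rights) == "0o644"
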
diff --git a/src/buildstream/sandbox/_config.py b/src/buildstream/sandbox/_config.py
index 457f92b3c..614f22063 100644
--- a/src/buildstream/sandbox/_config.py
+++ b/src/buildstream/sandbox/_config.py
@@ -22,7 +22,7 @@
#
# A container for sandbox configuration data. We want the internals
# of this to be opaque, hence putting it in its own private file.
-class SandboxConfig():
+class SandboxConfig:
def __init__(self, build_uid, build_gid, build_os=None, build_arch=None):
self.build_uid = build_uid
self.build_gid = build_gid
@@ -46,17 +46,14 @@ class SandboxConfig():
# However this should be the right place to support
# such configurations in the future.
#
- unique_key = {
- 'os': self.build_os,
- 'arch': self.build_arch
- }
+ unique_key = {"os": self.build_os, "arch": self.build_arch}
# Avoid breaking cache key calculation with
# the addition of configurable build uid/gid
if self.build_uid != 0:
- unique_key['build-uid'] = self.build_uid
+ unique_key["build-uid"] = self.build_uid
if self.build_gid != 0:
- unique_key['build-gid'] = self.build_gid
+ unique_key["build-gid"] = self.build_gid
return unique_key
diff --git a/src/buildstream/sandbox/_mount.py b/src/buildstream/sandbox/_mount.py
index c0f26c8d7..b182a6adc 100644
--- a/src/buildstream/sandbox/_mount.py
+++ b/src/buildstream/sandbox/_mount.py
@@ -29,7 +29,7 @@ from .._fuse import SafeHardlinks
#
# Helper data object representing a single mount point in the mount map
#
-class Mount():
+class Mount:
def __init__(self, sandbox, mount_point, safe_hardlinks, fuse_mount_options=None):
# Getting _get_underlying_directory() here is acceptable as
# we're part of the sandbox code. This will fail if our
@@ -38,7 +38,9 @@ class Mount():
self.mount_point = mount_point
self.safe_hardlinks = safe_hardlinks
- self._fuse_mount_options = {} if fuse_mount_options is None else fuse_mount_options
+ self._fuse_mount_options = (
+ {} if fuse_mount_options is None else fuse_mount_options
+ )
# FIXME: When the criteria for mounting something and its parent
# mount is identical, then there is no need to mount an additional
@@ -53,9 +55,11 @@ class Mount():
scratch_directory = sandbox._get_scratch_directory()
# Redirected mount
self.mount_origin = os.path.join(root_directory, mount_point.lstrip(os.sep))
- self.mount_base = os.path.join(scratch_directory, utils.url_directory_name(mount_point))
- self.mount_source = os.path.join(self.mount_base, 'mount')
- self.mount_tempdir = os.path.join(self.mount_base, 'temp')
+ self.mount_base = os.path.join(
+ scratch_directory, utils.url_directory_name(mount_point)
+ )
+ self.mount_source = os.path.join(self.mount_base, "mount")
+ self.mount_tempdir = os.path.join(self.mount_base, "temp")
os.makedirs(self.mount_origin, exist_ok=True)
os.makedirs(self.mount_tempdir, exist_ok=True)
else:
@@ -74,16 +78,18 @@ class Mount():
# When mounting a regular file, ensure the parent
# directory exists in the sandbox; and that an empty
# file is created at the mount location.
- parent_dir = os.path.dirname(self.mount_source.rstrip('/'))
+ parent_dir = os.path.dirname(self.mount_source.rstrip("/"))
os.makedirs(parent_dir, exist_ok=True)
if not os.path.exists(self.mount_source):
- with open(self.mount_source, 'w'):
+ with open(self.mount_source, "w"):
pass
@contextmanager
def mounted(self, sandbox):
if self.safe_hardlinks:
- mount = SafeHardlinks(self.mount_origin, self.mount_tempdir, self._fuse_mount_options)
+ mount = SafeHardlinks(
+ self.mount_origin, self.mount_tempdir, self._fuse_mount_options
+ )
with mount.mounted(self.mount_source):
yield
else:
@@ -99,8 +105,7 @@ class Mount():
# sandbox (Sandbox): The sandbox object
# root_readonly (bool): Whether the sandbox root is readonly
#
-class MountMap():
-
+class MountMap:
def __init__(self, sandbox, root_readonly, fuse_mount_options=None):
# We will be doing the mounts in the order in which they were declared.
self.mounts = OrderedDict()
@@ -109,15 +114,17 @@ class MountMap():
fuse_mount_options = {}
# We want safe hardlinks on rootfs whenever root is not readonly
- self.mounts['/'] = Mount(sandbox, '/', not root_readonly, fuse_mount_options)
+ self.mounts["/"] = Mount(sandbox, "/", not root_readonly, fuse_mount_options)
for mark in sandbox._get_marked_directories():
- directory = mark['directory']
- artifact = mark['artifact']
+ directory = mark["directory"]
+ artifact = mark["artifact"]
# We want safe hardlinks for any non-root directory where
# artifacts will be staged to
- self.mounts[directory] = Mount(sandbox, directory, artifact, fuse_mount_options)
+ self.mounts[directory] = Mount(
+ sandbox, directory, artifact, fuse_mount_options
+ )
# get_mount_source()
#
diff --git a/src/buildstream/sandbox/_mounter.py b/src/buildstream/sandbox/_mounter.py
index 4e31ef67a..57f35bdec 100644
--- a/src/buildstream/sandbox/_mounter.py
+++ b/src/buildstream/sandbox/_mounter.py
@@ -25,22 +25,29 @@ from .. import utils, _signals
# A class to wrap the `mount` and `umount` system commands
-class Mounter():
+class Mounter:
@classmethod
- def _mount(cls, dest, src=None, mount_type=None,
- stdout=None, stderr=None, options=None,
- flags=None):
+ def _mount(
+ cls,
+ dest,
+ src=None,
+ mount_type=None,
+ stdout=None,
+ stderr=None,
+ options=None,
+ flags=None,
+ ):
if stdout is None:
stdout = sys.stdout
if stderr is None:
stderr = sys.stderr
- argv = [utils.get_host_tool('mount')]
+ argv = [utils.get_host_tool("mount")]
if mount_type:
- argv.extend(['-t', mount_type])
+ argv.extend(["-t", mount_type])
if options:
- argv.extend(['-o', options])
+ argv.extend(["-o", options])
if flags:
argv.extend(flags)
@@ -48,16 +55,12 @@ class Mounter():
argv += [src]
argv += [dest]
- status, _ = utils._call(
- argv,
- terminate=True,
- stdout=stdout,
- stderr=stderr
- )
+ status, _ = utils._call(argv, terminate=True, stdout=stdout, stderr=stderr)
if status != 0:
- raise SandboxError('`{}` failed with exit code {}'
- .format(' '.join(argv), status))
+ raise SandboxError(
+ "`{}` failed with exit code {}".format(" ".join(argv), status)
+ )
return dest
@@ -68,17 +71,13 @@ class Mounter():
if stderr is None:
stderr = sys.stderr
- cmd = [utils.get_host_tool('umount'), '-R', path]
- status, _ = utils._call(
- cmd,
- terminate=True,
- stdout=stdout,
- stderr=stderr
- )
+ cmd = [utils.get_host_tool("umount"), "-R", path]
+ status, _ = utils._call(cmd, terminate=True, stdout=stdout, stderr=stderr)
if status != 0:
- raise SandboxError('`{}` failed with exit code {}'
- .format(' '.join(cmd), status))
+ raise SandboxError(
+ "`{}` failed with exit code {}".format(" ".join(cmd), status)
+ )
# mount()
#
@@ -98,8 +97,7 @@ class Mounter():
#
@classmethod
@contextmanager
- def mount(cls, dest, src=None, stdout=None,
- stderr=None, mount_type=None, **kwargs):
+ def mount(cls, dest, src=None, stdout=None, stderr=None, mount_type=None, **kwargs):
if stdout is None:
stdout = sys.stdout
if stderr is None:
@@ -108,9 +106,11 @@ class Mounter():
def kill_proc():
cls._umount(dest, stdout, stderr)
- options = ','.join([key for key, val in kwargs.items() if val])
+ options = ",".join([key for key, val in kwargs.items() if val])
- path = cls._mount(dest, src, mount_type, stdout=stdout, stderr=stderr, options=options)
+ path = cls._mount(
+ dest, src, mount_type, stdout=stdout, stderr=stderr, options=options
+ )
try:
with _signals.terminator(kill_proc):
yield path
@@ -139,8 +139,7 @@ class Mounter():
#
@classmethod
@contextmanager
- def bind_mount(cls, dest, src=None, stdout=None,
- stderr=None, **kwargs):
+ def bind_mount(cls, dest, src=None, stdout=None, stderr=None, **kwargs):
if stdout is None:
stdout = sys.stdout
if stderr is None:
@@ -149,8 +148,8 @@ class Mounter():
def kill_proc():
cls._umount(dest, stdout, stderr)
- kwargs['rbind'] = True
- options = ','.join([key for key, val in kwargs.items() if val])
+ kwargs["rbind"] = True
+ options = ",".join([key for key, val in kwargs.items() if val])
path = cls._mount(dest, src, None, stdout, stderr, options)
@@ -158,7 +157,7 @@ class Mounter():
with _signals.terminator(kill_proc):
# Make the rbind a slave to avoid unmounting vital devices in
# /proc
- cls._mount(dest, flags=['--make-rslave'])
+ cls._mount(dest, flags=["--make-rslave"])
yield path
finally:
cls._umount(dest, stdout, stderr)
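
The Mounter wrapper above assembles a mount(8) command line, turning truthy keyword arguments into a comma-joined -o option string. Roughly, as a standalone sketch (requires root; arguments are placeholders):

import subprocess


def mount(dest, src=None, mount_type=None, **kwargs):
    argv = ["mount"]
    if mount_type:
        argv += ["-t", mount_type]
    # e.g. rbind=True, ro=True  ->  "-o rbind,ro"
    options = ",".join(key for key, val in kwargs.items() if val)
    if options:
        argv += ["-o", options]
    if src is not None:
        argv += [src]
    argv += [dest]
    subprocess.run(argv, check=True)
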
diff --git a/src/buildstream/sandbox/_sandboxbuildbox.py b/src/buildstream/sandbox/_sandboxbuildbox.py
index 4258ee26d..15e45a4df 100644
--- a/src/buildstream/sandbox/_sandboxbuildbox.py
+++ b/src/buildstream/sandbox/_sandboxbuildbox.py
@@ -34,22 +34,22 @@ from .._exceptions import SandboxError
# BuildBox-based sandbox implementation.
#
class SandboxBuildBox(Sandbox):
-
def __init__(self, context, project, directory, **kwargs):
- if kwargs.get('allow_real_directory'):
+ if kwargs.get("allow_real_directory"):
raise SandboxError("BuildBox does not support real directories")
- kwargs['allow_real_directory'] = False
+ kwargs["allow_real_directory"] = False
super().__init__(context, project, directory, **kwargs)
@classmethod
def check_available(cls):
try:
- utils.get_host_tool('buildbox')
+ utils.get_host_tool("buildbox")
except utils.ProgramNotFoundError as Error:
cls._dummy_reasons += ["buildbox not found"]
- raise SandboxError(" and ".join(cls._dummy_reasons),
- reason="unavailable-local-sandbox") from Error
+ raise SandboxError(
+ " and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox"
+ ) from Error
@classmethod
def check_sandbox_config(cls, platform, config):
@@ -73,42 +73,48 @@ class SandboxBuildBox(Sandbox):
scratch_directory = self._get_scratch_directory()
if not self._has_command(command[0], env):
- raise SandboxCommandError("Staged artifacts do not provide command "
- "'{}'".format(command[0]),
- reason='missing-command')
+ raise SandboxCommandError(
+ "Staged artifacts do not provide command " "'{}'".format(command[0]),
+ reason="missing-command",
+ )
# Grab the full path of the buildbox binary
try:
- buildbox_command = [utils.get_host_tool('buildbox')]
+ buildbox_command = [utils.get_host_tool("buildbox")]
except ProgramNotFoundError as Err:
- raise SandboxError(("BuildBox not on path, you are using the BuildBox sandbox because "
- "BST_FORCE_SANDBOX=buildbox")) from Err
+ raise SandboxError(
+ (
+ "BuildBox not on path, you are using the BuildBox sandbox because "
+ "BST_FORCE_SANDBOX=buildbox"
+ )
+ ) from Err
for mark in self._get_marked_directories():
- path = mark['directory']
- assert path.startswith('/') and len(path) > 1
+ path = mark["directory"]
+ assert path.startswith("/") and len(path) > 1
root_directory.descend(*path[1:].split(os.path.sep), create=True)
digest = root_directory._get_digest()
- with open(os.path.join(scratch_directory, 'in'), 'wb') as input_digest_file:
+ with open(os.path.join(scratch_directory, "in"), "wb") as input_digest_file:
input_digest_file.write(digest.SerializeToString())
buildbox_command += ["--local=" + root_directory.cas_cache.casdir]
buildbox_command += ["--input-digest=in"]
buildbox_command += ["--output-digest=out"]
- common_details = ("BuildBox is a experimental sandbox and does not support the requested feature.\n"
- "You are using this feature because BST_FORCE_SANDBOX=buildbox.")
+ common_details = (
+ "BuildBox is a experimental sandbox and does not support the requested feature.\n"
+ "You are using this feature because BST_FORCE_SANDBOX=buildbox."
+ )
if not flags & SandboxFlags.NETWORK_ENABLED:
# TODO
self._issue_warning(
- "BuildBox sandbox does not have Networking yet",
- detail=common_details
+ "BuildBox sandbox does not have Networking yet", detail=common_details
)
if cwd is not None:
- buildbox_command += ['--chdir=' + cwd]
+ buildbox_command += ["--chdir=" + cwd]
# In interactive mode, we want a complete devpts inside
# the container, so there is a /dev/console and such. In
@@ -119,14 +125,14 @@ class SandboxBuildBox(Sandbox):
# TODO
self._issue_warning(
"BuildBox sandbox does not fully support BuildStream shells yet",
- detail=common_details
+ detail=common_details,
)
if flags & SandboxFlags.ROOT_READ_ONLY:
# TODO
self._issue_warning(
"BuildBox sandbox does not fully support BuildStream `Read only Root`",
- detail=common_details
+ detail=common_details,
)
# Set UID and GID
@@ -134,11 +140,11 @@ class SandboxBuildBox(Sandbox):
# TODO
self._issue_warning(
"BuildBox sandbox does not fully support BuildStream Inherit UID",
- detail=common_details
+ detail=common_details,
)
- os.makedirs(os.path.join(scratch_directory, 'mnt'), exist_ok=True)
- buildbox_command += ['mnt']
+ os.makedirs(os.path.join(scratch_directory, "mnt"), exist_ok=True)
+ buildbox_command += ["mnt"]
# Add the command
buildbox_command += command
@@ -150,7 +156,7 @@ class SandboxBuildBox(Sandbox):
with ExitStack() as stack:
# Ensure the cwd exists
if cwd is not None and len(cwd) > 1:
- assert cwd.startswith('/')
+ assert cwd.startswith("/")
root_directory.descend(*cwd[1:].split(os.path.sep), create=True)
# If we're interactive, we want to inherit our stdin,
@@ -162,15 +168,25 @@ class SandboxBuildBox(Sandbox):
stdin = stack.enter_context(open(os.devnull, "r"))
# Run buildbox !
- exit_code = self.run_buildbox(buildbox_command, stdin, stdout, stderr, env,
- interactive=(flags & SandboxFlags.INTERACTIVE),
- cwd=scratch_directory)
+ exit_code = self.run_buildbox(
+ buildbox_command,
+ stdin,
+ stdout,
+ stderr,
+ env,
+ interactive=(flags & SandboxFlags.INTERACTIVE),
+ cwd=scratch_directory,
+ )
if exit_code == 0:
- with open(os.path.join(scratch_directory, 'out'), 'rb') as output_digest_file:
+ with open(
+ os.path.join(scratch_directory, "out"), "rb"
+ ) as output_digest_file:
output_digest = remote_execution_pb2.Digest()
output_digest.ParseFromString(output_digest_file.read())
- self._vdir = CasBasedDirectory(root_directory.cas_cache, digest=output_digest)
+ self._vdir = CasBasedDirectory(
+ root_directory.cas_cache, digest=output_digest
+ )
return exit_code
@@ -194,7 +210,9 @@ class SandboxBuildBox(Sandbox):
group_id = os.getpgid(process.pid)
os.killpg(group_id, signal.SIGCONT)
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
+ with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(
+ kill_proc
+ ):
process = subprocess.Popen(
argv,
close_fds=True,
@@ -203,7 +221,7 @@ class SandboxBuildBox(Sandbox):
stdout=stdout,
stderr=stderr,
cwd=cwd,
- start_new_session=interactive
+ start_new_session=interactive,
)
# Wait for the child process to finish, ensuring that
diff --git a/src/buildstream/sandbox/_sandboxbwrap.py b/src/buildstream/sandbox/_sandboxbwrap.py
index bd60eafc1..d17139293 100644
--- a/src/buildstream/sandbox/_sandboxbwrap.py
+++ b/src/buildstream/sandbox/_sandboxbwrap.py
@@ -48,34 +48,29 @@ class SandboxBwrap(Sandbox):
_have_good_bwrap = None
# Minimal set of devices for the sandbox
- DEVICES = [
- '/dev/full',
- '/dev/null',
- '/dev/urandom',
- '/dev/random',
- '/dev/zero'
- ]
+ DEVICES = ["/dev/full", "/dev/null", "/dev/urandom", "/dev/random", "/dev/zero"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.linux32 = kwargs['linux32']
+ self.linux32 = kwargs["linux32"]
@classmethod
def check_available(cls):
cls._have_fuse = os.path.exists("/dev/fuse")
if not cls._have_fuse:
- cls._dummy_reasons += ['Fuse is unavailable']
+ cls._dummy_reasons += ["Fuse is unavailable"]
try:
- utils.get_host_tool('bwrap')
+ utils.get_host_tool("bwrap")
except utils.ProgramNotFoundError as Error:
cls._bwrap_exists = False
cls._have_good_bwrap = False
cls._die_with_parent_available = False
cls._json_status_available = False
- cls._dummy_reasons += ['Bubblewrap not found']
- raise SandboxError(" and ".join(cls._dummy_reasons),
- reason="unavailable-local-sandbox") from Error
+ cls._dummy_reasons += ["Bubblewrap not found"]
+ raise SandboxError(
+ " and ".join(cls._dummy_reasons), reason="unavailable-local-sandbox"
+ ) from Error
bwrap_version = _site.get_bwrap_version()
@@ -84,7 +79,7 @@ class SandboxBwrap(Sandbox):
cls._die_with_parent_available = (0, 1, 8) <= bwrap_version
cls._json_status_available = (0, 3, 2) <= bwrap_version
if not cls._have_good_bwrap:
- cls._dummy_reasons += ['Bubblewrap is too old']
+ cls._dummy_reasons += ["Bubblewrap is too old"]
raise SandboxError(" and ".join(cls._dummy_reasons))
cls._uid = os.geteuid()
@@ -98,29 +93,40 @@ class SandboxBwrap(Sandbox):
# issue a warning if it's not available, and save the state
# locally so that we can inform the sandbox to not try it
# later on.
- bwrap = utils.get_host_tool('bwrap')
+ bwrap = utils.get_host_tool("bwrap")
try:
- whoami = utils.get_host_tool('whoami')
- output = subprocess.check_output([
- bwrap,
- '--ro-bind', '/', '/',
- '--unshare-user',
- '--uid', '0', '--gid', '0',
- whoami,
- ], universal_newlines=True).strip()
+ whoami = utils.get_host_tool("whoami")
+ output = subprocess.check_output(
+ [
+ bwrap,
+ "--ro-bind",
+ "/",
+ "/",
+ "--unshare-user",
+ "--uid",
+ "0",
+ "--gid",
+ "0",
+ whoami,
+ ],
+ universal_newlines=True,
+ ).strip()
except subprocess.CalledProcessError:
- output = ''
+ output = ""
except utils.ProgramNotFoundError:
- output = ''
+ output = ""
- return output == 'root'
+ return output == "root"
@classmethod
def check_sandbox_config(cls, local_platform, config):
if cls.user_ns_available:
# User namespace support allows arbitrary build UID/GID settings.
pass
- elif (config.build_uid != local_platform._uid or config.build_gid != local_platform._gid):
+ elif (
+ config.build_uid != local_platform._uid
+ or config.build_gid != local_platform._gid
+ ):
# Without user namespace support, the UID/GID in the sandbox
# will match the host UID/GID.
return False
@@ -130,7 +136,9 @@ class SandboxBwrap(Sandbox):
if config.build_os != host_os:
raise SandboxError("Configured and host OS don't match.")
if config.build_arch != host_arch and not local_platform.can_crossbuild(config):
- raise SandboxError("Configured architecture and host architecture don't match.")
+ raise SandboxError(
+ "Configured architecture and host architecture don't match."
+ )
return True
@@ -141,9 +149,10 @@ class SandboxBwrap(Sandbox):
root_directory = self.get_virtual_directory()._get_underlying_directory()
if not self._has_command(command[0], env):
- raise SandboxCommandError("Staged artifacts do not provide command "
- "'{}'".format(command[0]),
- reason='missing-command')
+ raise SandboxCommandError(
+ "Staged artifacts do not provide command " "'{}'".format(command[0]),
+ reason="missing-command",
+ )
# NOTE: MountMap transitively imports `_fuse/fuse.py` which raises an
# EnvironmentError when fuse is not found. Since this module is
@@ -154,29 +163,29 @@ class SandboxBwrap(Sandbox):
# Create the mount map, this will tell us where
# each mount point needs to be mounted from and to
mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY)
- root_mount_source = mount_map.get_mount_source('/')
+ root_mount_source = mount_map.get_mount_source("/")
# start command with linux32 if needed
if self.linux32:
- bwrap_command = [utils.get_host_tool('linux32')]
+ bwrap_command = [utils.get_host_tool("linux32")]
else:
bwrap_command = []
# Grab the full path of the bwrap binary
- bwrap_command += [utils.get_host_tool('bwrap')]
+ bwrap_command += [utils.get_host_tool("bwrap")]
for k, v in env.items():
- bwrap_command += ['--setenv', k, v]
+ bwrap_command += ["--setenv", k, v]
for k in os.environ.keys() - env.keys():
- bwrap_command += ['--unsetenv', k]
+ bwrap_command += ["--unsetenv", k]
# Create a new pid namespace, this also ensures that any subprocesses
# are cleaned up when the bwrap process exits.
- bwrap_command += ['--unshare-pid']
+ bwrap_command += ["--unshare-pid"]
# Ensure subprocesses are cleaned up when the bwrap parent dies.
if self._die_with_parent_available:
- bwrap_command += ['--die-with-parent']
+ bwrap_command += ["--die-with-parent"]
# Add in the root filesystem stuff first.
#
@@ -186,15 +195,12 @@ class SandboxBwrap(Sandbox):
bwrap_command += ["--bind", root_mount_source, "/"]
if not flags & SandboxFlags.NETWORK_ENABLED:
- bwrap_command += ['--unshare-net']
- bwrap_command += ['--unshare-uts', '--hostname', 'buildstream']
- bwrap_command += ['--unshare-ipc']
+ bwrap_command += ["--unshare-net"]
+ bwrap_command += ["--unshare-uts", "--hostname", "buildstream"]
+ bwrap_command += ["--unshare-ipc"]
# Give it a proc and tmpfs
- bwrap_command += [
- '--proc', '/proc',
- '--tmpfs', '/tmp'
- ]
+ bwrap_command += ["--proc", "/proc", "--tmpfs", "/tmp"]
# In interactive mode, we want a complete devpts inside
# the container, so there is a /dev/console and such. In
@@ -202,17 +208,19 @@ class SandboxBwrap(Sandbox):
# a minimal set of devices to expose to the sandbox.
#
if flags & SandboxFlags.INTERACTIVE:
- bwrap_command += ['--dev', '/dev']
+ bwrap_command += ["--dev", "/dev"]
else:
for device in self.DEVICES:
- bwrap_command += ['--dev-bind', device, device]
+ bwrap_command += ["--dev-bind", device, device]
# Add bind mounts to any marked directories
marked_directories = self._get_marked_directories()
mount_source_overrides = self._get_mount_sources()
for mark in marked_directories:
- mount_point = mark['directory']
- if mount_point in mount_source_overrides: # pylint: disable=consider-using-get
+ mount_point = mark["directory"]
+ if (
+ mount_point in mount_source_overrides
+ ): # pylint: disable=consider-using-get
mount_source = mount_source_overrides[mount_point]
else:
mount_source = mount_map.get_mount_source(mount_point)
@@ -225,22 +233,22 @@ class SandboxBwrap(Sandbox):
# harmless to do in a build environment where the directories
# we mount just never contain device files.
#
- bwrap_command += ['--dev-bind', mount_source, mount_point]
+ bwrap_command += ["--dev-bind", mount_source, mount_point]
if flags & SandboxFlags.ROOT_READ_ONLY:
bwrap_command += ["--remount-ro", "/"]
if cwd is not None:
- bwrap_command += ['--dir', cwd]
- bwrap_command += ['--chdir', cwd]
+ bwrap_command += ["--dir", cwd]
+ bwrap_command += ["--chdir", cwd]
# Set UID and GID
if self.user_ns_available:
- bwrap_command += ['--unshare-user']
+ bwrap_command += ["--unshare-user"]
if not flags & SandboxFlags.INHERIT_UID:
uid = self._get_config().build_uid
gid = self._get_config().build_gid
- bwrap_command += ['--uid', str(uid), '--gid', str(gid)]
+ bwrap_command += ["--uid", str(uid), "--gid", str(gid)]
with ExitStack() as stack:
pass_fds = ()
@@ -248,7 +256,7 @@ class SandboxBwrap(Sandbox):
if self._json_status_available:
json_status_file = stack.enter_context(TemporaryFile())
pass_fds = (json_status_file.fileno(),)
- bwrap_command += ['--json-status-fd', str(json_status_file.fileno())]
+ bwrap_command += ["--json-status-fd", str(json_status_file.fileno())]
# Add the command
bwrap_command += command
@@ -260,7 +268,7 @@ class SandboxBwrap(Sandbox):
#
existing_basedirs = {
directory: os.path.exists(os.path.join(root_directory, directory))
- for directory in ['tmp', 'dev', 'proc']
+ for directory in ["tmp", "dev", "proc"]
}
# Use the MountMap context manager to ensure that any redirected
@@ -278,15 +286,21 @@ class SandboxBwrap(Sandbox):
stdin = stack.enter_context(open(os.devnull, "r"))
# Run bubblewrap !
- exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr,
- (flags & SandboxFlags.INTERACTIVE), pass_fds)
+ exit_code = self.run_bwrap(
+ bwrap_command,
+ stdin,
+ stdout,
+ stderr,
+ (flags & SandboxFlags.INTERACTIVE),
+ pass_fds,
+ )
# Cleanup things which bwrap might have left behind, while
# everything is still mounted because bwrap can be creating
# the devices on the fuse mount, so we should remove it there.
if not flags & SandboxFlags.INTERACTIVE:
for device in self.DEVICES:
- device_path = os.path.join(root_mount_source, device.lstrip('/'))
+ device_path = os.path.join(root_mount_source, device.lstrip("/"))
# This will remove the device in a loop, allowing some
# retries in case the device file leaked by bubblewrap is still busy
@@ -294,7 +308,7 @@ class SandboxBwrap(Sandbox):
# Remove /tmp, this is a bwrap owned thing we want to be sure
# never ends up in an artifact
- for basedir in ['tmp', 'dev', 'proc']:
+ for basedir in ["tmp", "dev", "proc"]:
# Skip removal of directories which already existed before
# launching bwrap
@@ -336,12 +350,16 @@ class SandboxBwrap(Sandbox):
for line in json_status_file:
with suppress(json.decoder.JSONDecodeError):
o = json.loads(line.decode())
- if isinstance(o, collections.abc.Mapping) and 'exit-code' in o:
- child_exit_code = o['exit-code']
+ if isinstance(o, collections.abc.Mapping) and "exit-code" in o:
+ child_exit_code = o["exit-code"]
break
if child_exit_code is None:
- raise SandboxError("`bwrap' terminated during sandbox setup with exitcode {}".format(exit_code),
- reason="bwrap-sandbox-fail")
+ raise SandboxError(
+ "`bwrap' terminated during sandbox setup with exitcode {}".format(
+ exit_code
+ ),
+ reason="bwrap-sandbox-fail",
+ )
exit_code = child_exit_code
self._vdir._mark_changed()
@@ -427,7 +445,7 @@ class SandboxBwrap(Sandbox):
stdin=stdin,
stdout=stdout,
stderr=stderr,
- start_new_session=new_session
+ start_new_session=new_session,
)
# Wait for the child process to finish, ensuring that
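
To make the long argv construction above easier to follow: bwrap is given one flag pair per sandbox property, roughly as in the reduced sketch below (deliberately not the exact flag set BuildStream emits):

import subprocess


def run_in_bwrap(rootfs, command, env, cwd="/", network=False):
    argv = ["bwrap", "--bind", rootfs, "/"]          # rootfs becomes /
    argv += ["--proc", "/proc", "--tmpfs", "/tmp"]   # fresh /proc and /tmp
    argv += ["--dev", "/dev"]                        # minimal /dev
    argv += ["--unshare-pid"]                        # own pid namespace
    if not network:
        argv += ["--unshare-net"]                    # no network access
    argv += ["--chdir", cwd]
    for key, value in env.items():
        argv += ["--setenv", key, value]
    argv += command
    return subprocess.call(argv)
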
diff --git a/src/buildstream/sandbox/_sandboxchroot.py b/src/buildstream/sandbox/_sandboxchroot.py
index 8d4c54058..ad76bf998 100644
--- a/src/buildstream/sandbox/_sandboxchroot.py
+++ b/src/buildstream/sandbox/_sandboxchroot.py
@@ -35,7 +35,7 @@ from . import Sandbox, SandboxFlags, SandboxCommandError
class SandboxChroot(Sandbox):
- _FUSE_MOUNT_OPTIONS = {'dev': True}
+ _FUSE_MOUNT_OPTIONS = {"dev": True}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -43,8 +43,10 @@ class SandboxChroot(Sandbox):
uid = self._get_config().build_uid
gid = self._get_config().build_gid
if uid != 0 or gid != 0:
- raise SandboxError("Chroot sandboxes cannot specify a non-root uid/gid "
- "({},{} were supplied via config)".format(uid, gid))
+ raise SandboxError(
+ "Chroot sandboxes cannot specify a non-root uid/gid "
+ "({},{} were supplied via config)".format(uid, gid)
+ )
self.mount_map = None
@@ -78,20 +80,22 @@ class SandboxChroot(Sandbox):
def _run(self, command, flags, *, cwd, env):
if not self._has_command(command[0], env):
- raise SandboxCommandError("Staged artifacts do not provide command "
- "'{}'".format(command[0]),
- reason='missing-command')
+ raise SandboxCommandError(
+ "Staged artifacts do not provide command " "'{}'".format(command[0]),
+ reason="missing-command",
+ )
stdout, stderr = self._get_output()
# Create the mount map, this will tell us where
# each mount point needs to be mounted from and to
- self.mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY,
- self._FUSE_MOUNT_OPTIONS)
+ self.mount_map = MountMap(
+ self, flags & SandboxFlags.ROOT_READ_ONLY, self._FUSE_MOUNT_OPTIONS
+ )
# Create a sysroot and run the command inside it
with ExitStack() as stack:
- os.makedirs('/var/run/buildstream', exist_ok=True)
+ os.makedirs("/var/run/buildstream", exist_ok=True)
# FIXME: While we do not currently do anything to prevent
# network access, we also don't copy /etc/resolv.conf to
@@ -104,21 +108,22 @@ class SandboxChroot(Sandbox):
#
# Nonetheless a better solution could perhaps be found.
- rootfs = stack.enter_context(utils._tempdir(dir='/var/run/buildstream'))
+ rootfs = stack.enter_context(utils._tempdir(dir="/var/run/buildstream"))
stack.enter_context(self.create_devices(self._root, flags))
stack.enter_context(self.mount_dirs(rootfs, flags, stdout, stderr))
if flags & SandboxFlags.INTERACTIVE:
stdin = sys.stdin
else:
- stdin = stack.enter_context(open(os.devnull, 'r'))
+ stdin = stack.enter_context(open(os.devnull, "r"))
# Ensure the cwd exists
if cwd is not None:
workdir = os.path.join(rootfs, cwd.lstrip(os.sep))
os.makedirs(workdir, exist_ok=True)
- status = self.chroot(rootfs, command, stdin, stdout,
- stderr, cwd, env, flags)
+ status = self.chroot(
+ rootfs, command, stdin, stdout, stderr, cwd, env, flags
+ )
self._vdir._mark_changed()
return status
@@ -161,7 +166,9 @@ class SandboxChroot(Sandbox):
os.killpg(group_id, signal.SIGCONT)
try:
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
+ with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(
+ kill_proc
+ ):
process = subprocess.Popen( # pylint: disable=subprocess-popen-preexec-fn
command,
close_fds=True,
@@ -173,7 +180,7 @@ class SandboxChroot(Sandbox):
# If you try to put gtk dialogs here Tristan (either)
# will personally scald you
preexec_fn=lambda: (os.chroot(rootfs), os.chdir(cwd)),
- start_new_session=flags & SandboxFlags.INTERACTIVE
+ start_new_session=flags & SandboxFlags.INTERACTIVE,
)
# Wait for the child process to finish, ensuring that
@@ -214,13 +221,16 @@ class SandboxChroot(Sandbox):
# Exceptions in preexec_fn are simply reported as
# 'Exception occurred in preexec_fn', turn these into
# a more readable message.
- if str(e) == 'Exception occurred in preexec_fn.':
- raise SandboxError('Could not chroot into {} or chdir into {}. '
- 'Ensure you are root and that the relevant directory exists.'
- .format(rootfs, cwd)) from e
+ if str(e) == "Exception occurred in preexec_fn.":
+ raise SandboxError(
+ "Could not chroot into {} or chdir into {}. "
+ "Ensure you are root and that the relevant directory exists.".format(
+ rootfs, cwd
+ )
+ ) from e
# Otherwise, raise a more general error
- raise SandboxError('Could not run command {}: {}'.format(command, e)) from e
+ raise SandboxError("Could not run command {}: {}".format(command, e)) from e
return code
@@ -251,8 +261,12 @@ class SandboxChroot(Sandbox):
devices.append(self.mknod(device, location))
except OSError as err:
if err.errno == 1:
- raise SandboxError("Permission denied while creating device node: {}.".format(err) +
- "BuildStream reqiures root permissions for these setttings.")
+ raise SandboxError(
+ "Permission denied while creating device node: {}.".format(
+ err
+ )
+ + "BuildStream reqiures root permissions for these setttings."
+ )
raise
@@ -286,7 +300,9 @@ class SandboxChroot(Sandbox):
mount_source = self.mount_map.get_mount_source(point)
mount_point = os.path.join(rootfs, point.lstrip(os.sep))
- with Mounter.bind_mount(mount_point, src=mount_source, stdout=stdout, stderr=stderr, **kwargs):
+ with Mounter.bind_mount(
+ mount_point, src=mount_source, stdout=stdout, stderr=stderr, **kwargs
+ ):
yield
@contextmanager
@@ -294,26 +310,35 @@ class SandboxChroot(Sandbox):
mount_point = os.path.join(rootfs, src.lstrip(os.sep))
os.makedirs(mount_point, exist_ok=True)
- with Mounter.bind_mount(mount_point, src=src, stdout=stdout, stderr=stderr, **kwargs):
+ with Mounter.bind_mount(
+ mount_point, src=src, stdout=stdout, stderr=stderr, **kwargs
+ ):
yield
with ExitStack() as stack:
stack.enter_context(self.mount_map.mounted(self))
- stack.enter_context(mount_point('/'))
+ stack.enter_context(mount_point("/"))
if flags & SandboxFlags.INTERACTIVE:
- stack.enter_context(mount_src('/dev'))
+ stack.enter_context(mount_src("/dev"))
- stack.enter_context(mount_src('/tmp'))
- stack.enter_context(mount_src('/proc'))
+ stack.enter_context(mount_src("/tmp"))
+ stack.enter_context(mount_src("/proc"))
for mark in self._get_marked_directories():
- stack.enter_context(mount_point(mark['directory']))
+ stack.enter_context(mount_point(mark["directory"]))
# Remount root RO if necessary
if flags & flags & SandboxFlags.ROOT_READ_ONLY:
- root_mount = Mounter.mount(rootfs, stdout=stdout, stderr=stderr, remount=True, ro=True, bind=True)
+ root_mount = Mounter.mount(
+ rootfs,
+ stdout=stdout,
+ stderr=stderr,
+ remount=True,
+ ro=True,
+ bind=True,
+ )
# Since the exit stack has already registered a mount
# for this path, we do not need to register another
# umount call.
@@ -343,10 +368,13 @@ class SandboxChroot(Sandbox):
os.mknod(target, mode=stat.S_IFCHR | dev.st_mode, device=target_dev)
except PermissionError as e:
- raise SandboxError('Could not create device {}, ensure that you have root permissions: {}')
+ raise SandboxError(
+ "Could not create device {}, ensure that you have root permissions: {}"
+ )
except OSError as e:
- raise SandboxError('Could not create device {}: {}'
- .format(target, e)) from e
+ raise SandboxError(
+ "Could not create device {}: {}".format(target, e)
+ ) from e
return target
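
The core mechanism SandboxChroot relies on, visible in the Popen call reformatted above, is a preexec_fn that chroots into the staged root filesystem between fork() and exec(), so only the spawned command is confined. A minimal standalone sketch of that pattern follows; it is illustrative only, assumes an already-populated rootfs directory and root privileges, and omits the mount handling, device nodes and signal plumbing the real sandbox performs.

import os
import subprocess


def run_in_chroot(rootfs, command, cwd="/", env=None):
    # The lambda runs in the forked child only, so the chroot/chdir do not
    # affect the parent process.
    process = subprocess.Popen(
        command,
        close_fds=True,
        env=env,
        preexec_fn=lambda: (os.chroot(rootfs), os.chdir(cwd)),
    )
    return process.wait()
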
diff --git a/src/buildstream/sandbox/_sandboxdummy.py b/src/buildstream/sandbox/_sandboxdummy.py
index ae3d5e512..78c08035d 100644
--- a/src/buildstream/sandbox/_sandboxdummy.py
+++ b/src/buildstream/sandbox/_sandboxdummy.py
@@ -28,9 +28,12 @@ class SandboxDummy(Sandbox):
def _run(self, command, flags, *, cwd, env):
if not self._has_command(command[0], env):
- raise SandboxCommandError("Staged artifacts do not provide command "
- "'{}'".format(command[0]),
- reason='missing-command')
+ raise SandboxCommandError(
+ "Staged artifacts do not provide command " "'{}'".format(command[0]),
+ reason="missing-command",
+ )
- raise SandboxError("This platform does not support local builds: {}".format(self._reason),
- reason="unavailable-local-sandbox")
+ raise SandboxError(
+ "This platform does not support local builds: {}".format(self._reason),
+ reason="unavailable-local-sandbox",
+ )
diff --git a/src/buildstream/sandbox/_sandboxremote.py b/src/buildstream/sandbox/_sandboxremote.py
index 77bb34fa9..72b0f8f1a 100644
--- a/src/buildstream/sandbox/_sandboxremote.py
+++ b/src/buildstream/sandbox/_sandboxremote.py
@@ -32,7 +32,10 @@ from .._message import Message, MessageType
from .sandbox import Sandbox, SandboxCommandError, _SandboxBatch
from ..storage._casbaseddirectory import CasBasedDirectory
from .. import _signals
-from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
+from .._protos.build.bazel.remote.execution.v2 import (
+ remote_execution_pb2,
+ remote_execution_pb2_grpc,
+)
from .._protos.google.rpc import code_pb2
from .._exceptions import BstError, SandboxError
from .. import _yaml
@@ -41,7 +44,9 @@ from .._cas import CASRemote
from .._remote import RemoteSpec
-class RemoteExecutionSpec(namedtuple('RemoteExecutionSpec', 'exec_service storage_service action_service')):
+class RemoteExecutionSpec(
+ namedtuple("RemoteExecutionSpec", "exec_service storage_service action_service")
+):
pass
@@ -51,59 +56,63 @@ class RemoteExecutionSpec(namedtuple('RemoteExecutionSpec', 'exec_service storag
# commands to a remote server and retrieves the results from it.
#
class SandboxRemote(Sandbox):
-
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self._output_files_required = kwargs.get('output_files_required', True)
+ self._output_files_required = kwargs.get("output_files_required", True)
- config = kwargs['specs'] # This should be a RemoteExecutionSpec
+ config = kwargs["specs"] # This should be a RemoteExecutionSpec
if config is None:
return
# gRPC doesn't support fork without exec, which is used in the main process.
assert not utils._is_main_process()
- self.storage_url = config.storage_service['url']
- self.exec_url = config.exec_service['url']
+ self.storage_url = config.storage_service["url"]
+ self.exec_url = config.exec_service["url"]
exec_certs = {}
- for key in ['client-cert', 'client-key', 'server-cert']:
+ for key in ["client-cert", "client-key", "server-cert"]:
if key in config.exec_service:
- with open(config.exec_service[key], 'rb') as f:
+ with open(config.exec_service[key], "rb") as f:
exec_certs[key] = f.read()
self.exec_credentials = grpc.ssl_channel_credentials(
- root_certificates=exec_certs.get('server-cert'),
- private_key=exec_certs.get('client-key'),
- certificate_chain=exec_certs.get('client-cert'))
+ root_certificates=exec_certs.get("server-cert"),
+ private_key=exec_certs.get("client-key"),
+ certificate_chain=exec_certs.get("client-cert"),
+ )
action_certs = {}
- for key in ['client-cert', 'client-key', 'server-cert']:
+ for key in ["client-cert", "client-key", "server-cert"]:
if key in config.action_service:
- with open(config.action_service[key], 'rb') as f:
+ with open(config.action_service[key], "rb") as f:
action_certs[key] = f.read()
if config.action_service:
- self.action_url = config.action_service['url']
- self.action_instance = config.action_service.get('instance-name', None)
+ self.action_url = config.action_service["url"]
+ self.action_instance = config.action_service.get("instance-name", None)
self.action_credentials = grpc.ssl_channel_credentials(
- root_certificates=action_certs.get('server-cert'),
- private_key=action_certs.get('client-key'),
- certificate_chain=action_certs.get('client-cert'))
+ root_certificates=action_certs.get("server-cert"),
+ private_key=action_certs.get("client-key"),
+ certificate_chain=action_certs.get("client-cert"),
+ )
else:
self.action_url = None
self.action_instance = None
self.action_credentials = None
- self.exec_instance = config.exec_service.get('instance-name', None)
- self.storage_instance = config.storage_service.get('instance-name', None)
-
- self.storage_remote_spec = RemoteSpec(self.storage_url, push=True,
- server_cert=config.storage_service.get('server-cert'),
- client_key=config.storage_service.get('client-key'),
- client_cert=config.storage_service.get('client-cert'),
- instance_name=self.storage_instance)
+ self.exec_instance = config.exec_service.get("instance-name", None)
+ self.storage_instance = config.storage_service.get("instance-name", None)
+
+ self.storage_remote_spec = RemoteSpec(
+ self.storage_url,
+ push=True,
+ server_cert=config.storage_service.get("server-cert"),
+ client_key=config.storage_service.get("client-key"),
+ client_cert=config.storage_service.get("client-cert"),
+ instance_name=self.storage_instance,
+ )
self.operation_name = None
def info(self, msg):
@@ -111,47 +120,51 @@ class SandboxRemote(Sandbox):
@staticmethod
def specs_from_config_node(config_node, basedir=None):
-
def require_node(config, keyname):
val = config.get_mapping(keyname, default=None)
if val is None:
provenance = remote_config.get_provenance()
- raise _yaml.LoadError("{}: '{}' was not present in the remote "
- "execution configuration (remote-execution). "
- .format(str(provenance), keyname),
- _yaml.LoadErrorReason.INVALID_DATA)
+ raise _yaml.LoadError(
+ "{}: '{}' was not present in the remote "
+ "execution configuration (remote-execution). ".format(
+ str(provenance), keyname
+ ),
+ _yaml.LoadErrorReason.INVALID_DATA,
+ )
return val
- remote_config = config_node.get_mapping('remote-execution', default=None)
+ remote_config = config_node.get_mapping("remote-execution", default=None)
if remote_config is None:
return None
- service_keys = ['execution-service', 'storage-service', 'action-cache-service']
+ service_keys = ["execution-service", "storage-service", "action-cache-service"]
- remote_config.validate_keys(['url', *service_keys])
+ remote_config.validate_keys(["url", *service_keys])
- exec_config = require_node(remote_config, 'execution-service')
- storage_config = require_node(remote_config, 'storage-service')
- action_config = remote_config.get_mapping('action-cache-service', default={})
+ exec_config = require_node(remote_config, "execution-service")
+ storage_config = require_node(remote_config, "storage-service")
+ action_config = remote_config.get_mapping("action-cache-service", default={})
- tls_keys = ['client-key', 'client-cert', 'server-cert']
+ tls_keys = ["client-key", "client-cert", "server-cert"]
- exec_config.validate_keys(['url', 'instance-name', *tls_keys])
- storage_config.validate_keys(['url', 'instance-name', *tls_keys])
+ exec_config.validate_keys(["url", "instance-name", *tls_keys])
+ storage_config.validate_keys(["url", "instance-name", *tls_keys])
if action_config:
- action_config.validate_keys(['url', 'instance-name', *tls_keys])
+ action_config.validate_keys(["url", "instance-name", *tls_keys])
# Maintain some backwards compatibility with older configs, in which
# 'url' was the only valid key for remote-execution:
- if 'url' in remote_config:
- if 'execution-service' not in remote_config:
- exec_config = Node.from_dict({'url': remote_config['url']})
+ if "url" in remote_config:
+ if "execution-service" not in remote_config:
+ exec_config = Node.from_dict({"url": remote_config["url"]})
else:
- provenance = remote_config.get_node('url').get_provenance()
- raise _yaml.LoadError("{}: 'url' and 'execution-service' keys were found in the remote "
- "execution configuration (remote-execution). "
- "You can only specify one of these."
- .format(str(provenance)), _yaml.LoadErrorReason.INVALID_DATA)
+ provenance = remote_config.get_node("url").get_provenance()
+ raise _yaml.LoadError(
+ "{}: 'url' and 'execution-service' keys were found in the remote "
+ "execution configuration (remote-execution). "
+ "You can only specify one of these.".format(str(provenance)),
+ _yaml.LoadErrorReason.INVALID_DATA,
+ )
service_configs = [exec_config, storage_config, action_config]
@@ -163,19 +176,23 @@ class SandboxRemote(Sandbox):
for config_key, config in zip(service_keys, service_configs):
# Either both or none of the TLS client key/cert pair must be specified:
- if ('client-key' in config) != ('client-cert' in config):
+ if ("client-key" in config) != ("client-cert" in config):
provenance = remote_config.get_node(config_key).get_provenance()
- raise _yaml.LoadError("{}: TLS client key/cert pair is incomplete. "
- "You must specify both 'client-key' and 'client-cert' "
- "for authenticated HTTPS connections."
- .format(str(provenance)), _yaml.LoadErrorReason.INVALID_DATA)
+ raise _yaml.LoadError(
+ "{}: TLS client key/cert pair is incomplete. "
+ "You must specify both 'client-key' and 'client-cert' "
+ "for authenticated HTTPS connections.".format(str(provenance)),
+ _yaml.LoadErrorReason.INVALID_DATA,
+ )
for tls_key in tls_keys:
if tls_key in config:
config[tls_key] = resolve_path(config.get_str(tls_key))
# TODO: we should probably not be stripping node info and rather load files the safe way
- return RemoteExecutionSpec(*[conf.strip_node_info() for conf in service_configs])
+ return RemoteExecutionSpec(
+ *[conf.strip_node_info() for conf in service_configs]
+ )
def run_remote_command(self, channel, action_digest):
# Sends an execution request to the remote execution server.
@@ -184,9 +201,11 @@ class SandboxRemote(Sandbox):
# Try to create a communication channel to the BuildGrid server.
stub = remote_execution_pb2_grpc.ExecutionStub(channel)
- request = remote_execution_pb2.ExecuteRequest(instance_name=self.exec_instance,
- action_digest=action_digest,
- skip_cache_lookup=False)
+ request = remote_execution_pb2.ExecuteRequest(
+ instance_name=self.exec_instance,
+ action_digest=action_digest,
+ skip_cache_lookup=False,
+ )
def __run_remote_command(stub, execute_request=None, running_operation=None):
try:
@@ -194,7 +213,9 @@ class SandboxRemote(Sandbox):
if execute_request is not None:
operation_iterator = stub.Execute(execute_request)
else:
- request = remote_execution_pb2.WaitExecutionRequest(name=running_operation.name)
+ request = remote_execution_pb2.WaitExecutionRequest(
+ name=running_operation.name
+ )
operation_iterator = stub.WaitExecution(request)
for operation in operation_iterator:
@@ -208,26 +229,34 @@ class SandboxRemote(Sandbox):
except grpc.RpcError as e:
status_code = e.code()
if status_code == grpc.StatusCode.UNAVAILABLE:
- raise SandboxError("Failed contacting remote execution server at {}."
- .format(self.exec_url))
-
- if status_code in (grpc.StatusCode.INVALID_ARGUMENT,
- grpc.StatusCode.FAILED_PRECONDITION,
- grpc.StatusCode.RESOURCE_EXHAUSTED,
- grpc.StatusCode.INTERNAL,
- grpc.StatusCode.DEADLINE_EXCEEDED):
+ raise SandboxError(
+ "Failed contacting remote execution server at {}.".format(
+ self.exec_url
+ )
+ )
+
+ if status_code in (
+ grpc.StatusCode.INVALID_ARGUMENT,
+ grpc.StatusCode.FAILED_PRECONDITION,
+ grpc.StatusCode.RESOURCE_EXHAUSTED,
+ grpc.StatusCode.INTERNAL,
+ grpc.StatusCode.DEADLINE_EXCEEDED,
+ ):
raise SandboxError("{} ({}).".format(e.details(), status_code.name))
if running_operation and status_code == grpc.StatusCode.UNIMPLEMENTED:
- raise SandboxError("Failed trying to recover from connection loss: "
- "server does not support operation status polling recovery.")
+ raise SandboxError(
+ "Failed trying to recover from connection loss: "
+ "server does not support operation status polling recovery."
+ )
return last_operation
# Set up signal handler to trigger cancel_operation on SIGTERM
operation = None
- with self._get_context().messenger.timed_activity("Waiting for the remote build to complete"), \
- _signals.terminator(partial(self.cancel_operation, channel)):
+ with self._get_context().messenger.timed_activity(
+ "Waiting for the remote build to complete"
+ ), _signals.terminator(partial(self.cancel_operation, channel)):
operation = __run_remote_command(stub, execute_request=request)
if operation is None:
return None
@@ -244,18 +273,21 @@ class SandboxRemote(Sandbox):
return
stub = operations_pb2_grpc.OperationsStub(channel)
- request = operations_pb2.CancelOperationRequest(
- name=str(self.operation_name))
+ request = operations_pb2.CancelOperationRequest(name=str(self.operation_name))
try:
stub.CancelOperation(request)
except grpc.RpcError as e:
- if (e.code() == grpc.StatusCode.UNIMPLEMENTED or
- e.code() == grpc.StatusCode.INVALID_ARGUMENT):
+ if (
+ e.code() == grpc.StatusCode.UNIMPLEMENTED
+ or e.code() == grpc.StatusCode.INVALID_ARGUMENT
+ ):
pass
else:
- raise SandboxError("Failed trying to send CancelOperation request: "
- "{} ({})".format(e.details(), e.code().name))
+ raise SandboxError(
+ "Failed trying to send CancelOperation request: "
+ "{} ({})".format(e.details(), e.code().name)
+ )
def process_job_output(self, output_directories, output_files, *, failure):
# Reads the remote execution server response to an execution request.
@@ -272,7 +304,9 @@ class SandboxRemote(Sandbox):
error_text = "No output directory was returned from the build server."
raise SandboxError(error_text)
if len(output_directories) > 1:
- error_text = "More than one output directory was returned from the build server: {}."
+ error_text = (
+ "More than one output directory was returned from the build server: {}."
+ )
raise SandboxError(error_text.format(output_directories))
tree_digest = output_directories[0].tree_digest
@@ -318,14 +352,21 @@ class SandboxRemote(Sandbox):
# however, artifact push remotes will need them.
# Only fetch blobs that are missing on one or multiple
# artifact servers.
- blobs_to_fetch = artifactcache.find_missing_blobs(project, local_missing_blobs)
+ blobs_to_fetch = artifactcache.find_missing_blobs(
+ project, local_missing_blobs
+ )
with CASRemote(self.storage_remote_spec, cascache) as casremote:
- remote_missing_blobs = cascache.fetch_blobs(casremote, blobs_to_fetch)
+ remote_missing_blobs = cascache.fetch_blobs(
+ casremote, blobs_to_fetch
+ )
if remote_missing_blobs:
- raise SandboxError("{} output files are missing on the CAS server"
- .format(len(remote_missing_blobs)))
+ raise SandboxError(
+ "{} output files are missing on the CAS server".format(
+ len(remote_missing_blobs)
+ )
+ )
def _run(self, command, flags, *, cwd, env):
stdout, stderr = self._get_output()
@@ -342,7 +383,7 @@ class SandboxRemote(Sandbox):
# some of the behaviour of other sandboxes, which create these
# to use as mount points.
for mark in self._get_marked_directories():
- directory = mark['directory']
+ directory = mark["directory"]
# Create each marked directory
upload_vdir.descend(*directory.split(os.path.sep), create=True)
@@ -350,8 +391,9 @@ class SandboxRemote(Sandbox):
input_root_digest = upload_vdir._get_digest()
command_proto = self._create_command(command, cwd, env)
command_digest = utils._message_digest(command_proto.SerializeToString())
- action = remote_execution_pb2.Action(command_digest=command_digest,
- input_root_digest=input_root_digest)
+ action = remote_execution_pb2.Action(
+ command_digest=command_digest, input_root_digest=input_root_digest
+ )
action_digest = utils._message_digest(action.SerializeToString())
# check action cache download and download if there
@@ -362,14 +404,21 @@ class SandboxRemote(Sandbox):
try:
casremote.init()
except grpc.RpcError as e:
- raise SandboxError("Failed to contact remote execution CAS endpoint at {}: {}"
- .format(self.storage_url, e)) from e
+ raise SandboxError(
+ "Failed to contact remote execution CAS endpoint at {}: {}".format(
+ self.storage_url, e
+ )
+ ) from e
# Determine blobs missing on remote
try:
- missing_blobs = cascache.remote_missing_blobs_for_directory(casremote, input_root_digest)
+ missing_blobs = cascache.remote_missing_blobs_for_directory(
+ casremote, input_root_digest
+ )
except grpc.RpcError as e:
- raise SandboxError("Failed to determine missing blobs: {}".format(e)) from e
+ raise SandboxError(
+ "Failed to determine missing blobs: {}".format(e)
+ ) from e
# Check if any blobs are also missing locally (partial artifact)
# and pull them from the artifact cache.
@@ -378,13 +427,17 @@ class SandboxRemote(Sandbox):
if local_missing_blobs:
artifactcache.fetch_missing_blobs(project, local_missing_blobs)
except (grpc.RpcError, BstError) as e:
- raise SandboxError("Failed to pull missing blobs from artifact cache: {}".format(e)) from e
+ raise SandboxError(
+ "Failed to pull missing blobs from artifact cache: {}".format(e)
+ ) from e
# Now, push the missing blobs to the remote.
try:
cascache.send_blobs(casremote, missing_blobs)
except grpc.RpcError as e:
- raise SandboxError("Failed to push source directory to remote: {}".format(e)) from e
+ raise SandboxError(
+ "Failed to push source directory to remote: {}".format(e)
+ ) from e
# Push command and action
try:
@@ -400,15 +453,21 @@ class SandboxRemote(Sandbox):
# Next, try to create a communication channel to the BuildGrid server.
url = urlparse(self.exec_url)
if not url.port:
- raise SandboxError("You must supply a protocol and port number in the execution-service url, "
- "for example: http://buildservice:50051.")
- if url.scheme == 'http':
- channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
- elif url.scheme == 'https':
- channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.exec_credentials)
+ raise SandboxError(
+ "You must supply a protocol and port number in the execution-service url, "
+ "for example: http://buildservice:50051."
+ )
+ if url.scheme == "http":
+ channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
+ elif url.scheme == "https":
+ channel = grpc.secure_channel(
+ "{}:{}".format(url.hostname, url.port), self.exec_credentials
+ )
else:
- raise SandboxError("Remote execution currently only supports the 'http' protocol "
- "and '{}' was supplied.".format(url.scheme))
+ raise SandboxError(
+ "Remote execution currently only supports the 'http' protocol "
+ "and '{}' was supplied.".format(url.scheme)
+ )
# Now request to execute the action
with channel:
@@ -416,15 +475,18 @@ class SandboxRemote(Sandbox):
action_result = self._extract_action_result(operation)
# Get output of build
- self.process_job_output(action_result.output_directories, action_result.output_files,
- failure=action_result.exit_code != 0)
+ self.process_job_output(
+ action_result.output_directories,
+ action_result.output_files,
+ failure=action_result.exit_code != 0,
+ )
if stdout:
if action_result.stdout_raw:
- stdout.write(str(action_result.stdout_raw, 'utf-8', errors='ignore'))
+ stdout.write(str(action_result.stdout_raw, "utf-8", errors="ignore"))
if stderr:
if action_result.stderr_raw:
- stderr.write(str(action_result.stderr_raw, 'utf-8', errors='ignore'))
+ stderr.write(str(action_result.stderr_raw, "utf-8", errors="ignore"))
if action_result.exit_code != 0:
# A normal error during the build: the remote execution system
@@ -442,23 +504,31 @@ class SandboxRemote(Sandbox):
return None
url = urlparse(self.action_url)
if not url.port:
- raise SandboxError("You must supply a protocol and port number in the action-cache-service url, "
- "for example: http://buildservice:50051.")
- if url.scheme == 'http':
- channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
- elif url.scheme == 'https':
- channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.action_credentials)
+ raise SandboxError(
+ "You must supply a protocol and port number in the action-cache-service url, "
+ "for example: http://buildservice:50051."
+ )
+ if url.scheme == "http":
+ channel = grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
+ elif url.scheme == "https":
+ channel = grpc.secure_channel(
+ "{}:{}".format(url.hostname, url.port), self.action_credentials
+ )
with channel:
- request = remote_execution_pb2.GetActionResultRequest(instance_name=self.action_instance,
- action_digest=action_digest)
+ request = remote_execution_pb2.GetActionResultRequest(
+ instance_name=self.action_instance, action_digest=action_digest
+ )
stub = remote_execution_pb2_grpc.ActionCacheStub(channel)
try:
result = stub.GetActionResult(request)
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
- raise SandboxError("Failed to query action cache: {} ({})"
- .format(e.code(), e.details()))
+ raise SandboxError(
+ "Failed to query action cache: {} ({})".format(
+ e.code(), e.details()
+ )
+ )
return None
else:
self.info("Action result found in action cache")
@@ -466,19 +536,22 @@ class SandboxRemote(Sandbox):
def _create_command(self, command, working_directory, environment):
# Creates a command proto
- environment_variables = [remote_execution_pb2.Command.
- EnvironmentVariable(name=k, value=v)
- for (k, v) in environment.items()]
+ environment_variables = [
+ remote_execution_pb2.Command.EnvironmentVariable(name=k, value=v)
+ for (k, v) in environment.items()
+ ]
# Request the whole directory tree as output
output_directory = os.path.relpath(os.path.sep, start=working_directory)
- return remote_execution_pb2.Command(arguments=command,
- working_directory=working_directory,
- environment_variables=environment_variables,
- output_files=[],
- output_directories=[output_directory],
- platform=None)
+ return remote_execution_pb2.Command(
+ arguments=command,
+ working_directory=working_directory,
+ environment_variables=environment_variables,
+ output_files=[],
+ output_directories=[output_directory],
+ platform=None,
+ )
@staticmethod
def _extract_action_result(operation):
@@ -486,7 +559,7 @@ class SandboxRemote(Sandbox):
# Failure of remote execution, usually due to an error in BuildStream
raise SandboxError("No response returned from server")
- assert not operation.HasField('error') and operation.HasField('response')
+ assert not operation.HasField("error") and operation.HasField("response")
execution_response = remote_execution_pb2.ExecuteResponse()
# The response is expected to be an ExecutionResponse message
@@ -517,7 +590,6 @@ class SandboxRemote(Sandbox):
# Command batching by shell script generation.
#
class _SandboxRemoteBatch(_SandboxBatch):
-
def __init__(self, sandbox, main_group, flags, *, collect=None):
super().__init__(sandbox, main_group, flags, collect=collect)
@@ -532,7 +604,16 @@ class _SandboxRemoteBatch(_SandboxBatch):
self.main_group.execute(self)
first = self.first_command
- if first and self.sandbox.run(['sh', '-c', '-e', self.script], self.flags, cwd=first.cwd, env=first.env) != 0:
+ if (
+ first
+ and self.sandbox.run(
+ ["sh", "-c", "-e", self.script],
+ self.flags,
+ cwd=first.cwd,
+ env=first.env,
+ )
+ != 0
+ ):
raise SandboxCommandError("Command execution failed", collect=self.collect)
def execute_group(self, group):
@@ -563,13 +644,17 @@ class _SandboxRemoteBatch(_SandboxBatch):
self.env = command.env
# Actual command execution
- cmdline = ' '.join(shlex.quote(cmd) for cmd in command.command)
+ cmdline = " ".join(shlex.quote(cmd) for cmd in command.command)
self.script += "(set -ex; {})".format(cmdline)
# Error handling
label = command.label or cmdline
quoted_label = shlex.quote("'{}'".format(label))
- self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(quoted_label)
+ self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(
+ quoted_label
+ )
def execute_call(self, call):
- raise SandboxError("SandboxRemote does not support callbacks in command batches")
+ raise SandboxError(
+ "SandboxRemote does not support callbacks in command batches"
+ )
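
Both _run() and _check_action_cache() above open a gRPC channel from a user-supplied service URL using the same rules: the URL must carry an explicit port, "http" maps to an insecure channel and "https" to a TLS channel built from the configured credentials. The helper below is a condensed, hypothetical distillation of just that selection logic; the real methods additionally validate configuration and wrap failures in SandboxError.

from urllib.parse import urlparse

import grpc


def open_channel(url_string, credentials=None):
    url = urlparse(url_string)
    if not url.port:
        raise ValueError(
            "A protocol and port number are required, for example: http://buildservice:50051"
        )
    if url.scheme == "http":
        return grpc.insecure_channel("{}:{}".format(url.hostname, url.port))
    if url.scheme == "https":
        return grpc.secure_channel("{}:{}".format(url.hostname, url.port), credentials)
    raise ValueError("Unsupported URL scheme '{}'".format(url.scheme))
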
diff --git a/src/buildstream/sandbox/sandbox.py b/src/buildstream/sandbox/sandbox.py
index b4691fe3f..2ce556ab8 100644
--- a/src/buildstream/sandbox/sandbox.py
+++ b/src/buildstream/sandbox/sandbox.py
@@ -47,10 +47,11 @@ if TYPE_CHECKING:
# pylint: disable=cyclic-import
from .._context import Context
from .._project import Project
+
# pylint: enable=cyclic-import
-class SandboxFlags():
+class SandboxFlags:
"""Flags indicating how the sandbox should be run.
"""
@@ -100,49 +101,47 @@ class SandboxCommandError(SandboxError):
collect (str): An optional directory containing partial install contents
reason (str): An optional reason string (defaults to 'command-failed')
"""
- def __init__(self, message, *, detail=None, collect=None, reason='command-failed'):
+
+ def __init__(self, message, *, detail=None, collect=None, reason="command-failed"):
super().__init__(message, detail=detail, reason=reason)
self.collect = collect
-class Sandbox():
+class Sandbox:
"""Sandbox()
Sandbox programming interface for :class:`.Element` plugins.
"""
# Minimal set of devices for the sandbox
- DEVICES = [
- '/dev/urandom',
- '/dev/random',
- '/dev/zero',
- '/dev/null'
- ]
- _dummy_reasons = [] # type: List[str]
-
- def __init__(self, context: 'Context', project: 'Project', directory: str, **kwargs):
+ DEVICES = ["/dev/urandom", "/dev/random", "/dev/zero", "/dev/null"]
+ _dummy_reasons = [] # type: List[str]
+
+ def __init__(
+ self, context: "Context", project: "Project", directory: str, **kwargs
+ ):
self.__context = context
self.__project = project
- self.__directories = [] # type: List[Dict[str, Union[int, str]]]
- self.__cwd = None # type: Optional[str]
- self.__env = None # type: Optional[Dict[str, str]]
- self.__mount_sources = {} # type: Dict[str, str]
- self.__allow_real_directory = kwargs['allow_real_directory']
+ self.__directories = [] # type: List[Dict[str, Union[int, str]]]
+ self.__cwd = None # type: Optional[str]
+ self.__env = None # type: Optional[Dict[str, str]]
+ self.__mount_sources = {} # type: Dict[str, str]
+ self.__allow_real_directory = kwargs["allow_real_directory"]
self.__allow_run = True
# Plugin element full name for logging
- plugin = kwargs.get('plugin', None)
+ plugin = kwargs.get("plugin", None)
if plugin:
self.__element_name = plugin._get_full_name()
else:
self.__element_name = None
# Configuration from kwargs common to all subclasses
- self.__config = kwargs['config']
- self.__stdout = kwargs['stdout']
- self.__stderr = kwargs['stderr']
- self.__bare_directory = kwargs['bare_directory']
+ self.__config = kwargs["config"]
+ self.__stdout = kwargs["stdout"]
+ self.__stderr = kwargs["stderr"]
+ self.__bare_directory = kwargs["bare_directory"]
# Setup the directories. Root and output_directory should be
# available to subclasses, hence being single-underscore. The
@@ -153,15 +152,15 @@ class Sandbox():
self.__scratch = None
os.makedirs(self._root, exist_ok=True)
else:
- self._root = os.path.join(directory, 'root')
- self.__scratch = os.path.join(directory, 'scratch')
+ self._root = os.path.join(directory, "root")
+ self.__scratch = os.path.join(directory, "scratch")
for directory_ in [self._root, self.__scratch]:
os.makedirs(directory_, exist_ok=True)
- self._output_directory = None # type: Optional[str]
+ self._output_directory = None # type: Optional[str]
self._build_directory = None
self._build_directory_always = None
- self._vdir = None # type: Optional[Directory]
+ self._vdir = None # type: Optional[Directory]
self._usebuildtree = False
# This is set if anyone requests access to the underlying
@@ -255,18 +254,17 @@ class Sandbox():
Any marked directories will be read-write in the sandboxed
environment; only the root directory is allowed to be read-only.
"""
- self.__directories.append({
- 'directory': directory,
- 'artifact': artifact
- })
-
- def run(self,
- command: List[str],
- flags: int,
- *,
- cwd: Optional[str] = None,
- env: Optional[Dict[str, str]] = None,
- label: str = None) -> Optional[int]:
+ self.__directories.append({"directory": directory, "artifact": artifact})
+
+ def run(
+ self,
+ command: List[str],
+ flags: int,
+ *,
+ cwd: Optional[str] = None,
+ env: Optional[Dict[str, str]] = None,
+ label: str = None
+ ) -> Optional[int]:
"""Run a command in the sandbox.
If this is called outside a batch context, the command is immediately
@@ -314,8 +312,9 @@ class Sandbox():
command = [command]
if self.__batch:
- assert flags == self.__batch.flags, \
- "Inconsistent sandbox flags in single command batch"
+ assert (
+ flags == self.__batch.flags
+ ), "Inconsistent sandbox flags in single command batch"
batch_command = _SandboxBatchCommand(command, cwd=cwd, env=env, label=label)
@@ -326,7 +325,9 @@ class Sandbox():
return self._run(command, flags, cwd=cwd, env=env)
@contextmanager
- def batch(self, flags: int, *, label: str = None, collect: str = None) -> Generator[None, None, None]:
+ def batch(
+ self, flags: int, *, label: str = None, collect: str = None
+ ) -> Generator[None, None, None]:
"""Context manager for command batching
This provides a batch context that defers execution of commands until
@@ -352,8 +353,9 @@ class Sandbox():
if self.__batch:
# Nested batch
- assert flags == self.__batch.flags, \
- "Inconsistent sandbox flags in single command batch"
+ assert (
+ flags == self.__batch.flags
+ ), "Inconsistent sandbox flags in single command batch"
parent_group = self.__batch.current_group
parent_group.append(group)
@@ -394,8 +396,9 @@ class Sandbox():
# (int): The program exit code.
#
def _run(self, command, flags, *, cwd, env):
- raise ImplError("Sandbox of type '{}' does not implement _run()"
- .format(type(self).__name__))
+ raise ImplError(
+ "Sandbox of type '{}' does not implement _run()".format(type(self).__name__)
+ )
# _create_batch()
#
@@ -425,7 +428,7 @@ class Sandbox():
if not self.__allow_real_directory and not self.__allow_run:
return True
- return 'BST_CAS_DIRECTORIES' in os.environ
+ return "BST_CAS_DIRECTORIES" in os.environ
# _fetch_missing_blobs()
#
@@ -513,7 +516,7 @@ class Sandbox():
# what directory it is in makes it unnecessary to call the faulty
# getcwd.
env = dict(env)
- env['PWD'] = cwd
+ env["PWD"] = cwd
return env
@@ -528,7 +531,7 @@ class Sandbox():
# Returns:
# (str): The sandbox work directory
def _get_work_directory(self, *, cwd=None):
- return cwd or self.__cwd or '/'
+ return cwd or self.__cwd or "/"
# _get_scratch_directory()
#
@@ -542,7 +545,9 @@ class Sandbox():
# Returns:
# (str): The sandbox scratch directory
def _get_scratch_directory(self):
- assert not self.__bare_directory, "Scratch is not going to work with bare directories"
+ assert (
+ not self.__bare_directory
+ ), "Scratch is not going to work with bare directories"
return self.__scratch
# _get_output()
@@ -584,7 +589,7 @@ class Sandbox():
if len(command_as_parts) > 1:
return False
- for path in env.get('PATH').split(':'):
+ for path in env.get("PATH").split(":"):
path_as_parts = path.lstrip(os.sep).split(os.sep)
if vroot._exists(*path_as_parts, command, follow_symlinks=True):
return True
@@ -650,10 +655,7 @@ class Sandbox():
# details (str): optional, more details
def _issue_warning(self, message, detail=None):
self.__context.messenger.message(
- Message(MessageType.WARN,
- message,
- detail=detail
- )
+ Message(MessageType.WARN, message, detail=detail)
)
@@ -661,8 +663,7 @@ class Sandbox():
#
# A batch of sandbox commands.
#
-class _SandboxBatch():
-
+class _SandboxBatch:
def __init__(self, sandbox, main_group, flags, *, collect=None):
self.sandbox = sandbox
self.main_group = main_group
@@ -676,7 +677,9 @@ class _SandboxBatch():
def execute_group(self, group):
if group.label:
context = self.sandbox._get_context()
- cm = context.messenger.timed_activity(group.label, element_name=self.sandbox._get_element_name())
+ cm = context.messenger.timed_activity(
+ group.label, element_name=self.sandbox._get_element_name()
+ )
else:
cm = contextlib.suppress()
@@ -686,16 +689,25 @@ class _SandboxBatch():
def execute_command(self, command):
if command.label:
context = self.sandbox._get_context()
- message = Message(MessageType.STATUS, 'Running command', detail=command.label,
- element_name=self.sandbox._get_element_name())
+ message = Message(
+ MessageType.STATUS,
+ "Running command",
+ detail=command.label,
+ element_name=self.sandbox._get_element_name(),
+ )
context.messenger.message(message)
- exitcode = self.sandbox._run(command.command, self.flags, cwd=command.cwd, env=command.env)
+ exitcode = self.sandbox._run(
+ command.command, self.flags, cwd=command.cwd, env=command.env
+ )
if exitcode != 0:
- cmdline = ' '.join(shlex.quote(cmd) for cmd in command.command)
+ cmdline = " ".join(shlex.quote(cmd) for cmd in command.command)
label = command.label or cmdline
- raise SandboxCommandError("Command failed with exitcode {}".format(exitcode),
- detail=label, collect=self.collect)
+ raise SandboxCommandError(
+ "Command failed with exitcode {}".format(exitcode),
+ detail=label,
+ collect=self.collect,
+ )
def execute_call(self, call):
call.callback()
@@ -705,8 +717,7 @@ class _SandboxBatch():
#
# An item in a command batch.
#
-class _SandboxBatchItem():
-
+class _SandboxBatchItem:
def __init__(self, *, label=None):
self.label = label
@@ -716,7 +727,6 @@ class _SandboxBatchItem():
# A command item in a command batch.
#
class _SandboxBatchCommand(_SandboxBatchItem):
-
def __init__(self, command, *, cwd, env, label=None):
super().__init__(label=label)
@@ -733,7 +743,6 @@ class _SandboxBatchCommand(_SandboxBatchItem):
# A group in a command batch.
#
class _SandboxBatchGroup(_SandboxBatchItem):
-
def __init__(self, *, label=None):
super().__init__(label=label)
@@ -755,7 +764,6 @@ class _SandboxBatchGroup(_SandboxBatchItem):
# A call item in a command batch.
#
class _SandboxBatchCall(_SandboxBatchItem):
-
def __init__(self, callback):
super().__init__()
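
For callers of the public API reformatted above, the contract is that run() issued inside a batch() context is deferred until the batch exits, and every deferred command must use the same flags as the batch (hence the "Inconsistent sandbox flags" assertions). Below is a hypothetical helper in the style of an element plugin, assuming SandboxFlags is imported from the public buildstream package; it mirrors the pattern ScriptElement uses further down.

from buildstream import SandboxFlags  # assumed public export


def run_build_commands(sandbox, commands):
    # Each run() inside the batch is queued; the whole batch executes when
    # the context manager exits, and the flags must match the batch flags.
    with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="build commands"):
        for cmd in commands:
            sandbox.run(
                ["sh", "-c", "-e", cmd + "\n"], SandboxFlags.ROOT_READ_ONLY, label=cmd
            )
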
diff --git a/src/buildstream/scriptelement.py b/src/buildstream/scriptelement.py
index e78049b4a..d90e8b6ba 100644
--- a/src/buildstream/scriptelement.py
+++ b/src/buildstream/scriptelement.py
@@ -48,8 +48,8 @@ class ScriptElement(Element):
__install_root = "/"
__cwd = "/"
__root_read_only = False
- __commands = None # type: OrderedDict[str, List[str]]
- __layout = [] # type: List[Dict[str, Optional[str]]]
+ __commands = None # type: OrderedDict[str, List[str]]
+ __layout = [] # type: List[Dict[str, Optional[str]]]
# The compose element's output is its dependencies, so
# we must rebuild if the dependencies change even when
@@ -149,8 +149,7 @@ class ScriptElement(Element):
#
if not self.__layout:
self.__layout = []
- self.__layout.append({"element": element,
- "destination": destination})
+ self.__layout.append({"element": element, "destination": destination})
def add_commands(self, group_name: str, command_list: List[str]) -> None:
"""Adds a list of commands under the group-name.
@@ -183,11 +182,11 @@ class ScriptElement(Element):
def get_unique_key(self):
return {
- 'commands': self.__commands,
- 'cwd': self.__cwd,
- 'install-root': self.__install_root,
- 'layout': self.__layout,
- 'root-read-only': self.__root_read_only
+ "commands": self.__commands,
+ "cwd": self.__cwd,
+ "install-root": self.__install_root,
+ "layout": self.__layout,
+ "root-read-only": self.__root_read_only,
}
def configure_sandbox(self, sandbox):
@@ -206,14 +205,14 @@ class ScriptElement(Element):
# Mark the artifact directories in the layout
for item in self.__layout:
- destination = item['destination']
+ destination = item["destination"]
was_artifact = directories.get(destination, False)
- directories[destination] = item['element'] or was_artifact
+ directories[destination] = item["element"] or was_artifact
for directory, artifact in directories.items():
# Root does not need to be marked as it is always mounted
# with artifact (unless explicitly marked non-artifact)
- if directory != '/':
+ if directory != "/":
sandbox.mark_directory(directory, artifact=artifact)
def stage(self, sandbox):
@@ -222,13 +221,16 @@ class ScriptElement(Element):
if not self.__layout:
# if no layout set, stage all dependencies into /
for build_dep in self.dependencies(Scope.BUILD, recurse=False):
- with self.timed_activity("Staging {} at /"
- .format(build_dep.name), silent_nested=True):
+ with self.timed_activity(
+ "Staging {} at /".format(build_dep.name), silent_nested=True
+ ):
build_dep.stage_dependency_artifacts(sandbox, Scope.RUN, path="/")
with sandbox.batch(SandboxFlags.NONE):
for build_dep in self.dependencies(Scope.BUILD, recurse=False):
- with self.timed_activity("Integrating {}".format(build_dep.name), silent_nested=True):
+ with self.timed_activity(
+ "Integrating {}".format(build_dep.name), silent_nested=True
+ ):
for dep in build_dep.dependencies(Scope.RUN):
dep.integrate(sandbox)
else:
@@ -236,40 +238,50 @@ class ScriptElement(Element):
for item in self.__layout:
# Skip layout members which don't stage an element
- if not item['element']:
+ if not item["element"]:
continue
- element = self.search(Scope.BUILD, item['element'])
- if item['destination'] == '/':
- with self.timed_activity("Staging {} at /".format(element.name),
- silent_nested=True):
+ element = self.search(Scope.BUILD, item["element"])
+ if item["destination"] == "/":
+ with self.timed_activity(
+ "Staging {} at /".format(element.name), silent_nested=True
+ ):
element.stage_dependency_artifacts(sandbox, Scope.RUN)
else:
- with self.timed_activity("Staging {} at {}"
- .format(element.name, item['destination']),
- silent_nested=True):
+ with self.timed_activity(
+ "Staging {} at {}".format(element.name, item["destination"]),
+ silent_nested=True,
+ ):
virtual_dstdir = sandbox.get_virtual_directory()
- virtual_dstdir.descend(*item['destination'].lstrip(os.sep).split(os.sep), create=True)
- element.stage_dependency_artifacts(sandbox, Scope.RUN, path=item['destination'])
+ virtual_dstdir.descend(
+ *item["destination"].lstrip(os.sep).split(os.sep),
+ create=True
+ )
+ element.stage_dependency_artifacts(
+ sandbox, Scope.RUN, path=item["destination"]
+ )
with sandbox.batch(SandboxFlags.NONE):
for item in self.__layout:
# Skip layout members which don't stage an element
- if not item['element']:
+ if not item["element"]:
continue
- element = self.search(Scope.BUILD, item['element'])
+ element = self.search(Scope.BUILD, item["element"])
# Integration commands can only be run for elements staged to /
- if item['destination'] == '/':
- with self.timed_activity("Integrating {}".format(element.name),
- silent_nested=True):
+ if item["destination"] == "/":
+ with self.timed_activity(
+ "Integrating {}".format(element.name), silent_nested=True
+ ):
for dep in element.dependencies(Scope.RUN):
dep.integrate(sandbox)
install_root_path_components = self.__install_root.lstrip(os.sep).split(os.sep)
- sandbox.get_virtual_directory().descend(*install_root_path_components, create=True)
+ sandbox.get_virtual_directory().descend(
+ *install_root_path_components, create=True
+ )
def assemble(self, sandbox):
@@ -283,9 +295,7 @@ class ScriptElement(Element):
for cmd in commands:
# Note the -e switch to 'sh' means to exit with an error
# if any untested command fails.
- sandbox.run(['sh', '-c', '-e', cmd + '\n'],
- flags,
- label=cmd)
+ sandbox.run(["sh", "-c", "-e", cmd + "\n"], flags, label=cmd)
# Return where the result can be collected from
return self.__install_root
@@ -297,18 +307,24 @@ class ScriptElement(Element):
def __validate_layout(self):
if self.__layout:
# Cannot proceed if layout is used, but none are for "/"
- root_defined = any([(entry['destination'] == '/') for entry in self.__layout])
+ root_defined = any(
+ [(entry["destination"] == "/") for entry in self.__layout]
+ )
if not root_defined:
- raise ElementError("{}: Using layout, but none are staged as '/'"
- .format(self))
+ raise ElementError(
+ "{}: Using layout, but none are staged as '/'".format(self)
+ )
# Cannot proceed if layout specifies an element that isn't part
# of the dependencies.
for item in self.__layout:
- if item['element']:
- if not self.search(Scope.BUILD, item['element']):
- raise ElementError("{}: '{}' in layout not found in dependencies"
- .format(self, item['element']))
+ if item["element"]:
+ if not self.search(Scope.BUILD, item["element"]):
+ raise ElementError(
+ "{}: '{}' in layout not found in dependencies".format(
+ self, item["element"]
+ )
+ )
def setup():
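
To tie the layout rules above together: at least one layout entry must be staged at "/", every element named in the layout must be a build dependency, and commands are grouped by name. The following is a hypothetical ScriptElement subclass, assuming the public layout_add(element, destination) wrapper around the __layout list shown here (add_commands() appears with its signature above); it is not a real BuildStream element kind.

from buildstream import ScriptElement


class InstallScript(ScriptElement):
    # Illustrative element kind, not part of BuildStream itself.
    def configure(self, node):
        # "/" must be covered by the layout, otherwise __validate_layout()
        # raises "Using layout, but none are staged as '/'".
        self.layout_add("base/sdk.bst", "/")
        self.layout_add("app/sources.bst", "/buildstream/input")
        self.add_commands(
            "install",
            ["make -C /buildstream/input install DESTDIR=/buildstream-install"],
        )


def setup():
    return InstallScript
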
diff --git a/src/buildstream/source.py b/src/buildstream/source.py
index 0312517c9..f8de12bc7 100644
--- a/src/buildstream/source.py
+++ b/src/buildstream/source.py
@@ -184,6 +184,7 @@ if TYPE_CHECKING:
# pylint: disable=cyclic-import
from ._context import Context
from ._project import Project
+
# pylint: enable=cyclic-import
@@ -197,16 +198,25 @@ class SourceError(BstError):
reason: An optional machine readable reason string, used for test cases
temporary: An indicator of whether the error may occur again if the operation is retried. (*Since: 1.2*)
"""
- def __init__(self,
- message: str,
- *,
- detail: Optional[str] = None,
- reason: Optional[str] = None,
- temporary: bool = False):
- super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason, temporary=temporary)
+
+ def __init__(
+ self,
+ message: str,
+ *,
+ detail: Optional[str] = None,
+ reason: Optional[str] = None,
+ temporary: bool = False
+ ):
+ super().__init__(
+ message,
+ detail=detail,
+ domain=ErrorDomain.SOURCE,
+ reason=reason,
+ temporary=temporary,
+ )
-class SourceFetcher():
+class SourceFetcher:
"""SourceFetcher()
This interface exists so that a source that downloads from multiple
@@ -222,6 +232,7 @@ class SourceFetcher():
for every URL found in the configuration data at
:func:`Plugin.configure() <buildstream.plugin.Plugin.configure>` time.
"""
+
def __init__(self):
self.__alias = None
@@ -243,7 +254,9 @@ class SourceFetcher():
Implementors should raise :class:`.SourceError` if the there is some
network error or if the source reference could not be matched.
"""
- raise ImplError("SourceFetcher '{}' does not implement fetch()".format(type(self)))
+ raise ImplError(
+ "SourceFetcher '{}' does not implement fetch()".format(type(self))
+ )
#############################################################
# Public Methods #
@@ -275,8 +288,9 @@ class Source(Plugin):
All Sources derive from this class, this interface defines how
the core will be interacting with Sources.
"""
+
# The defaults from the project
- __defaults = None # type: Optional[Dict[str, Any]]
+ __defaults = None # type: Optional[Dict[str, Any]]
BST_REQUIRES_PREVIOUS_SOURCES_TRACK = False
"""Whether access to previous sources is required during track
@@ -331,32 +345,48 @@ class Source(Plugin):
*Since: 1.91.2*
"""
- def __init__(self,
- context: 'Context',
- project: 'Project',
- meta: MetaSource,
- *,
- alias_override: Optional[Tuple[str, str]] = None,
- unique_id: Optional[int] = None):
+ def __init__(
+ self,
+ context: "Context",
+ project: "Project",
+ meta: MetaSource,
+ *,
+ alias_override: Optional[Tuple[str, str]] = None,
+ unique_id: Optional[int] = None
+ ):
provenance = meta.config.get_provenance()
# Set element_name member before parent init, as needed for debug messaging
- self.__element_name = meta.element_name # The name of the element owning this source
- super().__init__("{}-{}".format(meta.element_name, meta.element_index),
- context, project, provenance, "source", unique_id=unique_id)
+ self.__element_name = (
+ meta.element_name
+ ) # The name of the element owning this source
+ super().__init__(
+ "{}-{}".format(meta.element_name, meta.element_index),
+ context,
+ project,
+ provenance,
+ "source",
+ unique_id=unique_id,
+ )
- self.__element_index = meta.element_index # The index of the source in the owning element's source list
- self.__element_kind = meta.element_kind # The kind of the element owning this source
- self.__directory = meta.directory # Staging relative directory
- self.__consistency = Consistency.INCONSISTENT # Cached consistency state
- self.__meta_kind = meta.kind # The kind of this source, required for unpickling
+ self.__element_index = (
+ meta.element_index
+ ) # The index of the source in the owning element's source list
+ self.__element_kind = (
+ meta.element_kind
+ ) # The kind of the element owning this source
+ self.__directory = meta.directory # Staging relative directory
+ self.__consistency = Consistency.INCONSISTENT # Cached consistency state
+ self.__meta_kind = meta.kind # The kind of this source, required for unpickling
- self.__key = None # Cache key for source
+ self.__key = None # Cache key for source
# The alias_override is only set on a re-instantiated Source
- self.__alias_override = alias_override # Tuple of alias and its override to use instead
- self.__expected_alias = None # The primary alias
+ self.__alias_override = (
+ alias_override # Tuple of alias and its override to use instead
+ )
+ self.__expected_alias = None # The primary alias
# Set of marked download URLs
- self.__marked_urls = set() # type: Set[str]
+ self.__marked_urls = set() # type: Set[str]
# Collect the composited element configuration and
# ask the element to configure itself.
@@ -365,12 +395,12 @@ class Source(Plugin):
self.__first_pass = meta.first_pass
# cached values for commonly access values on the source
- self.__mirror_directory = None # type: Optional[str]
+ self.__mirror_directory = None # type: Optional[str]
self._configure(self.__config)
self.__digest = None
- COMMON_CONFIG_KEYS = ['kind', 'directory']
+ COMMON_CONFIG_KEYS = ["kind", "directory"]
"""Common source config keys
Source config keys that must not be accessed in configure(), and
@@ -386,7 +416,11 @@ class Source(Plugin):
Returns:
(:class:`.Consistency`): The source consistency
"""
- raise ImplError("Source plugin '{}' does not implement get_consistency()".format(self.get_kind()))
+ raise ImplError(
+ "Source plugin '{}' does not implement get_consistency()".format(
+ self.get_kind()
+ )
+ )
def load_ref(self, node: MappingNode) -> None:
"""Loads the *ref* for this Source from the specified *node*.
@@ -404,7 +438,9 @@ class Source(Plugin):
*Since: 1.2*
"""
- raise ImplError("Source plugin '{}' does not implement load_ref()".format(self.get_kind()))
+ raise ImplError(
+ "Source plugin '{}' does not implement load_ref()".format(self.get_kind())
+ )
def get_ref(self) -> SourceRef:
"""Fetch the internal ref, however it is represented
@@ -422,7 +458,9 @@ class Source(Plugin):
Implementations *must* return a ``None`` value in the case that
the ref was not loaded. E.g. a ``(None, None)`` tuple is not acceptable.
"""
- raise ImplError("Source plugin '{}' does not implement get_ref()".format(self.get_kind()))
+ raise ImplError(
+ "Source plugin '{}' does not implement get_ref()".format(self.get_kind())
+ )
def set_ref(self, ref: SourceRef, node: MappingNode) -> None:
"""Applies the internal ref, however it is represented
@@ -440,7 +478,9 @@ class Source(Plugin):
Implementors must support the special ``None`` value here to
allow clearing any existing ref.
"""
- raise ImplError("Source plugin '{}' does not implement set_ref()".format(self.get_kind()))
+ raise ImplError(
+ "Source plugin '{}' does not implement set_ref()".format(self.get_kind())
+ )
def track(self, **kwargs) -> SourceRef:
"""Resolve a new ref from the plugin's track option
@@ -486,7 +526,9 @@ class Source(Plugin):
Implementors should raise :class:`.SourceError` if the there is some
network error or if the source reference could not be matched.
"""
- raise ImplError("Source plugin '{}' does not implement fetch()".format(self.get_kind()))
+ raise ImplError(
+ "Source plugin '{}' does not implement fetch()".format(self.get_kind())
+ )
def stage(self, directory: Union[str, Directory]) -> None:
"""Stage the sources to a directory
@@ -503,7 +545,9 @@ class Source(Plugin):
Implementors should raise :class:`.SourceError` when encountering
some system error.
"""
- raise ImplError("Source plugin '{}' does not implement stage()".format(self.get_kind()))
+ raise ImplError(
+ "Source plugin '{}' does not implement stage()".format(self.get_kind())
+ )
def init_workspace(self, directory: str) -> None:
"""Initialises a new workspace
@@ -578,7 +622,9 @@ class Source(Plugin):
return self.__mirror_directory
- def translate_url(self, url: str, *, alias_override: Optional[str] = None, primary: bool = True) -> str:
+ def translate_url(
+ self, url: str, *, alias_override: Optional[str] = None, primary: bool = True
+ ) -> str:
"""Translates the given url which may be specified with an alias
into a fully qualified url.
@@ -611,8 +657,8 @@ class Source(Plugin):
# specific alias, so that sources that fetch from multiple
# URLs and use different aliases default to only overriding
# one alias, rather than getting confused.
- override_alias = self.__alias_override[0] # type: ignore
- override_url = self.__alias_override[1] # type: ignore
+ override_alias = self.__alias_override[0] # type: ignore
+ override_url = self.__alias_override[1] # type: ignore
if url_alias == override_alias:
url = override_url + url_body
return url
@@ -642,9 +688,10 @@ class Source(Plugin):
if primary:
expected_alias = _extract_alias(url)
- assert (self.__expected_alias is None or
- self.__expected_alias == expected_alias), \
- "Primary URL marked twice with different URLs"
+ assert (
+ self.__expected_alias is None
+ or self.__expected_alias == expected_alias
+ ), "Primary URL marked twice with different URLs"
self.__expected_alias = expected_alias
@@ -664,8 +711,9 @@ class Source(Plugin):
# the case for git submodules which might be automatically
# discovered.
#
- assert (url in self.__marked_urls or not _extract_alias(url)), \
- "URL was not seen at configure time: {}".format(url)
+ assert url in self.__marked_urls or not _extract_alias(
+ url
+ ), "URL was not seen at configure time: {}".format(url)
def get_project_directory(self) -> str:
"""Fetch the project base directory
@@ -753,7 +801,9 @@ class Source(Plugin):
# Source consistency interrogations are silent.
context = self._get_context()
with context.messenger.silence():
- self.__consistency = self.get_consistency() # pylint: disable=assignment-from-no-return
+ self.__consistency = (
+ self.get_consistency()
+ ) # pylint: disable=assignment-from-no-return
# Give the Source an opportunity to validate the cached
# sources as soon as the Source becomes Consistency.CACHED.
@@ -776,7 +826,9 @@ class Source(Plugin):
if self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH:
self.__ensure_previous_sources(previous_sources)
with self.__stage_previous_sources(previous_sources) as staging_directory:
- self.__do_fetch(previous_sources_dir=self.__ensure_directory(staging_directory))
+ self.__do_fetch(
+ previous_sources_dir=self.__ensure_directory(staging_directory)
+ )
else:
self.__do_fetch()
@@ -790,8 +842,9 @@ class Source(Plugin):
if self.BST_KEY_REQUIRES_STAGE:
# _get_unique_key should be called before _stage
assert self.__digest is not None
- cas_dir = CasBasedDirectory(self._get_context().get_cascache(),
- digest=self.__digest)
+ cas_dir = CasBasedDirectory(
+ self._get_context().get_cascache(), digest=self.__digest
+ )
directory.import_files(cas_dir)
else:
self.stage(directory)
@@ -811,11 +864,13 @@ class Source(Plugin):
#
def _get_unique_key(self):
key = {}
- key['directory'] = self.__directory
+ key["directory"] = self.__directory
if self.BST_KEY_REQUIRES_STAGE:
- key['unique'] = self._stage_into_cas()
+ key["unique"] = self._stage_into_cas()
else:
- key['unique'] = self.get_unique_key() # pylint: disable=assignment-from-no-return
+ key[
+ "unique"
+ ] = self.get_unique_key() # pylint: disable=assignment-from-no-return
return key
# _project_refs():
@@ -828,7 +883,7 @@ class Source(Plugin):
#
def _project_refs(self, project):
element_kind = self.__element_kind
- if element_kind == 'junction':
+ if element_kind == "junction":
return project.junction_refs
return project.refs
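
The ImplError fallbacks reformatted above define the contract a Source plugin must satisfy: report consistency, load/get/set its ref, fetch and stage. The sketch below shows the minimal shape of those overrides in a schematic, hypothetical kind; it assumes Source and Consistency are importable from the top-level buildstream package and that MappingNode supports get_str() and item assignment, as other plugins in this tree use them. Real plugins also implement tracking, error handling and actual staging.

from buildstream import Consistency, Source


class TrivialSource(Source):
    # Illustrative only; not a real BuildStream source kind.
    def configure(self, node):
        self.ref = node.get_str("ref", None)

    def preflight(self):
        pass

    def get_unique_key(self):
        return self.ref

    def get_consistency(self):
        # Schematic: pretend that having a ref means the source is cached.
        return Consistency.CACHED if self.ref else Consistency.INCONSISTENT

    def load_ref(self, node):
        self.ref = node.get_str("ref", None)

    def get_ref(self):
        return self.ref

    def set_ref(self, ref, node):
        self.ref = ref
        node["ref"] = ref

    def fetch(self):
        pass

    def stage(self, directory):
        pass


def setup():
    return TrivialSource
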
@@ -863,12 +918,18 @@ class Source(Plugin):
try:
self.load_ref(ref_node)
except ImplError as e:
- raise SourceError("{}: Storing refs in project.refs is not supported by '{}' sources"
- .format(self, self.get_kind()),
- reason="unsupported-load-ref") from e
+ raise SourceError(
+ "{}: Storing refs in project.refs is not supported by '{}' sources".format(
+ self, self.get_kind()
+ ),
+ reason="unsupported-load-ref",
+ ) from e
# If the main project overrides the ref, use the override
- if project is not toplevel and toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
+ if (
+ project is not toplevel
+ and toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS
+ ):
refs = self._project_refs(toplevel)
ref_node = refs.lookup_ref(project.name, element_name, element_idx)
if ref_node is not None:
@@ -926,24 +987,37 @@ class Source(Plugin):
#
node = {}
if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
- node = toplevel_refs.lookup_ref(project.name, element_name, element_idx, write=True)
+ node = toplevel_refs.lookup_ref(
+ project.name, element_name, element_idx, write=True
+ )
if project is toplevel and not node:
node = provenance._node
# Ensure the node is not from a junction
- if not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS and provenance._project is not toplevel:
+ if (
+ not toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS
+ and provenance._project is not toplevel
+ ):
if provenance._project is project:
- self.warn("{}: Not persisting new reference in junctioned project".format(self))
+ self.warn(
+ "{}: Not persisting new reference in junctioned project".format(
+ self
+ )
+ )
elif provenance._project is None:
assert provenance._filename == ""
assert provenance._shortname == ""
- raise SourceError("{}: Error saving source reference to synthetic node."
- .format(self))
+ raise SourceError(
+ "{}: Error saving source reference to synthetic node.".format(self)
+ )
else:
- raise SourceError("{}: Cannot track source in a fragment from a junction"
- .format(provenance._shortname),
- reason="tracking-junction-fragment")
+ raise SourceError(
+ "{}: Cannot track source in a fragment from a junction".format(
+ provenance._shortname
+ ),
+ reason="tracking-junction-fragment",
+ )
#
# Step 2 - Set the ref in memory, and determine changed state
@@ -968,13 +1042,13 @@ class Source(Plugin):
actions = {}
for k, v in clean.items():
if k not in to_modify:
- actions[k] = 'del'
+ actions[k] = "del"
else:
if v != to_modify[k]:
- actions[k] = 'mod'
+ actions[k] = "mod"
for k in to_modify.keys():
if k not in clean:
- actions[k] = 'add'
+ actions[k] = "add"
def walk_container(container, path):
# For each step along path, synthesise if we need to.
@@ -987,7 +1061,9 @@ class Source(Plugin):
if type(step) is str: # pylint: disable=unidiomatic-typecheck
# handle dict container
if step not in container:
- if type(next_step) is str: # pylint: disable=unidiomatic-typecheck
+ if (
+ type(next_step) is str
+ ): # pylint: disable=unidiomatic-typecheck
container[step] = {}
else:
container[step] = []
@@ -1002,20 +1078,19 @@ class Source(Plugin):
def process_value(action, container, path, key, new_value):
container = walk_container(container, path)
- if action == 'del':
+ if action == "del":
del container[key]
- elif action == 'mod':
+ elif action == "mod":
container[key] = new_value
- elif action == 'add':
+ elif action == "add":
container[key] = new_value
else:
- assert False, \
- "BUG: Unknown action: {}".format(action)
+ assert False, "BUG: Unknown action: {}".format(action)
roundtrip_cache = {}
for key, action in actions.items():
# Obtain the top level node and its file
- if action == 'add':
+ if action == "add":
provenance = node.get_provenance()
else:
provenance = node.get_node(key).get_provenance()
@@ -1023,7 +1098,7 @@ class Source(Plugin):
toplevel_node = provenance._toplevel
# Get the path to whatever changed
- if action == 'add':
+ if action == "add":
path = toplevel_node._find(node)
else:
full_path = toplevel_node._find(node.get_node(key))
@@ -1032,10 +1107,9 @@ class Source(Plugin):
roundtrip_file = roundtrip_cache.get(provenance._filename)
if not roundtrip_file:
- roundtrip_file = roundtrip_cache[provenance._filename] = _yaml.roundtrip_load(
- provenance._filename,
- allow_missing=True
- )
+ roundtrip_file = roundtrip_cache[
+ provenance._filename
+ ] = _yaml.roundtrip_load(provenance._filename, allow_missing=True)
# Get the value of the round trip file that we need to change
process_value(action, roundtrip_file, path, key, to_modify.get(key))
@@ -1048,9 +1122,12 @@ class Source(Plugin):
try:
_yaml.roundtrip_dump(data, filename)
except OSError as e:
- raise SourceError("{}: Error saving source reference to '{}': {}"
- .format(self, filename, e),
- reason="save-ref-error") from e
+ raise SourceError(
+ "{}: Error saving source reference to '{}': {}".format(
+ self, filename, e
+ ),
+ reason="save-ref-error",
+ ) from e
return True
@@ -1059,7 +1136,7 @@ class Source(Plugin):
# Args:
# previous_sources (list): List of Sources listed prior to this source
#
- def _track(self, previous_sources: List['Source']) -> SourceRef:
+ def _track(self, previous_sources: List["Source"]) -> SourceRef:
if self.BST_KEY_REQUIRES_STAGE:
# ensure that these sources have a key after tracking
self._get_unique_key()
@@ -1067,9 +1144,10 @@ class Source(Plugin):
if self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK:
self.__ensure_previous_sources(previous_sources)
- with self.__stage_previous_sources(previous_sources) \
- as staging_directory:
- new_ref = self.__do_track(previous_sources_dir=self.__ensure_directory(staging_directory))
+ with self.__stage_previous_sources(previous_sources) as staging_directory:
+ new_ref = self.__do_track(
+ previous_sources_dir=self.__ensure_directory(staging_directory)
+ )
else:
new_ref = self.__do_track()
@@ -1096,7 +1174,10 @@ class Source(Plugin):
# (bool): Whether this source requires access to previous sources
#
def _requires_previous_sources(self):
- return self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK or self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH
+ return (
+ self.BST_REQUIRES_PREVIOUS_SOURCES_TRACK
+ or self.BST_REQUIRES_PREVIOUS_SOURCES_FETCH
+ )
# Returns the alias if it's defined in the project
def _get_alias(self):
@@ -1129,9 +1210,7 @@ class Source(Plugin):
# Gives a ref path that points to where sources are kept in the CAS
def _get_source_name(self):
# @ is used to prevent conflicts with project names
- return "{}/{}".format(
- self.get_kind(),
- self._key)
+ return "{}/{}".format(self.get_kind(), self._key)
def _get_brief_display_key(self):
context = self._get_context()
@@ -1204,9 +1283,13 @@ class Source(Plugin):
meta.first_pass = self.__first_pass
- clone = source_kind(context, project, meta,
- alias_override=(alias, uri),
- unique_id=self._unique_id)
+ clone = source_kind(
+ context,
+ project,
+ meta,
+ alias_override=(alias, uri),
+ unique_id=self._unique_id,
+ )
# Do the necessary post instantiation routines here
#
@@ -1324,7 +1407,9 @@ class Source(Plugin):
# NOTE: We are assuming here that tracking only requires substituting the
# first alias used
- for uri in reversed(project.get_alias_uris(alias, first_pass=self.__first_pass)):
+ for uri in reversed(
+ project.get_alias_uris(alias, first_pass=self.__first_pass)
+ ):
new_source = self.__clone_for_uri(uri)
try:
ref = new_source.track(**kwargs) # pylint: disable=assignment-from-none
@@ -1346,20 +1431,22 @@ class Source(Plugin):
try:
os.makedirs(directory, exist_ok=True)
except OSError as e:
- raise SourceError("Failed to create staging directory: {}"
- .format(e),
- reason="ensure-stage-dir-fail") from e
+ raise SourceError(
+ "Failed to create staging directory: {}".format(e),
+ reason="ensure-stage-dir-fail",
+ ) from e
else:
if self.__directory is not None:
try:
directory = directory.descend(
- *self.__directory.lstrip(os.sep).split(os.sep),
- create=True)
+ *self.__directory.lstrip(os.sep).split(os.sep), create=True
+ )
except VirtualDirectoryError as e:
- raise SourceError("Failed to descend into staging directory: {}"
- .format(e),
- reason="ensure-stage-dir-fail") from e
+ raise SourceError(
+ "Failed to descend into staging directory: {}".format(e),
+ reason="ensure-stage-dir-fail",
+ ) from e
return directory
@@ -1377,7 +1464,7 @@ class Source(Plugin):
#
@classmethod
def __extract_config(cls, meta):
- config = cls.__defaults.get_mapping('config', default={})
+ config = cls.__defaults.get_mapping("config", default={})
config = config.clone()
meta.config._composite(config)
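# --- Illustrative sketch (not part of the diff above) -----------------------
# The hunks above reformat Source._save_ref(), which first classifies every
# key of the in-memory ref as "add", "mod" or "del" against the round-tripped
# YAML and then walks a container path to apply each change.  The snippet
# below is a minimal standalone rendering of that classification step, using
# plain dicts instead of BuildStream's provenance-carrying nodes; the names
# here are hypothetical and only meant to mirror the control flow in the diff.

def compute_actions(clean, to_modify):
    actions = {}
    for k, v in clean.items():
        if k not in to_modify:
            actions[k] = "del"          # key disappeared from the new ref
        elif v != to_modify[k]:
            actions[k] = "mod"          # key kept but value changed
    for k in to_modify:
        if k not in clean:
            actions[k] = "add"          # key only present in the new ref
    return actions


if __name__ == "__main__":
    clean = {"ref": "abc123", "track": "master"}
    to_modify = {"ref": "def456"}
    # -> {'ref': 'mod', 'track': 'del'}
    print(compute_actions(clean, to_modify))
# -----------------------------------------------------------------------------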
diff --git a/src/buildstream/storage/_casbaseddirectory.py b/src/buildstream/storage/_casbaseddirectory.py
index 3786f25b6..3b248f3ae 100644
--- a/src/buildstream/storage/_casbaseddirectory.py
+++ b/src/buildstream/storage/_casbaseddirectory.py
@@ -38,10 +38,20 @@ from ._filebaseddirectory import FileBasedDirectory
from ..utils import FileListResult, BST_ARBITRARY_TIMESTAMP
-class IndexEntry():
+class IndexEntry:
""" Directory entry used in CasBasedDirectory.index """
- def __init__(self, name, entrytype, *, digest=None, target=None, is_executable=False,
- buildstream_object=None, modified=False):
+
+ def __init__(
+ self,
+ name,
+ entrytype,
+ *,
+ digest=None,
+ target=None,
+ is_executable=False,
+ buildstream_object=None,
+ modified=False
+ ):
self.name = name
self.type = entrytype
self.digest = digest
@@ -52,8 +62,9 @@ class IndexEntry():
def get_directory(self, parent):
if not self.buildstream_object:
- self.buildstream_object = CasBasedDirectory(parent.cas_cache, digest=self.digest,
- parent=parent, filename=self.name)
+ self.buildstream_object = CasBasedDirectory(
+ parent.cas_cache, digest=self.digest, parent=parent, filename=self.name
+ )
self.digest = None
return self.buildstream_object
@@ -69,6 +80,7 @@ class IndexEntry():
# which is meant to be unimplemented.
# pylint: disable=super-init-not-called
+
class CasBasedDirectory(Directory):
"""
CAS-based directories can have two names; one is a 'common name' which has no effect
@@ -87,7 +99,15 @@ class CasBasedDirectory(Directory):
_pb2_path_sep = "/"
_pb2_absolute_path_prefix = "/"
- def __init__(self, cas_cache, *, digest=None, parent=None, common_name="untitled", filename=None):
+ def __init__(
+ self,
+ cas_cache,
+ *,
+ digest=None,
+ parent=None,
+ common_name="untitled",
+ filename=None
+ ):
self.filename = filename
self.common_name = common_name
self.cas_cache = cas_cache
@@ -100,21 +120,28 @@ class CasBasedDirectory(Directory):
def _populate_index(self, digest):
try:
pb2_directory = remote_execution_pb2.Directory()
- with open(self.cas_cache.objpath(digest), 'rb') as f:
+ with open(self.cas_cache.objpath(digest), "rb") as f:
pb2_directory.ParseFromString(f.read())
except FileNotFoundError as e:
- raise VirtualDirectoryError("Directory not found in local cache: {}".format(e)) from e
+ raise VirtualDirectoryError(
+ "Directory not found in local cache: {}".format(e)
+ ) from e
for entry in pb2_directory.directories:
- self.index[entry.name] = IndexEntry(entry.name, _FileType.DIRECTORY,
- digest=entry.digest)
+ self.index[entry.name] = IndexEntry(
+ entry.name, _FileType.DIRECTORY, digest=entry.digest
+ )
for entry in pb2_directory.files:
- self.index[entry.name] = IndexEntry(entry.name, _FileType.REGULAR_FILE,
- digest=entry.digest,
- is_executable=entry.is_executable)
+ self.index[entry.name] = IndexEntry(
+ entry.name,
+ _FileType.REGULAR_FILE,
+ digest=entry.digest,
+ is_executable=entry.is_executable,
+ )
for entry in pb2_directory.symlinks:
- self.index[entry.name] = IndexEntry(entry.name, _FileType.SYMLINK,
- target=entry.target)
+ self.index[entry.name] = IndexEntry(
+ entry.name, _FileType.SYMLINK, target=entry.target
+ )
def _find_self_in_parent(self):
assert self.parent is not None
@@ -129,15 +156,20 @@ class CasBasedDirectory(Directory):
newdir = CasBasedDirectory(self.cas_cache, parent=self, filename=name)
- self.index[name] = IndexEntry(name, _FileType.DIRECTORY, buildstream_object=newdir)
+ self.index[name] = IndexEntry(
+ name, _FileType.DIRECTORY, buildstream_object=newdir
+ )
self.__invalidate_digest()
return newdir
def _add_file(self, basename, filename, modified=False, can_link=False):
- entry = IndexEntry(filename, _FileType.REGULAR_FILE,
- modified=modified or filename in self.index)
+ entry = IndexEntry(
+ filename,
+ _FileType.REGULAR_FILE,
+ modified=modified or filename in self.index,
+ )
path = os.path.join(basename, filename)
entry.digest = self.cas_cache.add_object(path=path, link_directly=can_link)
entry.is_executable = os.access(path, os.X_OK)
@@ -146,10 +178,14 @@ class CasBasedDirectory(Directory):
self.__invalidate_digest()
def _copy_link_from_filesystem(self, basename, filename):
- self._add_new_link_direct(filename, os.readlink(os.path.join(basename, filename)))
+ self._add_new_link_direct(
+ filename, os.readlink(os.path.join(basename, filename))
+ )
def _add_new_link_direct(self, name, target):
- self.index[name] = IndexEntry(name, _FileType.SYMLINK, target=target, modified=name in self.index)
+ self.index[name] = IndexEntry(
+ name, _FileType.SYMLINK, target=target, modified=name in self.index
+ )
self.__invalidate_digest()
@@ -201,19 +237,25 @@ class CasBasedDirectory(Directory):
linklocation = entry.target
newpaths = linklocation.split(os.path.sep)
if os.path.isabs(linklocation):
- current_dir = current_dir.find_root().descend(*newpaths, follow_symlinks=True)
+ current_dir = current_dir.find_root().descend(
+ *newpaths, follow_symlinks=True
+ )
else:
- current_dir = current_dir.descend(*newpaths, follow_symlinks=True)
+ current_dir = current_dir.descend(
+ *newpaths, follow_symlinks=True
+ )
else:
- error = "Cannot descend into {}, which is a '{}' in the directory {}"
- raise VirtualDirectoryError(error.format(path,
- current_dir.index[path].type,
- current_dir),
- reason="not-a-directory")
+ error = (
+ "Cannot descend into {}, which is a '{}' in the directory {}"
+ )
+ raise VirtualDirectoryError(
+ error.format(path, current_dir.index[path].type, current_dir),
+ reason="not-a-directory",
+ )
else:
- if path == '.':
+ if path == ".":
continue
- elif path == '..':
+ elif path == "..":
if current_dir.parent is not None:
current_dir = current_dir.parent
# In POSIX /.. == / so just stay at the root dir
@@ -222,8 +264,10 @@ class CasBasedDirectory(Directory):
current_dir = current_dir._add_directory(path)
else:
error = "'{}' not found in {}"
- raise VirtualDirectoryError(error.format(path, str(current_dir)),
- reason="directory-not-found")
+ raise VirtualDirectoryError(
+ error.format(path, str(current_dir)),
+ reason="directory-not-found",
+ )
return current_dir
@@ -253,7 +297,9 @@ class CasBasedDirectory(Directory):
fileListResult.overwritten.append(relative_pathname)
return True
- def _partial_import_cas_into_cas(self, source_directory, filter_callback, *, path_prefix="", origin=None, result):
+ def _partial_import_cas_into_cas(
+ self, source_directory, filter_callback, *, path_prefix="", origin=None, result
+ ):
""" Import files from a CAS-based directory. """
if origin is None:
origin = self
@@ -272,7 +318,9 @@ class CasBasedDirectory(Directory):
# we can import the whole source directory by digest instead
# of importing each directory entry individually.
subdir_digest = entry.get_digest()
- dest_entry = IndexEntry(name, _FileType.DIRECTORY, digest=subdir_digest)
+ dest_entry = IndexEntry(
+ name, _FileType.DIRECTORY, digest=subdir_digest
+ )
self.index[name] = dest_entry
self.__invalidate_digest()
@@ -289,7 +337,9 @@ class CasBasedDirectory(Directory):
else:
subdir = dest_entry.get_directory(self)
- subdir.__add_files_to_result(path_prefix=relative_pathname, result=result)
+ subdir.__add_files_to_result(
+ path_prefix=relative_pathname, result=result
+ )
else:
src_subdir = source_directory.descend(name)
if src_subdir == origin:
@@ -299,12 +349,19 @@ class CasBasedDirectory(Directory):
dest_subdir = self.descend(name, create=create_subdir)
except VirtualDirectoryError:
filetype = self.index[name].type
- raise VirtualDirectoryError('Destination is a {}, not a directory: /{}'
- .format(filetype, relative_pathname))
-
- dest_subdir._partial_import_cas_into_cas(src_subdir, filter_callback,
- path_prefix=relative_pathname,
- origin=origin, result=result)
+ raise VirtualDirectoryError(
+ "Destination is a {}, not a directory: /{}".format(
+ filetype, relative_pathname
+ )
+ )
+
+ dest_subdir._partial_import_cas_into_cas(
+ src_subdir,
+ filter_callback,
+ path_prefix=relative_pathname,
+ origin=origin,
+ result=result,
+ )
if filter_callback and not filter_callback(relative_pathname):
if is_dir and create_subdir and dest_subdir.is_empty():
@@ -317,20 +374,28 @@ class CasBasedDirectory(Directory):
if not is_dir:
if self._check_replacement(name, relative_pathname, result):
if entry.type == _FileType.REGULAR_FILE:
- self.index[name] = IndexEntry(name, _FileType.REGULAR_FILE,
- digest=entry.digest,
- is_executable=entry.is_executable,
- modified=True)
+ self.index[name] = IndexEntry(
+ name,
+ _FileType.REGULAR_FILE,
+ digest=entry.digest,
+ is_executable=entry.is_executable,
+ modified=True,
+ )
self.__invalidate_digest()
else:
assert entry.type == _FileType.SYMLINK
self._add_new_link_direct(name=name, target=entry.target)
result.files_written.append(relative_pathname)
- def import_files(self, external_pathspec, *,
- filter_callback=None,
- report_written=True, update_mtime=False,
- can_link=False):
+ def import_files(
+ self,
+ external_pathspec,
+ *,
+ filter_callback=None,
+ report_written=True,
+ update_mtime=False,
+ can_link=False
+ ):
""" See superclass Directory for arguments """
result = FileListResult()
@@ -348,7 +413,9 @@ class CasBasedDirectory(Directory):
external_pathspec = CasBasedDirectory(self.cas_cache, digest=digest)
assert isinstance(external_pathspec, CasBasedDirectory)
- self._partial_import_cas_into_cas(external_pathspec, filter_callback, result=result)
+ self._partial_import_cas_into_cas(
+ external_pathspec, filter_callback, result=result
+ )
# TODO: No notice is taken of report_written or update_mtime.
# Current behaviour is to fully populate the report, which is inefficient,
@@ -358,13 +425,16 @@ class CasBasedDirectory(Directory):
def import_single_file(self, external_pathspec):
result = FileListResult()
- if self._check_replacement(os.path.basename(external_pathspec),
- os.path.dirname(external_pathspec),
- result):
- self._add_file(os.path.dirname(external_pathspec),
- os.path.basename(external_pathspec),
- modified=os.path.basename(external_pathspec)
- in result.overwritten)
+ if self._check_replacement(
+ os.path.basename(external_pathspec),
+ os.path.dirname(external_pathspec),
+ result,
+ ):
+ self._add_file(
+ os.path.dirname(external_pathspec),
+ os.path.basename(external_pathspec),
+ modified=os.path.basename(external_pathspec) in result.overwritten,
+ )
result.files_written.append(external_pathspec)
return result
@@ -425,7 +495,9 @@ class CasBasedDirectory(Directory):
f = StringIO(entry.target)
tarfile.addfile(tarinfo, f)
else:
- raise VirtualDirectoryError("can not export file type {} to tar".format(entry.type))
+ raise VirtualDirectoryError(
+ "can not export file type {} to tar".format(entry.type)
+ )
def _mark_changed(self):
""" It should not be possible to externally modify a CAS-based
@@ -516,10 +588,12 @@ class CasBasedDirectory(Directory):
"""
- file_list = list(filter(lambda i: i[1].type != _FileType.DIRECTORY,
- self.index.items()))
- directory_list = filter(lambda i: i[1].type == _FileType.DIRECTORY,
- self.index.items())
+ file_list = list(
+ filter(lambda i: i[1].type != _FileType.DIRECTORY, self.index.items())
+ )
+ directory_list = filter(
+ lambda i: i[1].type == _FileType.DIRECTORY, self.index.items()
+ )
if prefix != "":
yield prefix
@@ -529,7 +603,9 @@ class CasBasedDirectory(Directory):
for (k, v) in sorted(directory_list):
subdir = v.get_directory(self)
- yield from subdir._list_prefixed_relative_paths(prefix=os.path.join(prefix, k))
+ yield from subdir._list_prefixed_relative_paths(
+ prefix=os.path.join(prefix, k)
+ )
def walk(self):
"""Provide a list of dictionaries containing information about the files.
@@ -553,10 +629,7 @@ class CasBasedDirectory(Directory):
"""
for leaf in sorted(self.index.keys()):
entry = self.index[leaf]
- info = {
- "name": os.path.join(prefix, leaf),
- "type": entry.type
- }
+ info = {"name": os.path.join(prefix, leaf), "type": entry.type}
if entry.type == _FileType.REGULAR_FILE:
info["executable"] = entry.is_executable
info["size"] = self.get_size()
@@ -599,8 +672,10 @@ class CasBasedDirectory(Directory):
def _get_underlying_directory(self):
""" There is no underlying directory for a CAS-backed directory, so
throw an exception. """
- raise VirtualDirectoryError("_get_underlying_directory was called on a CAS-backed directory," +
- " which has no underlying directory.")
+ raise VirtualDirectoryError(
+ "_get_underlying_directory was called on a CAS-backed directory,"
+ + " which has no underlying directory."
+ )
# _get_digest():
#
@@ -637,7 +712,9 @@ class CasBasedDirectory(Directory):
symlinknode.name = name
symlinknode.target = entry.target
- self.__digest = self.cas_cache.add_object(buffer=pb2_directory.SerializeToString())
+ self.__digest = self.cas_cache.add_object(
+ buffer=pb2_directory.SerializeToString()
+ )
return self.__digest
@@ -652,7 +729,9 @@ class CasBasedDirectory(Directory):
linklocation = target.target
newpath = linklocation.split(os.path.sep)
if os.path.isabs(linklocation):
- return subdir.find_root()._exists(*newpath, follow_symlinks=True)
+ return subdir.find_root()._exists(
+ *newpath, follow_symlinks=True
+ )
return subdir._exists(*newpath, follow_symlinks=True)
return False
except VirtualDirectoryError:
@@ -671,6 +750,8 @@ class CasBasedDirectory(Directory):
if entry.type == _FileType.DIRECTORY:
subdir = self.descend(name)
- subdir.__add_files_to_result(path_prefix=relative_pathname, result=result)
+ subdir.__add_files_to_result(
+ path_prefix=relative_pathname, result=result
+ )
else:
result.files_written.append(relative_pathname)
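# --- Illustrative sketch (not part of the diff above) -----------------------
# CasBasedDirectory.descend(), reformatted above, resolves path components one
# at a time: "." is skipped, ".." moves to the parent (staying at the root, as
# in POSIX), and missing entries are either created (create=True) or raise an
# error.  A rough standalone model of that traversal, with a hypothetical
# SketchDirectory class standing in for the CAS-backed index:

class SketchDirectory:
    def __init__(self, name="/", parent=None):
        self.name = name
        self.parent = parent
        self.children = {}

    def descend(self, *paths, create=False):
        current = self
        for path in paths:
            if path == ".":
                continue
            if path == "..":
                # In POSIX /.. == /, so just stay at the root dir
                if current.parent is not None:
                    current = current.parent
                continue
            if path not in current.children:
                if not create:
                    raise KeyError("'{}' not found in {}".format(path, current.name))
                current.children[path] = SketchDirectory(path, parent=current)
            current = current.children[path]
        return current


root = SketchDirectory()
include = root.descend("usr", "include", create=True)
assert root.descend("usr", "include", "..", ".", "include") is include
# -----------------------------------------------------------------------------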
diff --git a/src/buildstream/storage/_filebaseddirectory.py b/src/buildstream/storage/_filebaseddirectory.py
index 07c23c192..21515649d 100644
--- a/src/buildstream/storage/_filebaseddirectory.py
+++ b/src/buildstream/storage/_filebaseddirectory.py
@@ -34,7 +34,13 @@ import time
from .directory import Directory, VirtualDirectoryError, _FileType
from .. import utils
-from ..utils import link_files, copy_files, list_relative_paths, _get_link_mtime, BST_ARBITRARY_TIMESTAMP
+from ..utils import (
+ link_files,
+ copy_files,
+ list_relative_paths,
+ _get_link_mtime,
+ BST_ARBITRARY_TIMESTAMP,
+)
from ..utils import _set_deterministic_user, _set_deterministic_mtime
from ..utils import FileListResult
from .._exceptions import ImplError
@@ -52,7 +58,9 @@ class FileBasedDirectory(Directory):
""" See superclass Directory for arguments """
if follow_symlinks:
-            ImplError("FileBasedDirectory.descend does not implement follow_symlinks=True")
+            ImplError(
+                "FileBasedDirectory.descend does not implement follow_symlinks=True"
+            )
current_dir = self
@@ -65,26 +73,39 @@ class FileBasedDirectory(Directory):
try:
st = os.lstat(new_path)
if not stat.S_ISDIR(st.st_mode):
- raise VirtualDirectoryError("Cannot descend into '{}': '{}' is not a directory"
- .format(path, new_path))
+ raise VirtualDirectoryError(
+ "Cannot descend into '{}': '{}' is not a directory".format(
+ path, new_path
+ )
+ )
except FileNotFoundError:
if create:
os.mkdir(new_path)
else:
- raise VirtualDirectoryError("Cannot descend into '{}': '{}' does not exist"
- .format(path, new_path))
+ raise VirtualDirectoryError(
+ "Cannot descend into '{}': '{}' does not exist".format(
+ path, new_path
+ )
+ )
current_dir = FileBasedDirectory(new_path)
return current_dir
- def import_files(self, external_pathspec, *,
- filter_callback=None,
- report_written=True, update_mtime=False,
- can_link=False):
+ def import_files(
+ self,
+ external_pathspec,
+ *,
+ filter_callback=None,
+ report_written=True,
+ update_mtime=False,
+ can_link=False
+ ):
""" See superclass Directory for arguments """
- from ._casbaseddirectory import CasBasedDirectory # pylint: disable=cyclic-import
+ from ._casbaseddirectory import (
+ CasBasedDirectory,
+ ) # pylint: disable=cyclic-import
if isinstance(external_pathspec, CasBasedDirectory):
if can_link and not update_mtime:
@@ -93,7 +114,9 @@ class FileBasedDirectory(Directory):
actionfunc = utils.safe_copy
import_result = FileListResult()
- self._import_files_from_cas(external_pathspec, actionfunc, filter_callback, result=import_result)
+ self._import_files_from_cas(
+ external_pathspec, actionfunc, filter_callback, result=import_result
+ )
else:
if isinstance(external_pathspec, Directory):
source_directory = external_pathspec.external_directory
@@ -101,23 +124,35 @@ class FileBasedDirectory(Directory):
source_directory = external_pathspec
if can_link and not update_mtime:
- import_result = link_files(source_directory, self.external_directory,
- filter_callback=filter_callback,
- ignore_missing=False, report_written=report_written)
+ import_result = link_files(
+ source_directory,
+ self.external_directory,
+ filter_callback=filter_callback,
+ ignore_missing=False,
+ report_written=report_written,
+ )
else:
- import_result = copy_files(source_directory, self.external_directory,
- filter_callback=filter_callback,
- ignore_missing=False, report_written=report_written)
+ import_result = copy_files(
+ source_directory,
+ self.external_directory,
+ filter_callback=filter_callback,
+ ignore_missing=False,
+ report_written=report_written,
+ )
if update_mtime:
cur_time = time.time()
for f in import_result.files_written:
- os.utime(os.path.join(self.external_directory, f), times=(cur_time, cur_time))
+ os.utime(
+ os.path.join(self.external_directory, f), times=(cur_time, cur_time)
+ )
return import_result
def import_single_file(self, external_pathspec):
- dstpath = os.path.join(self.external_directory, os.path.basename(external_pathspec))
+ dstpath = os.path.join(
+ self.external_directory, os.path.basename(external_pathspec)
+ )
result = FileListResult()
if os.path.exists(dstpath):
result.ignored.append(dstpath)
@@ -171,7 +206,9 @@ class FileBasedDirectory(Directory):
tarfile.addfile(tarinfo, f)
elif tarinfo.isdir():
tarfile.addfile(tarinfo)
- self.descend(*filename.split(os.path.sep)).export_to_tar(tarfile, arcname, mtime)
+ self.descend(*filename.split(os.path.sep)).export_to_tar(
+ tarfile, arcname, mtime
+ )
else:
tarfile.addfile(tarinfo)
@@ -190,8 +227,12 @@ class FileBasedDirectory(Directory):
Return value: List(str) - list of modified paths
"""
- return [f for f in list_relative_paths(self.external_directory)
- if _get_link_mtime(os.path.join(self.external_directory, f)) != BST_ARBITRARY_TIMESTAMP]
+ return [
+ f
+ for f in list_relative_paths(self.external_directory)
+ if _get_link_mtime(os.path.join(self.external_directory, f))
+ != BST_ARBITRARY_TIMESTAMP
+ ]
def list_relative_paths(self):
"""Provide a list of all relative paths.
@@ -231,7 +272,9 @@ class FileBasedDirectory(Directory):
else:
return _FileType.SPECIAL_FILE
- def _import_files_from_cas(self, source_directory, actionfunc, filter_callback, *, path_prefix="", result):
+ def _import_files_from_cas(
+ self, source_directory, actionfunc, filter_callback, *, path_prefix="", result
+ ):
""" Import files from a CAS-based directory. """
for name, entry in source_directory.index.items():
@@ -251,11 +294,19 @@ class FileBasedDirectory(Directory):
dest_subdir = self.descend(name, create=create_subdir)
except VirtualDirectoryError:
filetype = self._get_filetype(name)
- raise VirtualDirectoryError('Destination is a {}, not a directory: /{}'
- .format(filetype, relative_pathname))
-
- dest_subdir._import_files_from_cas(src_subdir, actionfunc, filter_callback,
- path_prefix=relative_pathname, result=result)
+ raise VirtualDirectoryError(
+ "Destination is a {}, not a directory: /{}".format(
+ filetype, relative_pathname
+ )
+ )
+
+ dest_subdir._import_files_from_cas(
+ src_subdir,
+ actionfunc,
+ filter_callback,
+ path_prefix=relative_pathname,
+ result=result,
+ )
if filter_callback and not filter_callback(relative_pathname):
if is_dir and create_subdir and dest_subdir.is_empty():
@@ -279,8 +330,16 @@ class FileBasedDirectory(Directory):
src_path = source_directory.cas_cache.objpath(entry.digest)
actionfunc(src_path, dest_path, result=result)
if entry.is_executable:
- os.chmod(dest_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
- stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+ os.chmod(
+ dest_path,
+ stat.S_IRUSR
+ | stat.S_IWUSR
+ | stat.S_IXUSR
+ | stat.S_IRGRP
+ | stat.S_IXGRP
+ | stat.S_IROTH
+ | stat.S_IXOTH,
+ )
else:
assert entry.type == _FileType.SYMLINK
os.symlink(entry.target, dest_path)
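# --- Illustrative sketch (not part of the diff above) -----------------------
# FileBasedDirectory._import_files_from_cas(), reformatted above, either
# hard-links or copies each regular file out of the local CAS and then marks
# executables with a 0o755-style mode spelled out as individual stat flags.
# A hedged, standalone approximation of that per-file step (import_file and
# its arguments are hypothetical helpers, not BuildStream API):

import os
import shutil
import stat

EXECUTABLE_MODE = (
    stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
    | stat.S_IRGRP | stat.S_IXGRP
    | stat.S_IROTH | stat.S_IXOTH
)  # equivalent to 0o755


def import_file(src_path, dest_path, *, can_link=False, is_executable=False):
    if can_link:
        # Hard-linking avoids a copy but shares inode metadata with the source
        os.link(src_path, dest_path)
    else:
        shutil.copy2(src_path, dest_path)
    if is_executable:
        os.chmod(dest_path, EXECUTABLE_MODE)
# -----------------------------------------------------------------------------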
diff --git a/src/buildstream/storage/directory.py b/src/buildstream/storage/directory.py
index 29cbb53f2..89d20c433 100644
--- a/src/buildstream/storage/directory.py
+++ b/src/buildstream/storage/directory.py
@@ -46,11 +46,12 @@ class VirtualDirectoryError(BstError):
or either of the :class:`.ElementError` or :class:`.SourceError`
exceptions should be raised from this error.
"""
+
def __init__(self, message, reason=None):
super().__init__(message, domain=ErrorDomain.VIRTUAL_FS, reason=reason)
-class Directory():
+class Directory:
def __init__(self, external_directory=None):
raise NotImplementedError()
@@ -74,10 +75,15 @@ class Directory():
raise NotImplementedError()
# Import and export of files and links
- def import_files(self, external_pathspec: Union['Directory', str], *,
- filter_callback: Optional[Callable[[str], bool]] = None,
- report_written: bool = True, update_mtime: bool = False,
- can_link: bool = False) -> FileListResult:
+ def import_files(
+ self,
+ external_pathspec: Union["Directory", str],
+ *,
+ filter_callback: Optional[Callable[[str], bool]] = None,
+ report_written: bool = True,
+ update_mtime: bool = False,
+ can_link: bool = False
+ ) -> FileListResult:
"""Imports some or all files from external_path into this directory.
Args:
@@ -214,4 +220,4 @@ class _FileType(FastEnum):
def __str__(self):
# https://github.com/PyCQA/pylint/issues/2062
- return self.name.lower().replace('_', ' ') # pylint: disable=no-member
+ return self.name.lower().replace("_", " ") # pylint: disable=no-member
diff --git a/src/buildstream/testing/__init__.py b/src/buildstream/testing/__init__.py
index 3926b4eab..2fd882e18 100644
--- a/src/buildstream/testing/__init__.py
+++ b/src/buildstream/testing/__init__.py
@@ -31,9 +31,11 @@ from .integration import integration_cache
try:
import pytest
except ImportError:
- module_name = globals()['__name__']
- msg = "Could not import pytest:\n" \
- "To use the {} module, you must have pytest installed.".format(module_name)
+ module_name = globals()["__name__"]
+ msg = (
+ "Could not import pytest:\n"
+ "To use the {} module, you must have pytest installed.".format(module_name)
+ )
raise ImportError(msg)
@@ -41,7 +43,7 @@ except ImportError:
ALL_REPO_KINDS = OrderedDict() # type: OrderedDict[Repo, str]
-def create_repo(kind, directory, subdir='repo'):
+def create_repo(kind, directory, subdir="repo"):
"""Convenience method for creating a Repo
Args:
@@ -92,6 +94,7 @@ def sourcetests_collection_hook(session):
Args:
session (pytest.Session): The current pytest session
"""
+
def should_collect_tests(config):
args = config.args
rootdir = config.rootdir
@@ -112,11 +115,14 @@ def sourcetests_collection_hook(session):
return True
from . import _sourcetests
+
source_test_path = os.path.dirname(_sourcetests.__file__)
# Add the location of the source tests to the session's
# python_files config. Without this, pytest may filter out these
# tests during collection.
- session.config.addinivalue_line("python_files", os.path.join(source_test_path, "*.py"))
+ session.config.addinivalue_line(
+ "python_files", os.path.join(source_test_path, "*.py")
+ )
     # If test invocation has specified specific tests, don't
# automatically collect templated tests.
if should_collect_tests(session.config):
diff --git a/src/buildstream/testing/_fixtures.py b/src/buildstream/testing/_fixtures.py
index 2684782a1..5da51bb45 100644
--- a/src/buildstream/testing/_fixtures.py
+++ b/src/buildstream/testing/_fixtures.py
@@ -30,6 +30,7 @@ def thread_check():
yield
assert utils._is_single_threaded()
+
# Reset global state in node.pyx to improve test isolation
@pytest.fixture(autouse=True)
def reset_global_node_state():
diff --git a/src/buildstream/testing/_sourcetests/build_checkout.py b/src/buildstream/testing/_sourcetests/build_checkout.py
index 4d4bcf0e2..e673702e2 100644
--- a/src/buildstream/testing/_sourcetests/build_checkout.py
+++ b/src/buildstream/testing/_sourcetests/build_checkout.py
@@ -29,23 +29,23 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def strict_args(args, strict):
if strict != "strict":
- return ['--no-strict', *args]
+ return ["--no-strict", *args]
return args
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("strict", ["strict", "non-strict"])
def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'build-test-{}.bst'.format(kind)
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "build-test-{}.bst".format(kind)
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -54,26 +54,23 @@ def test_fetch_build_checkout(cli, tmpdir, datafiles, strict, kind):
ref = repo.create(dev_files_path)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- assert cli.get_element_state(project, element_name) == 'fetch needed'
- result = cli.run(project=project, args=strict_args(['build', element_name], strict))
+ assert cli.get_element_state(project, element_name) == "fetch needed"
+ result = cli.run(project=project, args=strict_args(["build", element_name], strict))
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
# Now check it out
- result = cli.run(project=project, args=strict_args([
- 'artifact', 'checkout', element_name, '--directory', checkout
- ], strict))
+ result = cli.run(
+ project=project,
+ args=strict_args(
+ ["artifact", "checkout", element_name, "--directory", checkout], strict
+ ),
+ )
result.assert_success()
# Check that the pony.h include from files/dev-files exists
- filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
+ filename = os.path.join(checkout, "usr", "include", "pony.h")
assert os.path.exists(filename)
diff --git a/src/buildstream/testing/_sourcetests/conftest.py b/src/buildstream/testing/_sourcetests/conftest.py
index 64dd404ef..47564abf9 100644
--- a/src/buildstream/testing/_sourcetests/conftest.py
+++ b/src/buildstream/testing/_sourcetests/conftest.py
@@ -14,4 +14,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
-from .._fixtures import reset_global_node_state, thread_check # pylint: disable=unused-import
+from .._fixtures import (
+ reset_global_node_state,
+ thread_check,
+) # pylint: disable=unused-import
diff --git a/src/buildstream/testing/_sourcetests/fetch.py b/src/buildstream/testing/_sourcetests/fetch.py
index 897752297..fc95c6e5b 100644
--- a/src/buildstream/testing/_sourcetests/fetch.py
+++ b/src/buildstream/testing/_sourcetests/fetch.py
@@ -32,15 +32,15 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
@pytest.mark.datafiles(DATA_DIR)
def test_fetch(cli, tmpdir, datafiles, kind):
project = str(datafiles)
- bin_files_path = os.path.join(project, 'files', 'bin-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'fetch-test-{}.bst'.format(kind)
+ bin_files_path = os.path.join(project, "files", "bin-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "fetch-test-{}.bst".format(kind)
# Create our repo object of the given source type with
# the bin files, and then collect the initial ref.
@@ -49,59 +49,55 @@ def test_fetch(cli, tmpdir, datafiles, kind):
ref = repo.create(bin_files_path)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'fetch needed'
+ assert cli.get_element_state(project, element_name) == "fetch needed"
# Now try to fetch it
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
- import_etc_path = os.path.join(subproject_path, 'elements', 'import-etc-repo.bst')
- etc_files_path = os.path.join(subproject_path, 'files', 'etc-files')
+ import_etc_path = os.path.join(subproject_path, "elements", "import-etc-repo.bst")
+ etc_files_path = os.path.join(subproject_path, "files", "etc-files")
- repo = create_repo(kind, str(tmpdir.join('import-etc')))
+ repo = create_repo(kind, str(tmpdir.join("import-etc")))
ref = repo.create(etc_files_path)
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=(ref if ref_storage == 'inline' else None))
- ]
+ "kind": "import",
+ "sources": [repo.source_config(ref=(ref if ref_storage == "inline" else None))],
}
_yaml.roundtrip_dump(element, import_etc_path)
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
- generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
+ generate_junction(
+ tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
+ )
- if ref_storage == 'project.refs':
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ if ref_storage == "project.refs":
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc.bst'])
+ result = cli.run(
+ project=project, args=["source", "track", "junction.bst:import-etc.bst"]
+ )
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'junction.bst:import-etc.bst'])
+ result = cli.run(
+ project=project, args=["source", "fetch", "junction.bst:import-etc.bst"]
+ )
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/mirror.py b/src/buildstream/testing/_sourcetests/mirror.py
index b6316045d..a28bf3c00 100644
--- a/src/buildstream/testing/_sourcetests/mirror.py
+++ b/src/buildstream/testing/_sourcetests/mirror.py
@@ -31,25 +31,25 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def _set_project_mirrors_and_aliases(project_path, mirrors, aliases):
- project_conf_path = os.path.join(project_path, 'project.conf')
+ project_conf_path = os.path.join(project_path, "project.conf")
project_conf = _yaml.roundtrip_load(project_conf_path)
- project_conf['mirrors'] = mirrors
- project_conf['aliases'].update(aliases)
+ project_conf["mirrors"] = mirrors
+ project_conf["aliases"].update(aliases)
_yaml.roundtrip_dump(project_conf, project_conf_path)
def _set_project_includes_and_aliases(project_path, includes, aliases):
- project_conf_path = os.path.join(project_path, 'project.conf')
+ project_conf_path = os.path.join(project_path, "project.conf")
project_conf = _yaml.roundtrip_load(project_conf_path)
- project_conf['aliases'].update(aliases)
- project_conf['(@)'] = includes
+ project_conf["aliases"].update(aliases)
+ project_conf["(@)"] = includes
_yaml.roundtrip_dump(project_conf, project_conf_path)
@@ -57,11 +57,11 @@ def _set_project_includes_and_aliases(project_path, includes, aliases):
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_fetch(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
@@ -70,96 +70,75 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind):
upstream_ref = upstream_repo.create(dev_files_path)
element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
+ "kind": "import",
+ "sources": [upstream_repo.source_config(ref=upstream_ref)],
}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
_set_project_mirrors_and_aliases(
project_dir,
- [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + '/'],
- },
- },
- ],
- {alias: upstream_map + '/'},
+ [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
+ {alias: upstream_map + "/"},
)
# No obvious ways of checking that the mirror has been fetched
# But at least we can be sure it succeeds
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(project_dir, 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr")
+ upstream_repodir = os.path.join(project_dir, "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
ref = upstream_repo.create(dev_files_path)
mirror_repo = upstream_repo.copy(mirror_repodir)
- element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=ref)
- ]
- }
+ element = {"kind": "import", "sources": [upstream_repo.source_config(ref=ref)]}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
_, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
_set_project_mirrors_and_aliases(
project_dir,
- [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"]
- },
- },
- ],
- {alias: 'http://www.example.com'},
+ [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"]},},],
+ {alias: "http://www.example.com"},
)
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
@@ -167,62 +146,53 @@ def test_mirror_from_includes(cli, tmpdir, datafiles, kind):
mirror_repo = upstream_repo.copy(mirror_repodir)
element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
+ "kind": "import",
+ "sources": [upstream_repo.source_config(ref=upstream_ref)],
}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
- config_project_dir = str(tmpdir.join('config'))
+ config_project_dir = str(tmpdir.join("config"))
os.makedirs(config_project_dir, exist_ok=True)
- config_project = {
- 'name': 'config'
- }
- _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ config_project = {"name": "config"}
+ _yaml.roundtrip_dump(
+ config_project, os.path.join(config_project_dir, "project.conf")
+ )
extra_mirrors = {
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- }
- }
- ]
+ "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]
}
- _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
- generate_junction(str(tmpdir.join('config_repo')),
- config_project_dir,
- os.path.join(element_dir, 'config.bst'))
+ _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml"))
+ generate_junction(
+ str(tmpdir.join("config_repo")),
+ config_project_dir,
+ os.path.join(element_dir, "config.bst"),
+ )
_set_project_includes_and_aliases(
- project_dir,
- ['config.bst:mirrors.yml'],
- {alias: upstream_map + '/'},
+ project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"},
)
# Now make the upstream unavailable.
- os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ os.rename(upstream_repo.repo, "{}.bak".format(upstream_repo.repo))
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
@@ -230,67 +200,58 @@ def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind):
mirror_repo = upstream_repo.copy(mirror_repodir)
element = {
- 'kind': 'junction',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
+ "kind": "junction",
+ "sources": [upstream_repo.source_config(ref=upstream_ref)],
}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
- config_project_dir = str(tmpdir.join('config'))
+ config_project_dir = str(tmpdir.join("config"))
os.makedirs(config_project_dir, exist_ok=True)
- config_project = {
- 'name': 'config'
- }
- _yaml.roundtrip_dump(config_project, os.path.join(config_project_dir, 'project.conf'))
+ config_project = {"name": "config"}
+ _yaml.roundtrip_dump(
+ config_project, os.path.join(config_project_dir, "project.conf")
+ )
extra_mirrors = {
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- }
- }
- ]
+ "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}]
}
- _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, 'mirrors.yml'))
- generate_junction(str(tmpdir.join('config_repo')),
- config_project_dir,
- os.path.join(element_dir, 'config.bst'))
+ _yaml.roundtrip_dump(extra_mirrors, os.path.join(config_project_dir, "mirrors.yml"))
+ generate_junction(
+ str(tmpdir.join("config_repo")),
+ config_project_dir,
+ os.path.join(element_dir, "config.bst"),
+ )
_set_project_includes_and_aliases(
- project_dir,
- ['config.bst:mirrors.yml'],
- {alias: upstream_map + '/'}
+ project_dir, ["config.bst:mirrors.yml"], {alias: upstream_map + "/"}
)
# Now make the upstream unavailable.
- os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ os.rename(upstream_repo.repo, "{}.bak".format(upstream_repo.repo))
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_main_error(ErrorDomain.STREAM, None)
# Now make the upstream available again.
- os.rename('{}.bak'.format(upstream_repo.repo), upstream_repo.repo)
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ os.rename("{}.bak".format(upstream_repo.repo), upstream_repo.repo)
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
@@ -299,54 +260,45 @@ def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
upstream_ref = upstream_repo.create(dev_files_path)
element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
+ "kind": "import",
+ "sources": [upstream_repo.source_config(ref=upstream_ref)],
}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
_set_project_mirrors_and_aliases(
project_dir,
- [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + '/'],
- },
- },
- ],
- {alias: upstream_map + '/'},
+ [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
+ {alias: upstream_map + "/"},
)
- result = cli.run(project=project_dir, args=['source', 'track', element_name])
+ result = cli.run(project=project_dir, args=["source", "track", element_name])
result.assert_success()
# Tracking tries upstream first. Check the ref is from upstream.
new_element = _yaml.load(element_path)
- source = new_element.get_sequence('sources').mapping_at(0)
- if 'ref' in source:
- assert source.get_str('ref') == upstream_ref
+ source = new_element.get_sequence("sources").mapping_at(0)
+ if "ref" in source:
+ assert source.get_str("ref") == upstream_ref
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
project_dir = str(datafiles)
- bin_files_path = os.path.join(project_dir, 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(project_dir, 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- element_dir = os.path.join(project_dir, 'elements')
+ bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
+ dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
upstream_repo = create_repo(kind, upstream_repodir)
@@ -356,41 +308,32 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
upstream_ref = upstream_repo.create(dev_files_path)
element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref)
- ]
+ "kind": "import",
+ "sources": [upstream_repo.source_config(ref=upstream_ref)],
}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
_, repo_name = os.path.split(full_repo)
- alias = 'foo-' + kind
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo-" + kind
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
_yaml.roundtrip_dump(element, element_path)
_set_project_mirrors_and_aliases(
project_dir,
- [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + '/'],
- },
- },
- ],
- {alias: 'http://www.example.com'},
+ [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
+ {alias: "http://www.example.com"},
)
- result = cli.run(project=project_dir, args=['source', 'track', element_name])
+ result = cli.run(project=project_dir, args=["source", "track", element_name])
result.assert_success()
# Check that tracking fell back to the mirror
new_element = _yaml.load(element_path)
- source = new_element.get_sequence('sources').mapping_at(0)
- if 'ref' in source:
- assert source.get_str('ref') == mirror_ref
+ source = new_element.get_sequence("sources").mapping_at(0)
+ if "ref" in source:
+ assert source.get_str("ref") == mirror_ref
diff --git a/src/buildstream/testing/_sourcetests/source_determinism.py b/src/buildstream/testing/_sourcetests/source_determinism.py
index fc0e4618c..465afa23b 100644
--- a/src/buildstream/testing/_sourcetests/source_determinism.py
+++ b/src/buildstream/testing/_sourcetests/source_determinism.py
@@ -30,83 +30,79 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
-def create_test_file(*path, mode=0o644, content='content\n'):
+def create_test_file(*path, mode=0o644, content="content\n"):
path = os.path.join(*path)
os.makedirs(os.path.dirname(path), exist_ok=True)
- with open(path, 'w') as f:
+ with open(path, "w") as f:
f.write(content)
os.fchmod(f.fileno(), mode)
def create_test_directory(*path, mode=0o644):
- create_test_file(*path, '.keep', content='')
+ create_test_file(*path, ".keep", content="")
path = os.path.join(*path)
os.chmod(path, mode)
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.skipif(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox, Must Fix')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox, Must Fix"
+)
def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
project = str(datafiles)
- element_name = 'list.bst'
- element_path = os.path.join(project, 'elements', element_name)
- repodir = os.path.join(str(tmpdir), 'repo')
- sourcedir = os.path.join(project, 'source')
-
- create_test_file(sourcedir, 'a.txt', mode=0o700)
- create_test_file(sourcedir, 'b.txt', mode=0o755)
- create_test_file(sourcedir, 'c.txt', mode=0o600)
- create_test_file(sourcedir, 'd.txt', mode=0o400)
- create_test_file(sourcedir, 'e.txt', mode=0o644)
- create_test_file(sourcedir, 'f.txt', mode=0o4755)
- create_test_file(sourcedir, 'g.txt', mode=0o2755)
- create_test_file(sourcedir, 'h.txt', mode=0o1755)
- create_test_directory(sourcedir, 'dir-a', mode=0o0700)
- create_test_directory(sourcedir, 'dir-c', mode=0o0755)
- create_test_directory(sourcedir, 'dir-d', mode=0o4755)
- create_test_directory(sourcedir, 'dir-e', mode=0o2755)
- create_test_directory(sourcedir, 'dir-f', mode=0o1755)
+ element_name = "list.bst"
+ element_path = os.path.join(project, "elements", element_name)
+ repodir = os.path.join(str(tmpdir), "repo")
+ sourcedir = os.path.join(project, "source")
+
+ create_test_file(sourcedir, "a.txt", mode=0o700)
+ create_test_file(sourcedir, "b.txt", mode=0o755)
+ create_test_file(sourcedir, "c.txt", mode=0o600)
+ create_test_file(sourcedir, "d.txt", mode=0o400)
+ create_test_file(sourcedir, "e.txt", mode=0o644)
+ create_test_file(sourcedir, "f.txt", mode=0o4755)
+ create_test_file(sourcedir, "g.txt", mode=0o2755)
+ create_test_file(sourcedir, "h.txt", mode=0o1755)
+ create_test_directory(sourcedir, "dir-a", mode=0o0700)
+ create_test_directory(sourcedir, "dir-c", mode=0o0755)
+ create_test_directory(sourcedir, "dir-d", mode=0o4755)
+ create_test_directory(sourcedir, "dir-e", mode=0o2755)
+ create_test_directory(sourcedir, "dir-f", mode=0o1755)
repo = create_repo(kind, repodir)
ref = repo.create(sourcedir)
source = repo.source_config(ref=ref)
element = {
- 'kind': 'manual',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build'
- }
- ],
- 'sources': [
- source
- ],
- 'config': {
- 'install-commands': [
- 'ls -l >"%{install-root}/ls-l"'
- ]
- }
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "sources": [source],
+ "config": {"install-commands": ['ls -l >"%{install-root}/ls-l"']},
}
_yaml.roundtrip_dump(element, element_path)
def get_value_for_umask(umask):
- checkoutdir = os.path.join(str(tmpdir), 'checkout-{}'.format(umask))
+ checkoutdir = os.path.join(str(tmpdir), "checkout-{}".format(umask))
old_umask = os.umask(umask)
try:
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkoutdir],
+ )
result.assert_success()
- with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
+ with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
return f.read()
finally:
os.umask(old_umask)
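A minimal standalone sketch of the POSIX rule the umask test above relies on (a newly created file's permission bits are the requested mode with the process umask bits cleared). The helper name created_mode is invented for illustration and is not part of this patch:

    import os
    import shutil
    import stat
    import tempfile

    def created_mode(requested_mode, umask):
        # Create a file via os.open() under the given umask and report the
        # permission bits the kernel actually assigned to it.
        old_umask = os.umask(umask)
        directory = tempfile.mkdtemp()
        try:
            path = os.path.join(directory, "probe")
            fd = os.open(path, os.O_CREAT | os.O_WRONLY, requested_mode)
            os.close(fd)
            return stat.S_IMODE(os.stat(path).st_mode)
        finally:
            os.umask(old_umask)
            shutil.rmtree(directory)

    # Effective mode == requested mode with the umask bits cleared.
    assert created_mode(0o666, 0o022) == 0o666 & ~0o022  # 0o644
    assert created_mode(0o666, 0o002) == 0o666 & ~0o002  # 0o664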
diff --git a/src/buildstream/testing/_sourcetests/track.py b/src/buildstream/testing/_sourcetests/track.py
index 48856b351..c857e246d 100644
--- a/src/buildstream/testing/_sourcetests/track.py
+++ b/src/buildstream/testing/_sourcetests/track.py
@@ -33,33 +33,26 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def generate_element(repo, element_path, dep_name=None):
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config()
- ]
- }
+ element = {"kind": "import", "sources": [repo.source_config()]}
if dep_name:
- element['depends'] = [dep_name]
+ element["depends"] = [dep_name]
_yaml.roundtrip_dump(element, element_path)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_track(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test-{}.bst'.format(kind)
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "track-test-{}.bst".format(kind)
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -71,28 +64,28 @@ def test_track(cli, tmpdir, datafiles, ref_storage, kind):
generate_element(repo, os.path.join(element_path, element_name))
# Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'no reference'
+ assert cli.get_element_state(project, element_name) == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
# Assert there was a project.refs created, depending on the configuration
- if ref_storage == 'project.refs':
- assert os.path.exists(os.path.join(project, 'project.refs'))
+ if ref_storage == "project.refs":
+ assert os.path.exists(os.path.join(project, "project.refs"))
else:
- assert not os.path.exists(os.path.join(project, 'project.refs'))
+ assert not os.path.exists(os.path.join(project, "project.refs"))
# NOTE:
@@ -112,18 +105,14 @@ def test_track(cli, tmpdir, datafiles, ref_storage, kind):
@pytest.mark.parametrize("amount", [1, 10])
def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
# Try to actually launch as many fetch jobs as possible at the same time
#
# This stresses the Source plugins and helps to ensure that
# they handle concurrent access to the store correctly.
- cli.configure({
- 'scheduler': {
- 'fetchers': amount,
- }
- })
+ cli.configure({"scheduler": {"fetchers": amount,}})
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -135,7 +124,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
element_names = []
last_element_name = None
for i in range(amount + 1):
- element_name = 'track-test-{}-{}.bst'.format(kind, i + 1)
+ element_name = "track-test-{}-{}.bst".format(kind, i + 1)
filename = os.path.join(element_path, element_name)
element_names.append(element_name)
@@ -146,39 +135,39 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
# Assert that a fetch is needed
states = cli.get_element_states(project, [last_element_name])
for element_name in element_names:
- assert states[element_name] == 'no reference'
+ assert states[element_name] == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=[
- 'source', 'track', '--deps', 'all',
- last_element_name])
+ result = cli.run(
+ project=project, args=["source", "track", "--deps", "all", last_element_name]
+ )
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=[
- 'source', 'fetch', '--deps', 'all',
- last_element_name])
+ result = cli.run(
+ project=project, args=["source", "fetch", "--deps", "all", last_element_name]
+ )
result.assert_success()
# Assert that the base is buildable and the rest are waiting
states = cli.get_element_states(project, [last_element_name])
for element_name in element_names:
if element_name == element_names[0]:
- assert states[element_name] == 'buildable'
+ assert states[element_name] == "buildable"
else:
- assert states[element_name] == 'waiting'
+ assert states[element_name] == "waiting"
@pytest.mark.datafiles(DATA_DIR)
def test_track_recurse_except(cli, tmpdir, datafiles, kind):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_dep_name = 'track-test-dep-{}.bst'.format(kind)
- element_target_name = 'track-test-target-{}.bst'.format(kind)
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_dep_name = "track-test-dep-{}.bst".format(kind)
+ element_target_name = "track-test-target-{}.bst".format(kind)
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -188,88 +177,105 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
# Write out our test targets
generate_element(repo, os.path.join(element_path, element_dep_name))
- generate_element(repo, os.path.join(element_path, element_target_name),
- dep_name=element_dep_name)
+ generate_element(
+ repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name
+ )
# Assert that a fetch is needed
states = cli.get_element_states(project, [element_target_name])
- assert states[element_dep_name] == 'no reference'
- assert states[element_target_name] == 'no reference'
+ assert states[element_dep_name] == "no reference"
+ assert states[element_target_name] == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=[
- 'source', 'track', '--deps', 'all', '--except', element_dep_name,
- element_target_name])
+ result = cli.run(
+ project=project,
+ args=[
+ "source",
+ "track",
+ "--deps",
+ "all",
+ "--except",
+ element_dep_name,
+ element_target_name,
+ ],
+ )
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=[
- 'source', 'fetch', '--deps', 'none',
- element_target_name])
+ result = cli.run(
+ project=project, args=["source", "fetch", "--deps", "none", element_target_name]
+ )
result.assert_success()
# Assert that the dependency is buildable and the target is waiting
states = cli.get_element_states(project, [element_target_name])
- assert states[element_dep_name] == 'no reference'
- assert states[element_target_name] == 'waiting'
+ assert states[element_dep_name] == "no reference"
+ assert states[element_target_name] == "waiting"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- etc_files = os.path.join(subproject_path, 'files', 'etc-files')
- repo_element_path = os.path.join(subproject_path, 'elements',
- 'import-etc-repo.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ etc_files = os.path.join(subproject_path, "files", "etc-files")
+ repo_element_path = os.path.join(subproject_path, "elements", "import-etc-repo.bst")
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
- repo = create_repo(kind, str(tmpdir.join('element_repo')))
+ repo = create_repo(kind, str(tmpdir.join("element_repo")))
repo.create(etc_files)
generate_element(repo, repo_element_path)
- generate_junction(str(tmpdir.join('junction_repo')),
- subproject_path, junction_path, store_ref=False)
+ generate_junction(
+ str(tmpdir.join("junction_repo")),
+ subproject_path,
+ junction_path,
+ store_ref=False,
+ )
# Track the junction itself first.
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
- assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'no reference'
+ assert (
+ cli.get_element_state(project, "junction.bst:import-etc-repo.bst")
+ == "no reference"
+ )
# Track the cross junction element. -J is not given, it is implied.
- result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc-repo.bst'])
+ result = cli.run(
+ project=project, args=["source", "track", "junction.bst:import-etc-repo.bst"]
+ )
- if ref_storage == 'inline':
+ if ref_storage == "inline":
# This is not allowed to track cross junction without project.refs.
- result.assert_main_error(ErrorDomain.PIPELINE, 'untrackable-sources')
+ result.assert_main_error(ErrorDomain.PIPELINE, "untrackable-sources")
else:
result.assert_success()
- assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'buildable'
+ assert (
+ cli.get_element_state(project, "junction.bst:import-etc-repo.bst")
+ == "buildable"
+ )
- assert os.path.exists(os.path.join(project, 'project.refs'))
+ assert os.path.exists(os.path.join(project, "project.refs"))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test-{}.bst'.format(kind)
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "track-test-{}.bst".format(kind)
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
@@ -278,139 +284,133 @@ def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
ref = repo.create(dev_files_path)
# Generate the element
- element = {
- 'kind': 'import',
- '(@)': ['elements/sources.yml']
- }
- sources = {
- 'sources': [
- repo.source_config()
- ]
- }
+ element = {"kind": "import", "(@)": ["elements/sources.yml"]}
+ sources = {"sources": [repo.source_config()]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- _yaml.roundtrip_dump(sources, os.path.join(element_path, 'sources.yml'))
+ _yaml.roundtrip_dump(sources, os.path.join(element_path, "sources.yml"))
# Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'no reference'
+ assert cli.get_element_state(project, element_name) == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
result.assert_success()
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
# Assert there was a project.refs created, depending on the configuration
- if ref_storage == 'project.refs':
- assert os.path.exists(os.path.join(project, 'project.refs'))
+ if ref_storage == "project.refs":
+ assert os.path.exists(os.path.join(project, "project.refs"))
else:
- assert not os.path.exists(os.path.join(project, 'project.refs'))
+ assert not os.path.exists(os.path.join(project, "project.refs"))
- new_sources = _yaml.load(os.path.join(element_path, 'sources.yml'))
+ new_sources = _yaml.load(os.path.join(element_path, "sources.yml"))
# Get all of the sources
- assert 'sources' in new_sources
- sources_list = new_sources.get_sequence('sources')
+ assert "sources" in new_sources
+ sources_list = new_sources.get_sequence("sources")
assert len(sources_list) == 1
# Get the first source from the sources list
new_source = sources_list.mapping_at(0)
- assert 'ref' in new_source
- assert ref == new_source.get_str('ref')
+ assert "ref" in new_source
+ assert ref == new_source.get_str("ref")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test-{}.bst'.format(kind)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- sub_element_path = os.path.join(subproject_path, 'elements')
- junction_path = os.path.join(element_path, 'junction.bst')
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "track-test-{}.bst".format(kind)
+ subproject_path = os.path.join(project, "files", "sub-project")
+ sub_element_path = os.path.join(subproject_path, "elements")
+ junction_path = os.path.join(element_path, "junction.bst")
- update_project_configuration(project, {
- 'ref-storage': ref_storage
- })
+ update_project_configuration(project, {"ref-storage": ref_storage})
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
#
- repo = create_repo(kind, str(tmpdir.join('element_repo')))
+ repo = create_repo(kind, str(tmpdir.join("element_repo")))
repo.create(dev_files_path)
# Generate the element
- element = {
- 'kind': 'import',
- '(@)': ['junction.bst:elements/sources.yml']
- }
- sources = {
- 'sources': [
- repo.source_config()
- ]
- }
+ element = {"kind": "import", "(@)": ["junction.bst:elements/sources.yml"]}
+ sources = {"sources": [repo.source_config()]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- _yaml.roundtrip_dump(sources, os.path.join(sub_element_path, 'sources.yml'))
+ _yaml.roundtrip_dump(sources, os.path.join(sub_element_path, "sources.yml"))
- generate_junction(str(tmpdir.join('junction_repo')),
- subproject_path, junction_path, store_ref=True)
+ generate_junction(
+ str(tmpdir.join("junction_repo")),
+ subproject_path,
+ junction_path,
+ store_ref=True,
+ )
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
# Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'no reference'
+ assert cli.get_element_state(project, element_name) == "no reference"
# Now first try to track it
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
# Assert there was a project.refs created, depending on the configuration
- if ref_storage == 'inline':
+ if ref_storage == "inline":
# FIXME: We should expect an error. But only a warning is emitted
# result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment')
- assert 'junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction' in result.stderr
+ assert (
+ "junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction"
+ in result.stderr
+ )
else:
- assert os.path.exists(os.path.join(project, 'project.refs'))
+ assert os.path.exists(os.path.join(project, "project.refs"))
# And now fetch it: The Source has probably already cached the
# latest ref locally, but it is not required to have cached
# the associated content of the latest ref at track time, that
# is the job of fetch.
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", ['inline', 'project.refs'])
+@pytest.mark.parametrize("ref_storage", ["inline", "project.refs"])
def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
project = str(datafiles)
- element_path = os.path.join(project, 'elements')
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(element_path, 'junction.bst')
-
- update_project_configuration(project, {
- 'ref-storage': ref_storage,
- '(@)': ['junction.bst:test.yml']
- })
-
- generate_junction(str(tmpdir.join('junction_repo')),
- subproject_path, junction_path, store_ref=False)
-
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ element_path = os.path.join(project, "elements")
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(element_path, "junction.bst")
+
+ update_project_configuration(
+ project, {"ref-storage": ref_storage, "(@)": ["junction.bst:test.yml"]}
+ )
+
+ generate_junction(
+ str(tmpdir.join("junction_repo")),
+ subproject_path,
+ junction_path,
+ store_ref=False,
+ )
+
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
diff --git a/src/buildstream/testing/_sourcetests/track_cross_junction.py b/src/buildstream/testing/_sourcetests/track_cross_junction.py
index 550f57faf..470b67e8d 100644
--- a/src/buildstream/testing/_sourcetests/track_cross_junction.py
+++ b/src/buildstream/testing/_sourcetests/track_cross_junction.py
@@ -32,32 +32,27 @@ from .utils import add_plugins_conf
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def generate_element(repo, element_path, dep_name=None):
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config()
- ]
- }
+ element = {"kind": "import", "sources": [repo.source_config()]}
if dep_name:
- element['depends'] = [dep_name]
+ element["depends"] = [dep_name]
_yaml.roundtrip_dump(element, element_path)
def generate_import_element(tmpdir, kind, project, name):
- element_name = 'import-{}.bst'.format(name)
- repo_element_path = os.path.join(project, 'elements', element_name)
+ element_name = "import-{}.bst".format(name)
+ repo_element_path = os.path.join(project, "elements", element_name)
files = str(tmpdir.join("imported_files_{}".format(name)))
os.makedirs(files)
- with open(os.path.join(files, '{}.txt'.format(name)), 'w') as f:
+ with open(os.path.join(files, "{}.txt".format(name)), "w") as f:
f.write(name)
- repo = create_repo(kind, str(tmpdir.join('element_{}_repo'.format(name))))
+ repo = create_repo(kind, str(tmpdir.join("element_{}_repo".format(name))))
repo.create(files)
generate_element(repo, repo_element_path)
@@ -69,28 +64,22 @@ def generate_project(tmpdir, name, kind, config=None):
if config is None:
config = {}
- project_name = 'project-{}'.format(name)
+ project_name = "project-{}".format(name)
subproject_path = os.path.join(str(tmpdir.join(project_name)))
- os.makedirs(os.path.join(subproject_path, 'elements'))
+ os.makedirs(os.path.join(subproject_path, "elements"))
- project_conf = {
- 'name': name,
- 'element-path': 'elements'
- }
+ project_conf = {"name": name, "element-path": "elements"}
project_conf.update(config)
- _yaml.roundtrip_dump(project_conf, os.path.join(subproject_path, 'project.conf'))
+ _yaml.roundtrip_dump(project_conf, os.path.join(subproject_path, "project.conf"))
add_plugins_conf(subproject_path, kind)
return project_name, subproject_path
def generate_simple_stack(project, name, dependencies):
- element_name = '{}.bst'.format(name)
- element_path = os.path.join(project, 'elements', element_name)
- element = {
- 'kind': 'stack',
- 'depends': dependencies
- }
+ element_name = "{}.bst".format(name)
+ element_path = os.path.join(project, "elements", element_name)
+ element = {"kind": "stack", "depends": dependencies}
_yaml.roundtrip_dump(element, element_path)
return element_name
@@ -98,11 +87,11 @@ def generate_simple_stack(project, name, dependencies):
def generate_cross_element(project, subproject_name, import_name):
basename, _ = os.path.splitext(import_name)
- return generate_simple_stack(project, 'import-{}-{}'.format(subproject_name, basename),
- [{
- 'junction': '{}.bst'.format(subproject_name),
- 'filename': import_name
- }])
+ return generate_simple_stack(
+ project,
+ "import-{}-{}".format(subproject_name, basename),
+ [{"junction": "{}.bst".format(subproject_name), "filename": import_name}],
+ )
@pytest.mark.parametrize("kind", ALL_REPO_KINDS.keys())
@@ -110,30 +99,34 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
tmpdir = tmpdir.join(kind)
# Generate 3 projects: main, a, b
- _, project = generate_project(tmpdir, 'main', kind, {'ref-storage': 'project.refs'})
- project_a, project_a_path = generate_project(tmpdir, 'a', kind)
- project_b, project_b_path = generate_project(tmpdir, 'b', kind)
+ _, project = generate_project(tmpdir, "main", kind, {"ref-storage": "project.refs"})
+ project_a, project_a_path = generate_project(tmpdir, "a", kind)
+ project_b, project_b_path = generate_project(tmpdir, "b", kind)
# Generate an element with a trackable source for each project
- element_a = generate_import_element(tmpdir, kind, project_a_path, 'a')
- element_b = generate_import_element(tmpdir, kind, project_b_path, 'b')
- element_c = generate_import_element(tmpdir, kind, project, 'c')
+ element_a = generate_import_element(tmpdir, kind, project_a_path, "a")
+ element_b = generate_import_element(tmpdir, kind, project_b_path, "b")
+ element_c = generate_import_element(tmpdir, kind, project, "c")
# Create some indirections to the elements with dependencies to test --deps
- stack_a = generate_simple_stack(project_a_path, 'stack-a', [element_a])
- stack_b = generate_simple_stack(project_b_path, 'stack-b', [element_b])
+ stack_a = generate_simple_stack(project_a_path, "stack-a", [element_a])
+ stack_b = generate_simple_stack(project_b_path, "stack-b", [element_b])
# Create junctions for projects a and b in main.
- junction_a = '{}.bst'.format(project_a)
- junction_a_path = os.path.join(project, 'elements', junction_a)
- generate_junction(tmpdir.join('repo_a'), project_a_path, junction_a_path, store_ref=False)
-
- junction_b = '{}.bst'.format(project_b)
- junction_b_path = os.path.join(project, 'elements', junction_b)
- generate_junction(tmpdir.join('repo_b'), project_b_path, junction_b_path, store_ref=False)
+ junction_a = "{}.bst".format(project_a)
+ junction_a_path = os.path.join(project, "elements", junction_a)
+ generate_junction(
+ tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False
+ )
+
+ junction_b = "{}.bst".format(project_b)
+ junction_b_path = os.path.join(project, "elements", junction_b)
+ generate_junction(
+ tmpdir.join("repo_b"), project_b_path, junction_b_path, store_ref=False
+ )
# Track the junctions.
- result = cli.run(project=project, args=['source', 'track', junction_a, junction_b])
+ result = cli.run(project=project, args=["source", "track", junction_a, junction_b])
result.assert_success()
# Import elements from a and b in to main.
@@ -141,18 +134,24 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
imported_b = generate_cross_element(project, project_b, stack_b)
# Generate a top level stack depending on everything
- all_bst = generate_simple_stack(project, 'all', [imported_a, imported_b, element_c])
+ all_bst = generate_simple_stack(project, "all", [imported_a, imported_b, element_c])
# Track without following junctions. But explicitly also track the elements in project a.
- result = cli.run(project=project, args=['source', 'track',
- '--deps', 'all',
- all_bst,
- '{}:{}'.format(junction_a, stack_a)])
+ result = cli.run(
+ project=project,
+ args=[
+ "source",
+ "track",
+ "--deps",
+ "all",
+ all_bst,
+ "{}:{}".format(junction_a, stack_a),
+ ],
+ )
result.assert_success()
# Elements in project b should not be tracked. But elements in project a and main should.
- expected = [element_c,
- '{}:{}'.format(junction_a, element_a)]
+ expected = [element_c, "{}:{}".format(junction_a, element_a)]
assert set(result.get_tracked_elements()) == set(expected)
@@ -160,31 +159,43 @@ def test_cross_junction_multiple_projects(cli, tmpdir, kind):
def test_track_exceptions(cli, tmpdir, kind):
tmpdir = tmpdir.join(kind)
- _, project = generate_project(tmpdir, 'main', kind, {'ref-storage': 'project.refs'})
- project_a, project_a_path = generate_project(tmpdir, 'a', kind)
+ _, project = generate_project(tmpdir, "main", kind, {"ref-storage": "project.refs"})
+ project_a, project_a_path = generate_project(tmpdir, "a", kind)
- element_a = generate_import_element(tmpdir, kind, project_a_path, 'a')
- element_b = generate_import_element(tmpdir, kind, project_a_path, 'b')
+ element_a = generate_import_element(tmpdir, kind, project_a_path, "a")
+ element_b = generate_import_element(tmpdir, kind, project_a_path, "b")
- all_bst = generate_simple_stack(project_a_path, 'all', [element_a,
- element_b])
+ all_bst = generate_simple_stack(project_a_path, "all", [element_a, element_b])
- junction_a = '{}.bst'.format(project_a)
- junction_a_path = os.path.join(project, 'elements', junction_a)
- generate_junction(tmpdir.join('repo_a'), project_a_path, junction_a_path, store_ref=False)
+ junction_a = "{}.bst".format(project_a)
+ junction_a_path = os.path.join(project, "elements", junction_a)
+ generate_junction(
+ tmpdir.join("repo_a"), project_a_path, junction_a_path, store_ref=False
+ )
- result = cli.run(project=project, args=['source', 'track', junction_a])
+ result = cli.run(project=project, args=["source", "track", junction_a])
result.assert_success()
imported_b = generate_cross_element(project, project_a, element_b)
- indirection = generate_simple_stack(project, 'indirection', [imported_b])
-
- result = cli.run(project=project,
- args=['source', 'track', '--deps', 'all',
- '--except', indirection,
- '{}:{}'.format(junction_a, all_bst), imported_b])
+ indirection = generate_simple_stack(project, "indirection", [imported_b])
+
+ result = cli.run(
+ project=project,
+ args=[
+ "source",
+ "track",
+ "--deps",
+ "all",
+ "--except",
+ indirection,
+ "{}:{}".format(junction_a, all_bst),
+ imported_b,
+ ],
+ )
result.assert_success()
- expected = ['{}:{}'.format(junction_a, element_a),
- '{}:{}'.format(junction_a, element_b)]
+ expected = [
+ "{}:{}".format(junction_a, element_a),
+ "{}:{}".format(junction_a, element_b),
+ ]
assert set(result.get_tracked_elements()) == set(expected)
diff --git a/src/buildstream/testing/_sourcetests/utils.py b/src/buildstream/testing/_sourcetests/utils.py
index a0e65b4f4..ca245a57d 100644
--- a/src/buildstream/testing/_sourcetests/utils.py
+++ b/src/buildstream/testing/_sourcetests/utils.py
@@ -27,9 +27,11 @@ import os
try:
import pytest
except ImportError:
- module_name = globals()['__name__']
- msg = "Could not import pytest:\n" \
- "To use the {} module, you must have pytest installed.".format(module_name)
+ module_name = globals()["__name__"]
+ msg = (
+ "Could not import pytest:\n"
+ "To use the {} module, you must have pytest installed.".format(module_name)
+ )
raise ImportError(msg)
from buildstream import _yaml
@@ -50,7 +52,10 @@ from .. import ALL_REPO_KINDS
@pytest.fixture(params=ALL_REPO_KINDS.keys())
def kind(request, datafiles):
# Register plugins both on the toplevel project and on its junctions
- for project_dir in [str(datafiles), os.path.join(str(datafiles), "files", "sub-project")]:
+ for project_dir in [
+ str(datafiles),
+ os.path.join(str(datafiles), "files", "sub-project"),
+ ]:
add_plugins_conf(project_dir, request.param)
yield request.param
@@ -75,9 +80,7 @@ def add_plugins_conf(project, plugin_kind):
{
"origin": "pip",
"package-name": plugin_package,
- "sources": {
- plugin_kind: 0,
- },
+ "sources": {plugin_kind: 0,},
},
]
@@ -96,7 +99,7 @@ def add_plugins_conf(project, plugin_kind):
# updated_configuration (dict): configuration to merge into the existing one
#
def update_project_configuration(project_path, updated_configuration):
- project_conf_path = os.path.join(project_path, 'project.conf')
+ project_conf_path = os.path.join(project_path, "project.conf")
project_conf = _yaml.roundtrip_load(project_conf_path)
project_conf.update(updated_configuration)
diff --git a/src/buildstream/testing/_sourcetests/workspace.py b/src/buildstream/testing/_sourcetests/workspace.py
index dd7977e76..7cc308006 100644
--- a/src/buildstream/testing/_sourcetests/workspace.py
+++ b/src/buildstream/testing/_sourcetests/workspace.py
@@ -30,10 +30,10 @@ from .utils import kind # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
-class WorkspaceCreator():
+class WorkspaceCreator:
def __init__(self, cli, tmpdir, datafiles, project_path=None):
self.cli = cli
self.tmpdir = tmpdir
@@ -45,17 +45,18 @@ class WorkspaceCreator():
shutil.copytree(str(datafiles), project_path)
self.project_path = project_path
- self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
+ self.bin_files_path = os.path.join(project_path, "files", "bin-files")
- self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
+ self.workspace_cmd = os.path.join(self.project_path, "workspace_cmd")
- def create_workspace_element(self, kind, track, suffix='', workspace_dir=None,
- element_attrs=None):
- element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
- element_path = os.path.join(self.project_path, 'elements')
+ def create_workspace_element(
+ self, kind, track, suffix="", workspace_dir=None, element_attrs=None
+ ):
+ element_name = "workspace-test-{}{}.bst".format(kind, suffix)
+ element_path = os.path.join(self.project_path, "elements")
if not workspace_dir:
workspace_dir = os.path.join(self.workspace_cmd, element_name)
- if workspace_dir[-4:] == '.bst':
+ if workspace_dir[-4:] == ".bst":
workspace_dir = workspace_dir[:-4]
# Create our repo object of the given source type with
@@ -66,90 +67,108 @@ class WorkspaceCreator():
ref = None
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
if element_attrs:
element = {**element, **element_attrs}
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
return element_name, element_path, workspace_dir
- def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
- element_attrs=None):
+ def create_workspace_elements(
+ self, kinds, track, suffixs=None, workspace_dir_usr=None, element_attrs=None
+ ):
element_tuples = []
if suffixs is None:
- suffixs = ['', ] * len(kinds)
+ suffixs = ["",] * len(kinds)
else:
if len(suffixs) != len(kinds):
raise "terable error"
for suffix, kind in zip(suffixs, kinds):
- element_name, _, workspace_dir = \
- self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
- element_attrs)
+ element_name, _, workspace_dir = self.create_workspace_element(
+ kind, track, suffix, workspace_dir_usr, element_attrs
+ )
element_tuples.append((element_name, workspace_dir))
# Assert that there is no reference, a track & fetch is needed
- states = self.cli.get_element_states(self.project_path, [
- e for e, _ in element_tuples
- ])
+ states = self.cli.get_element_states(
+ self.project_path, [e for e, _ in element_tuples]
+ )
if track:
- assert not any(states[e] != 'no reference' for e, _ in element_tuples)
+ assert not any(states[e] != "no reference" for e, _ in element_tuples)
else:
- assert not any(states[e] != 'fetch needed' for e, _ in element_tuples)
+ assert not any(states[e] != "fetch needed" for e, _ in element_tuples)
return element_tuples
- def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
- element_attrs=None, no_checkout=False):
-
- element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
- element_attrs)
+ def open_workspaces(
+ self,
+ kinds,
+ track,
+ suffixs=None,
+ workspace_dir=None,
+ element_attrs=None,
+ no_checkout=False,
+ ):
+
+ element_tuples = self.create_workspace_elements(
+ kinds, track, suffixs, workspace_dir, element_attrs
+ )
os.makedirs(self.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
- args = ['workspace', 'open']
+ args = ["workspace", "open"]
if track:
- args.append('--track')
+ args.append("--track")
if no_checkout:
- args.append('--no-checkout')
+ args.append("--no-checkout")
if workspace_dir is not None:
assert len(element_tuples) == 1, "test logic error"
_, workspace_dir = element_tuples[0]
- args.extend(['--directory', workspace_dir])
+ args.extend(["--directory", workspace_dir])
- args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
- result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
+ args.extend(
+ [element_name for element_name, workspace_dir_suffix in element_tuples]
+ )
+ result = self.cli.run(
+ cwd=self.workspace_cmd, project=self.project_path, args=args
+ )
result.assert_success()
if not no_checkout:
# Assert that we are now buildable because the source is now cached.
- states = self.cli.get_element_states(self.project_path, [
- e for e, _ in element_tuples
- ])
- assert not any(states[e] != 'buildable' for e, _ in element_tuples)
+ states = self.cli.get_element_states(
+ self.project_path, [e for e, _ in element_tuples]
+ )
+ assert not any(states[e] != "buildable" for e, _ in element_tuples)
# Check that the executable hello file is found in each workspace
for _, workspace in element_tuples:
- filename = os.path.join(workspace, 'usr', 'bin', 'hello')
+ filename = os.path.join(workspace, "usr", "bin", "hello")
assert os.path.exists(filename)
return element_tuples
-def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
- project_path=None, element_attrs=None, no_checkout=False):
+def open_workspace(
+ cli,
+ tmpdir,
+ datafiles,
+ kind,
+ track,
+ suffix="",
+ workspace_dir=None,
+ project_path=None,
+ element_attrs=None,
+ no_checkout=False,
+):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
- workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
- element_attrs, no_checkout)
+ workspaces = workspace_object.open_workspaces(
+ (kind,), track, (suffix,), workspace_dir, element_attrs, no_checkout
+ )
assert len(workspaces) == 1
element_name, workspace = workspaces[0]
return element_name, workspace_object.project_path, workspace
diff --git a/src/buildstream/testing/_utils/junction.py b/src/buildstream/testing/_utils/junction.py
index 98d23b0a2..ddfbead55 100644
--- a/src/buildstream/testing/_utils/junction.py
+++ b/src/buildstream/testing/_utils/junction.py
@@ -28,12 +28,7 @@ def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True)
if not store_ref:
source_ref = None
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=source_ref)
- ]
- }
+ element = {"kind": "junction", "sources": [repo.source_config(ref=source_ref)]}
_yaml.roundtrip_dump(element, junction_path)
return ref
@@ -41,46 +36,40 @@ def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True)
# A barebones Git Repo class to use for generating junctions
class _SimpleGit(Repo):
- def __init__(self, directory, subdir='repo'):
+ def __init__(self, directory, subdir="repo"):
if not HAVE_GIT:
- pytest.skip('git is not available')
+ pytest.skip("git is not available")
super().__init__(directory, subdir)
def create(self, directory):
self.copy_directory(directory, self.repo)
- self._run_git('init', '.')
- self._run_git('add', '.')
- self._run_git('commit', '-m', 'Initial commit')
+ self._run_git("init", ".")
+ self._run_git("add", ".")
+ self._run_git("commit", "-m", "Initial commit")
return self.latest_commit()
def latest_commit(self):
return self._run_git(
- 'rev-parse', 'HEAD',
- stdout=subprocess.PIPE,
- universal_newlines=True,
+ "rev-parse", "HEAD", stdout=subprocess.PIPE, universal_newlines=True,
).stdout.strip()
def source_config(self, ref=None):
return self.source_config_extra(ref)
def source_config_extra(self, ref=None, checkout_submodules=None):
- config = {
- 'kind': 'git',
- 'url': 'file://' + self.repo,
- 'track': 'master'
- }
+ config = {"kind": "git", "url": "file://" + self.repo, "track": "master"}
if ref is not None:
- config['ref'] = ref
+ config["ref"] = ref
if checkout_submodules is not None:
- config['checkout-submodules'] = checkout_submodules
+ config["checkout-submodules"] = checkout_submodules
return config
def _run_git(self, *args, **kwargs):
argv = [GIT]
argv.extend(args)
- if 'env' not in kwargs:
- kwargs['env'] = dict(GIT_ENV, PWD=self.repo)
- kwargs.setdefault('cwd', self.repo)
- kwargs.setdefault('check', True)
+ if "env" not in kwargs:
+ kwargs["env"] = dict(GIT_ENV, PWD=self.repo)
+ kwargs.setdefault("cwd", self.repo)
+ kwargs.setdefault("check", True)
return subprocess.run(argv, **kwargs)
diff --git a/src/buildstream/testing/_utils/site.py b/src/buildstream/testing/_utils/site.py
index ca74d9505..953d21607 100644
--- a/src/buildstream/testing/_utils/site.py
+++ b/src/buildstream/testing/_utils/site.py
@@ -5,29 +5,29 @@ import os
import subprocess
import sys
import platform
-from typing import Optional # pylint: disable=unused-import
+from typing import Optional # pylint: disable=unused-import
from buildstream import _site, utils, ProgramNotFoundError
from buildstream._platform import Platform
try:
- GIT = utils.get_host_tool('git') # type: Optional[str]
+ GIT = utils.get_host_tool("git") # type: Optional[str]
HAVE_GIT = True
- out = str(subprocess.check_output(['git', '--version']), "utf-8")
+ out = str(subprocess.check_output(["git", "--version"]), "utf-8")
# e.g. on Git for Windows we get "git version 2.21.0.windows.1".
# e.g. on Mac via Homebrew we get "git version 2.19.0".
- version = tuple(int(x) for x in out.split(' ')[2].split('.')[:3])
+ version = tuple(int(x) for x in out.split(" ")[2].split(".")[:3])
HAVE_OLD_GIT = version < (1, 8, 5)
GIT_ENV = {
- 'GIT_AUTHOR_DATE': '1320966000 +0200',
- 'GIT_AUTHOR_NAME': 'tomjon',
- 'GIT_AUTHOR_EMAIL': 'tom@jon.com',
- 'GIT_COMMITTER_DATE': '1320966000 +0200',
- 'GIT_COMMITTER_NAME': 'tomjon',
- 'GIT_COMMITTER_EMAIL': 'tom@jon.com'
+ "GIT_AUTHOR_DATE": "1320966000 +0200",
+ "GIT_AUTHOR_NAME": "tomjon",
+ "GIT_AUTHOR_EMAIL": "tom@jon.com",
+ "GIT_COMMITTER_DATE": "1320966000 +0200",
+ "GIT_COMMITTER_NAME": "tomjon",
+ "GIT_COMMITTER_EMAIL": "tom@jon.com",
}
except ProgramNotFoundError:
GIT = None
@@ -36,18 +36,16 @@ except ProgramNotFoundError:
GIT_ENV = dict()
try:
- BZR = utils.get_host_tool('bzr') # type: Optional[str]
+ BZR = utils.get_host_tool("bzr") # type: Optional[str]
HAVE_BZR = True
- BZR_ENV = {
- "BZR_EMAIL": "Testy McTesterson <testy.mctesterson@example.com>"
- }
+ BZR_ENV = {"BZR_EMAIL": "Testy McTesterson <testy.mctesterson@example.com>"}
except ProgramNotFoundError:
BZR = None
HAVE_BZR = False
BZR_ENV = {}
try:
- utils.get_host_tool('bwrap')
+ utils.get_host_tool("bwrap")
HAVE_BWRAP = True
HAVE_BWRAP_JSON_STATUS = _site.get_bwrap_version() >= (0, 3, 2)
except ProgramNotFoundError:
@@ -55,32 +53,33 @@ except ProgramNotFoundError:
HAVE_BWRAP_JSON_STATUS = False
try:
- utils.get_host_tool('lzip')
+ utils.get_host_tool("lzip")
HAVE_LZIP = True
except ProgramNotFoundError:
HAVE_LZIP = False
try:
import arpy # pylint: disable=unused-import
+
HAVE_ARPY = True
except ImportError:
HAVE_ARPY = False
try:
- utils.get_host_tool('buildbox')
+ utils.get_host_tool("buildbox")
HAVE_BUILDBOX = True
except ProgramNotFoundError:
HAVE_BUILDBOX = False
-IS_LINUX = os.getenv('BST_FORCE_BACKEND', sys.platform).startswith('linux')
-IS_WSL = (IS_LINUX and 'Microsoft' in platform.uname().release)
-IS_WINDOWS = (os.name == 'nt')
+IS_LINUX = os.getenv("BST_FORCE_BACKEND", sys.platform).startswith("linux")
+IS_WSL = IS_LINUX and "Microsoft" in platform.uname().release
+IS_WINDOWS = os.name == "nt"
MACHINE_ARCH = Platform.get_host_arch()
-HAVE_SANDBOX = os.getenv('BST_FORCE_SANDBOX')
+HAVE_SANDBOX = os.getenv("BST_FORCE_SANDBOX")
if HAVE_SANDBOX is not None:
pass
elif IS_LINUX and HAVE_BWRAP and (not IS_WSL):
- HAVE_SANDBOX = 'bwrap'
+ HAVE_SANDBOX = "bwrap"
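A quick standalone check of the version parsing that site.py applies to the output of git --version; the two sample strings come from the comments in the hunk above, and parse_git_version is an invented name, not part of this patch:

    def parse_git_version(out):
        # Same expression as in site.py: take the third word and keep the
        # first three dot-separated components as integers.
        return tuple(int(x) for x in out.split(" ")[2].split(".")[:3])

    assert parse_git_version("git version 2.21.0.windows.1\n") == (2, 21, 0)
    assert parse_git_version("git version 2.19.0") == (2, 19, 0)
    assert parse_git_version("git version 1.8.4") < (1, 8, 5)  # the HAVE_OLD_GIT case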
diff --git a/src/buildstream/testing/integration.py b/src/buildstream/testing/integration.py
index 01635de74..5734c6c82 100644
--- a/src/buildstream/testing/integration.py
+++ b/src/buildstream/testing/integration.py
@@ -39,11 +39,11 @@ def walk_dir(root):
# print path to all subdirectories first.
for subdirname in dirnames:
- yield os.path.join(dirname, subdirname)[len(root):]
+ yield os.path.join(dirname, subdirname)[len(root) :]
# print path to all filenames.
for filename in filenames:
- yield os.path.join(dirname, filename)[len(root):]
+ yield os.path.join(dirname, filename)[len(root) :]
# Ensure that a directory contains the given filenames.
@@ -51,35 +51,35 @@ def assert_contains(directory, expected):
missing = set(expected)
missing.difference_update(walk_dir(directory))
if missing:
- raise AssertionError("Missing {} expected elements from list: {}"
- .format(len(missing), missing))
+ raise AssertionError(
+ "Missing {} expected elements from list: {}".format(len(missing), missing)
+ )
class IntegrationCache:
-
def __init__(self, cache):
self.root = os.path.abspath(cache)
os.makedirs(cache, exist_ok=True)
# Use the same sources every time
- self.sources = os.path.join(self.root, 'sources')
+ self.sources = os.path.join(self.root, "sources")
# Create a temp directory for the duration of the test for
# the artifacts directory
try:
- self.cachedir = tempfile.mkdtemp(dir=self.root, prefix='cache-')
+ self.cachedir = tempfile.mkdtemp(dir=self.root, prefix="cache-")
except OSError as e:
raise AssertionError("Unable to create test directory !") from e
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def integration_cache(request):
# Set the cache dir to the INTEGRATION_CACHE variable, or the
# default if that is not set.
- if 'INTEGRATION_CACHE' in os.environ:
- cache_dir = os.environ['INTEGRATION_CACHE']
+ if "INTEGRATION_CACHE" in os.environ:
+ cache_dir = os.environ["INTEGRATION_CACHE"]
else:
- cache_dir = os.path.abspath('./integration-cache')
+ cache_dir = os.path.abspath("./integration-cache")
cache = IntegrationCache(cache_dir)
@@ -92,6 +92,6 @@ def integration_cache(request):
except FileNotFoundError:
pass
try:
- shutil.rmtree(os.path.join(cache.root, 'cas'))
+ shutil.rmtree(os.path.join(cache.root, "cas"))
except FileNotFoundError:
pass
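The walk_dir() helper above yields paths relative to root by slicing off the root prefix, so every yielded entry keeps its leading separator, and assert_contains() compares its expected list against exactly these values. A tiny standalone sketch of that slicing idiom (not part of this patch):

    import os

    root = "/tmp/project"
    full = os.path.join(root, "usr", "bin", "hello")
    # Dropping the first len(root) characters leaves a root-relative path
    # that still starts with the path separator.
    assert full[len(root):] == "/usr/bin/hello"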
diff --git a/src/buildstream/testing/repo.py b/src/buildstream/testing/repo.py
index c1538685d..1b46ec806 100644
--- a/src/buildstream/testing/repo.py
+++ b/src/buildstream/testing/repo.py
@@ -25,7 +25,7 @@ import os
import shutil
-class Repo():
+class Repo:
"""Repo()
Abstract class providing scaffolding for generating data to be
@@ -38,7 +38,8 @@ class Repo():
subdir (str): The subdir for the repo, in case there is more than one
"""
- def __init__(self, directory, subdir='repo'):
+
+ def __init__(self, directory, subdir="repo"):
# The working directory for the repo object
#
@@ -100,7 +101,7 @@ class Repo():
Returns:
(Repo): A Repo object for the new repository.
"""
- subdir = self.repo[len(self.directory):].lstrip(os.sep)
+ subdir = self.repo[len(self.directory) :].lstrip(os.sep)
new_dir = os.path.join(dest, subdir)
os.makedirs(new_dir, exist_ok=True)
self.copy_directory(self.repo, new_dir)
diff --git a/src/buildstream/testing/runcli.py b/src/buildstream/testing/runcli.py
index 36426c8af..9cded9f9e 100644
--- a/src/buildstream/testing/runcli.py
+++ b/src/buildstream/testing/runcli.py
@@ -61,14 +61,10 @@ from buildstream._protos.buildstream.v2 import artifact_pb2
# Wrapper for the click.testing result
-class Result():
-
- def __init__(self,
- exit_code=None,
- exception=None,
- exc_info=None,
- output=None,
- stderr=None):
+class Result:
+ def __init__(
+ self, exit_code=None, exception=None, exc_info=None, output=None, stderr=None
+ ):
self.exit_code = exit_code
self.exc = exception
self.exc_info = exc_info
@@ -94,8 +90,7 @@ class Result():
self.unhandled_exception = True
self.exception = get_last_exception()
- self.task_error_domain, \
- self.task_error_reason = get_last_task_error()
+ self.task_error_domain, self.task_error_reason = get_last_task_error()
else:
self.exception = None
self.task_error_domain = None
@@ -111,7 +106,7 @@ class Result():
# Raises:
# (AssertionError): If the session did not complete successfully
#
- def assert_success(self, fail_message=''):
+ def assert_success(self, fail_message=""):
assert self.exit_code == 0, fail_message
assert self.exc is None, fail_message
assert self.exception is None, fail_message
@@ -131,11 +126,9 @@ class Result():
# Raises:
# (AssertionError): If any of the assertions fail
#
- def assert_main_error(self,
- error_domain,
- error_reason,
- fail_message='',
- *, debug=False):
+ def assert_main_error(
+ self, error_domain, error_reason, fail_message="", *, debug=False
+ ):
if debug:
print(
"""
@@ -147,8 +140,9 @@ class Result():
self.exit_code,
self.exception,
self.exception.domain,
- self.exception.reason
- ))
+ self.exception.reason,
+ )
+ )
assert self.exit_code == -1, fail_message
assert self.exc is not None, fail_message
assert self.exception is not None, fail_message
@@ -172,10 +166,7 @@ class Result():
# Raises:
# (AssertionError): If any of the assertions fail
#
- def assert_task_error(self,
- error_domain,
- error_reason,
- fail_message=''):
+ def assert_task_error(self, error_domain, error_reason, fail_message=""):
assert self.exit_code == -1, fail_message
assert self.exc is not None, fail_message
@@ -197,7 +188,7 @@ class Result():
# Raises:
# (AssertionError): If any of the assertions fail
#
- def assert_shell_error(self, fail_message=''):
+ def assert_shell_error(self, fail_message=""):
assert self.exit_code == 1, fail_message
# get_start_order()
@@ -212,7 +203,9 @@ class Result():
# (list): A list of element names in the order which they first appeared in the result
#
def get_start_order(self, activity):
- results = re.findall(r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format(activity), self.stderr)
+ results = re.findall(
+ r"\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log".format(activity), self.stderr
+ )
if results is None:
return []
return list(results)
@@ -228,29 +221,32 @@ class Result():
# (list): A list of element names
#
def get_tracked_elements(self):
- tracked = re.findall(r'\[\s*track:(\S+)\s*]', self.stderr)
+ tracked = re.findall(r"\[\s*track:(\S+)\s*]", self.stderr)
if tracked is None:
return []
return list(tracked)
def get_pushed_elements(self):
- pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed artifact', self.stderr)
+ pushed = re.findall(
+ r"\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed artifact", self.stderr
+ )
if pushed is None:
return []
return list(pushed)
def get_pulled_elements(self):
- pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
+ pulled = re.findall(
+ r"\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact", self.stderr
+ )
if pulled is None:
return []
return list(pulled)
-class Cli():
-
+class Cli:
def __init__(self, directory, verbose=True, default_options=None):
self.directory = directory
self.config = None
@@ -286,14 +282,13 @@ class Cli():
# element_name (str): The name of the element artifact
# cache_dir (str): Specific cache dir to remove artifact from
#
- def remove_artifact_from_cache(self, project, element_name,
- *, cache_dir=None):
+ def remove_artifact_from_cache(self, project, element_name, *, cache_dir=None):
# Read configuration to figure out where artifacts are stored
if not cache_dir:
- default = os.path.join(project, 'cache')
+ default = os.path.join(project, "cache")
if self.config is not None:
- cache_dir = self.config.get('cachedir', default)
+ cache_dir = self.config.get("cachedir", default)
else:
cache_dir = default
@@ -313,8 +308,17 @@ class Cli():
# args (list): A list of arguments to pass buildstream
# binary_capture (bool): Whether to capture the stdout/stderr as binary
#
- def run(self, configure=True, project=None, silent=False, env=None,
- cwd=None, options=None, args=None, binary_capture=False):
+ def run(
+ self,
+ configure=True,
+ project=None,
+ silent=False,
+ env=None,
+ cwd=None,
+ options=None,
+ args=None,
+ binary_capture=False,
+ ):
# We don't want to carry the state of one bst invocation into another
# bst invocation. Since node _FileInfo objects hold onto BuildStream
@@ -335,22 +339,22 @@ class Cli():
options = self.default_options + options
with ExitStack() as stack:
- bst_args = ['--no-colors']
+ bst_args = ["--no-colors"]
if silent:
- bst_args += ['--no-verbose']
+ bst_args += ["--no-verbose"]
if configure:
config_file = stack.enter_context(
configured(self.directory, self.config)
)
- bst_args += ['--config', config_file]
+ bst_args += ["--config", config_file]
if project:
- bst_args += ['--directory', str(project)]
+ bst_args += ["--directory", str(project)]
for option, value in options:
- bst_args += ['--option', option, value]
+ bst_args += ["--option", option, value]
bst_args += args
@@ -366,15 +370,18 @@ class Cli():
try:
sys.__stdout__.fileno()
except ValueError:
- sys.__stdout__ = open('/dev/stdout', 'w')
+ sys.__stdout__ = open("/dev/stdout", "w")
result = self._invoke(bst_cli, bst_args, binary_capture=binary_capture)
# Some informative stdout we can observe when anything fails
if self.verbose:
command = "bst " + " ".join(bst_args)
- print("BuildStream exited with code {} for invocation:\n\t{}"
- .format(result.exit_code, command))
+ print(
+ "BuildStream exited with code {} for invocation:\n\t{}".format(
+ result.exit_code, command
+ )
+ )
if result.output:
print("Program output was:\n{}".format(result.output))
if result.stderr:
@@ -409,9 +416,9 @@ class Cli():
exit_code = e.code
if not isinstance(exit_code, int):
- sys.stdout.write('Program exit code was not an integer: ')
+ sys.stdout.write("Program exit code was not an integer: ")
sys.stdout.write(str(exit_code))
- sys.stdout.write('\n')
+ sys.stdout.write("\n")
exit_code = 1
except Exception as e: # pylint: disable=broad-except
exception = e
@@ -424,11 +431,13 @@ class Cli():
out, err = capture.readouterr()
capture.stop_capturing()
- return Result(exit_code=exit_code,
- exception=exception,
- exc_info=exc_info,
- output=out,
- stderr=err)
+ return Result(
+ exit_code=exit_code,
+ exception=exception,
+ exc_info=exc_info,
+ output=out,
+ stderr=err,
+ )
# Fetch an element state by name by
# invoking bst show on the project with the CLI
@@ -437,12 +446,11 @@ class Cli():
# then use get_element_states(s) instead.
#
def get_element_state(self, project, element_name):
- result = self.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{state}',
- element_name
- ])
+ result = self.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{state}", element_name],
+ )
result.assert_success()
return result.output.strip()
@@ -450,18 +458,17 @@ class Cli():
#
# Returns a dictionary with the element names as keys
#
- def get_element_states(self, project, targets, deps='all'):
- result = self.run(project=project, silent=True, args=[
- 'show',
- '--deps', deps,
- '--format', '%{name}||%{state}',
- *targets
- ])
+ def get_element_states(self, project, targets, deps="all"):
+ result = self.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", deps, "--format", "%{name}||%{state}", *targets],
+ )
result.assert_success()
lines = result.output.splitlines()
states = {}
for line in lines:
- split = line.split(sep='||')
+ split = line.split(sep="||")
states[split[0]] = split[1]
return states
@@ -469,24 +476,22 @@ class Cli():
# on the project with the CLI
#
def get_element_key(self, project, element_name):
- result = self.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{full-key}',
- element_name
- ])
+ result = self.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{full-key}", element_name],
+ )
result.assert_success()
return result.output.strip()
# Get the decoded config of an element.
#
def get_element_config(self, project, element_name):
- result = self.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{config}',
- element_name
- ])
+ result = self.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{config}", element_name],
+ )
result.assert_success()
return yaml.safe_load(result.output)
@@ -494,12 +499,14 @@ class Cli():
# Fetch the elements that would be in the pipeline with the given
# arguments.
#
- def get_pipeline(self, project, elements, except_=None, scope='plan'):
+ def get_pipeline(self, project, elements, except_=None, scope="plan"):
if except_ is None:
except_ = []
- args = ['show', '--deps', scope, '--format', '%{name}']
- args += list(itertools.chain.from_iterable(zip(itertools.repeat('--except'), except_)))
+ args = ["show", "--deps", scope, "--format", "%{name}"]
+ args += list(
+ itertools.chain.from_iterable(zip(itertools.repeat("--except"), except_))
+ )
result = self.run(project=project, silent=True, args=args + elements)
result.assert_success()
@@ -523,11 +530,27 @@ class CliIntegration(Cli):
#
# This supports the same arguments as Cli.run(), see run_project_config().
#
- def run(self, configure=True, project=None, silent=False, env=None,
- cwd=None, options=None, args=None, binary_capture=False):
+ def run(
+ self,
+ configure=True,
+ project=None,
+ silent=False,
+ env=None,
+ cwd=None,
+ options=None,
+ args=None,
+ binary_capture=False,
+ ):
return self.run_project_config(
- configure=configure, project=project, silent=silent, env=env,
- cwd=cwd, options=options, args=args, binary_capture=binary_capture)
+ configure=configure,
+ project=project,
+ silent=silent,
+ env=env,
+ cwd=cwd,
+ options=options,
+ args=args,
+ binary_capture=binary_capture,
+ )
# run_project_config()
#
@@ -549,9 +572,9 @@ class CliIntegration(Cli):
# Save the original project.conf, because we will run more than
# once in the same temp directory
#
- project_directory = kwargs['project']
- project_filename = os.path.join(project_directory, 'project.conf')
- project_backup = os.path.join(project_directory, 'project.conf.backup')
+ project_directory = kwargs["project"]
+ project_filename = os.path.join(project_directory, "project.conf")
+ project_backup = os.path.join(project_directory, "project.conf.backup")
project_load_filename = project_filename
if not os.path.exists(project_backup):
@@ -576,8 +599,8 @@ class CliIntegration(Cli):
#
with tempfile.TemporaryDirectory(dir=project_directory) as scratchdir:
- temp_project = os.path.join(scratchdir, 'project.conf')
- with open(temp_project, 'w') as f:
+ temp_project = os.path.join(scratchdir, "project.conf")
+ with open(temp_project, "w") as f:
yaml.safe_dump(project_config, f)
project_config = _yaml.load(temp_project)
@@ -589,7 +612,7 @@ class CliIntegration(Cli):
else:
# Otherwise, just dump it as is
- with open(project_filename, 'w') as f:
+ with open(project_filename, "w") as f:
f.write(config)
return super().run(**kwargs)
@@ -611,50 +634,51 @@ class CliRemote(CliIntegration):
#
# Returns a list of configured services (by names).
#
- def ensure_services(self, actions=True, execution=True, storage=True,
- artifacts=False, sources=False):
+ def ensure_services(
+ self, actions=True, execution=True, storage=True, artifacts=False, sources=False
+ ):
# Build a list of configured services by name:
configured_services = []
if not self.config:
return configured_services
- if 'remote-execution' in self.config:
- rexec_config = self.config['remote-execution']
+ if "remote-execution" in self.config:
+ rexec_config = self.config["remote-execution"]
- if 'action-cache-service' in rexec_config:
+ if "action-cache-service" in rexec_config:
if actions:
- configured_services.append('action-cache')
+ configured_services.append("action-cache")
else:
- rexec_config.pop('action-cache-service')
+ rexec_config.pop("action-cache-service")
- if 'execution-service' in rexec_config:
+ if "execution-service" in rexec_config:
if execution:
- configured_services.append('execution')
+ configured_services.append("execution")
else:
- rexec_config.pop('execution-service')
+ rexec_config.pop("execution-service")
- if 'storage-service' in rexec_config:
+ if "storage-service" in rexec_config:
if storage:
- configured_services.append('storage')
+ configured_services.append("storage")
else:
- rexec_config.pop('storage-service')
+ rexec_config.pop("storage-service")
- if 'artifacts' in self.config:
+ if "artifacts" in self.config:
if artifacts:
- configured_services.append('artifact-cache')
+ configured_services.append("artifact-cache")
else:
- self.config.pop('artifacts')
+ self.config.pop("artifacts")
- if 'source-caches' in self.config:
+ if "source-caches" in self.config:
if sources:
- configured_services.append('source-cache')
+ configured_services.append("source-cache")
else:
- self.config.pop('source-caches')
+ self.config.pop("source-caches")
return configured_services
-class TestArtifact():
+class TestArtifact:
# remove_artifact_from_cache():
#
@@ -666,10 +690,10 @@ class TestArtifact():
#
def remove_artifact_from_cache(self, cache_dir, element_name):
- cache_dir = os.path.join(cache_dir, 'artifacts', 'refs')
+ cache_dir = os.path.join(cache_dir, "artifacts", "refs")
- normal_name = element_name.replace(os.sep, '-')
- cache_dir = os.path.splitext(os.path.join(cache_dir, 'test', normal_name))[0]
+ normal_name = element_name.replace(os.sep, "-")
+ cache_dir = os.path.splitext(os.path.join(cache_dir, "test", normal_name))[0]
shutil.rmtree(cache_dir)
# is_cached():
@@ -688,7 +712,9 @@ class TestArtifact():
# cas = CASCache(str(cache_dir))
artifact_ref = element.get_artifact_name(element_key)
- return os.path.exists(os.path.join(cache_dir, 'artifacts', 'refs', artifact_ref))
+ return os.path.exists(
+ os.path.join(cache_dir, "artifacts", "refs", artifact_ref)
+ )
# get_digest():
#
@@ -705,9 +731,9 @@ class TestArtifact():
def get_digest(self, cache_dir, element, element_key):
artifact_ref = element.get_artifact_name(element_key)
- artifact_dir = os.path.join(cache_dir, 'artifacts', 'refs')
+ artifact_dir = os.path.join(cache_dir, "artifacts", "refs")
artifact_proto = artifact_pb2.Artifact()
- with open(os.path.join(artifact_dir, artifact_ref), 'rb') as f:
+ with open(os.path.join(artifact_dir, artifact_ref), "rb") as f:
artifact_proto.ParseFromString(f.read())
return artifact_proto.files
@@ -727,7 +753,7 @@ class TestArtifact():
def extract_buildtree(self, cache_dir, tmpdir, ref):
artifact = artifact_pb2.Artifact()
try:
- with open(os.path.join(cache_dir, 'artifacts', 'refs', ref), 'rb') as f:
+ with open(os.path.join(cache_dir, "artifacts", "refs", ref), "rb") as f:
artifact.ParseFromString(f.read())
except FileNotFoundError:
yield None
@@ -768,7 +794,7 @@ class TestArtifact():
#
@pytest.fixture()
def cli(tmpdir):
- directory = os.path.join(str(tmpdir), 'cache')
+ directory = os.path.join(str(tmpdir), "cache")
os.makedirs(directory)
return Cli(directory)
@@ -781,27 +807,29 @@ def cli(tmpdir):
# stacktraces.
@pytest.fixture()
def cli_integration(tmpdir, integration_cache):
- directory = os.path.join(str(tmpdir), 'cache')
+ directory = os.path.join(str(tmpdir), "cache")
os.makedirs(directory)
fixture = CliIntegration(directory)
# We want to cache sources for integration tests more permanently,
# to avoid downloading the huge base-sdk repeatedly
- fixture.configure({
- 'cachedir': integration_cache.cachedir,
- 'sourcedir': integration_cache.sources,
- })
+ fixture.configure(
+ {
+ "cachedir": integration_cache.cachedir,
+ "sourcedir": integration_cache.sources,
+ }
+ )
yield fixture
# remove following folders if necessary
try:
- shutil.rmtree(os.path.join(integration_cache.cachedir, 'build'))
+ shutil.rmtree(os.path.join(integration_cache.cachedir, "build"))
except FileNotFoundError:
pass
try:
- shutil.rmtree(os.path.join(integration_cache.cachedir, 'tmp'))
+ shutil.rmtree(os.path.join(integration_cache.cachedir, "tmp"))
except FileNotFoundError:
pass
@@ -813,36 +841,32 @@ def cli_integration(tmpdir, integration_cache):
# stacktraces.
@pytest.fixture()
def cli_remote_execution(tmpdir, remote_services):
- directory = os.path.join(str(tmpdir), 'cache')
+ directory = os.path.join(str(tmpdir), "cache")
os.makedirs(directory)
fixture = CliRemote(directory)
if remote_services.artifact_service:
- fixture.configure({'artifacts': [{
- 'url': remote_services.artifact_service,
- }]})
+ fixture.configure({"artifacts": [{"url": remote_services.artifact_service,}]})
remote_execution = {}
if remote_services.action_service:
- remote_execution['action-cache-service'] = {
- 'url': remote_services.action_service,
+ remote_execution["action-cache-service"] = {
+ "url": remote_services.action_service,
}
if remote_services.exec_service:
- remote_execution['execution-service'] = {
- 'url': remote_services.exec_service,
+ remote_execution["execution-service"] = {
+ "url": remote_services.exec_service,
}
if remote_services.storage_service:
- remote_execution['storage-service'] = {
- 'url': remote_services.storage_service,
+ remote_execution["storage-service"] = {
+ "url": remote_services.storage_service,
}
if remote_execution:
- fixture.configure({'remote-execution': remote_execution})
+ fixture.configure({"remote-execution": remote_execution})
if remote_services.source_service:
- fixture.configure({'source-caches': [{
- 'url': remote_services.source_service,
- }]})
+ fixture.configure({"source-caches": [{"url": remote_services.source_service,}]})
return fixture
@@ -882,12 +906,12 @@ def configured(directory, config=None):
if not config:
config = {}
- if not config.get('sourcedir', False):
- config['sourcedir'] = os.path.join(directory, 'sources')
- if not config.get('cachedir', False):
- config['cachedir'] = directory
- if not config.get('logdir', False):
- config['logdir'] = os.path.join(directory, 'logs')
+ if not config.get("sourcedir", False):
+ config["sourcedir"] = os.path.join(directory, "sources")
+ if not config.get("cachedir", False):
+ config["cachedir"] = directory
+ if not config.get("logdir", False):
+ config["logdir"] = os.path.join(directory, "logs")
# Dump it and yield the filename for test scripts to feed it
    # to buildstream as an argument
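
For orientation, the reformatted Cli helpers above are typically driven from a test roughly as follows. This is only a minimal sketch: it assumes the `cli` and `datafiles` pytest fixtures and a DATA_DIR test project as set up in the test modules further down in this diff, and the element name is purely illustrative.

    import pytest

    @pytest.mark.datafiles(DATA_DIR)
    def test_build_example(cli, datafiles):
        project = str(datafiles)
        # Cli.run() invokes `bst --no-colors ...` and returns a Result object
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        # get_element_state() wraps `bst show --deps none --format %{state}`
        assert cli.get_element_state(project, "target.bst") == "cached"
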
diff --git a/src/buildstream/types.py b/src/buildstream/types.py
index 5688bf393..2a27891ba 100644
--- a/src/buildstream/types.py
+++ b/src/buildstream/types.py
@@ -68,14 +68,18 @@ class FastEnum(metaclass=MetaFastEnum):
def __eq__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
+ raise ValueError(
+ "Unexpected comparison between {} and {}".format(self, repr(other))
+ )
# Enums instances are unique, so creating an instance with the same value as another will just
# send back the other one, hence we can use an identity comparison, which is much faster than '=='
return self is other
def __ne__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
+ raise ValueError(
+ "Unexpected comparison between {} and {}".format(self, repr(other))
+ )
return self is not other
def __hash__(self):
@@ -142,16 +146,20 @@ class Consistency(FastEnum):
def __ge__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
+ raise ValueError(
+ "Unexpected comparison between {} and {}".format(self, repr(other))
+ )
return self.value >= other.value
def __lt__(self, other):
if self.__class__ is not other.__class__:
- raise ValueError("Unexpected comparison between {} and {}".format(self, repr(other)))
+ raise ValueError(
+ "Unexpected comparison between {} and {}".format(self, repr(other))
+ )
return self.value < other.value
-class CoreWarnings():
+class CoreWarnings:
"""CoreWarnings()
Some common warnings which are raised by core functionalities within BuildStream are found in this class.
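
As a small illustration of the comparison semantics reformatted above: FastEnum members compare by identity within a class and refuse comparison across classes. The enum names below are purely illustrative, not real BuildStream enums.

    from buildstream.types import FastEnum

    class Colour(FastEnum):
        RED = 1
        BLUE = 2

    class Shape(FastEnum):
        SQUARE = 1

    assert Colour.RED == Colour.RED      # same class: fast identity comparison
    assert Colour.RED != Colour.BLUE
    try:
        _ = Colour.RED == Shape.SQUARE   # mixing classes is a programming error
    except ValueError as exc:
        print(exc)                       # "Unexpected comparison between ..."
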
diff --git a/src/buildstream/utils.py b/src/buildstream/utils.py
index de7c14b70..ec57b7f7f 100644
--- a/src/buildstream/utils.py
+++ b/src/buildstream/utils.py
@@ -52,7 +52,7 @@ from ._utils import url_directory_name # pylint: disable=unused-import
BST_ARBITRARY_TIMESTAMP = calendar.timegm((2011, 11, 11, 11, 11, 11))
# The separator we use for user specified aliases
-_ALIAS_SEPARATOR = ':'
+_ALIAS_SEPARATOR = ":"
_URI_SCHEMES = ["http", "https", "ftp", "file", "git", "sftp", "ssh"]
# Main process pid
@@ -74,6 +74,7 @@ class UtilError(BstError):
or either of the :class:`.ElementError` or :class:`.SourceError`
exceptions should be raised from this error.
"""
+
def __init__(self, message, reason=None):
super().__init__(message, domain=ErrorDomain.UTIL, reason=reason)
@@ -83,6 +84,7 @@ class ProgramNotFoundError(BstError):
It is normally unneeded to handle this exception from plugin code.
"""
+
def __init__(self, message, reason=None):
super().__init__(message, domain=ErrorDomain.PROG_NOT_FOUND, reason=reason)
@@ -92,7 +94,7 @@ class DirectoryExistsError(OSError):
"""
-class FileListResult():
+class FileListResult:
"""An object which stores the result of one of the operations
which run on a list of files.
"""
@@ -112,7 +114,7 @@ class FileListResult():
self.files_written = []
"""List of files that were written."""
- def combine(self, other: 'FileListResult') -> 'FileListResult':
+ def combine(self, other: "FileListResult") -> "FileListResult":
"""Create a new FileListResult that contains the results of both.
"""
ret = FileListResult()
@@ -165,10 +167,10 @@ def list_relative_paths(directory: str) -> Iterator[str]:
# We don't want "./" pre-pended to all the entries in the root of
# `directory`, prefer to have no prefix in that case.
- basepath = relpath if relpath != '.' and dirpath != directory else ''
+ basepath = relpath if relpath != "." and dirpath != directory else ""
# First yield the walked directory itself, except for the root
- if basepath != '':
+ if basepath != "":
yield basepath
# List the filenames in the walked directory
@@ -248,8 +250,9 @@ def sha256sum(filename: str) -> str:
h.update(chunk)
except OSError as e:
- raise UtilError("Failed to get a checksum of file '{}': {}"
- .format(filename, e)) from e
+ raise UtilError(
+ "Failed to get a checksum of file '{}': {}".format(filename, e)
+ ) from e
return h.hexdigest()
@@ -274,8 +277,9 @@ def safe_copy(src: str, dest: str, *, result: Optional[FileListResult] = None) -
os.unlink(dest)
except OSError as e:
if e.errno != errno.ENOENT:
- raise UtilError("Failed to remove destination file '{}': {}"
- .format(dest, e)) from e
+ raise UtilError(
+ "Failed to remove destination file '{}': {}".format(dest, e)
+ ) from e
shutil.copyfile(src, dest)
try:
@@ -291,11 +295,12 @@ def safe_copy(src: str, dest: str, *, result: Optional[FileListResult] = None) -
result.failed_attributes.append(dest)
except shutil.Error as e:
- raise UtilError("Failed to copy '{} -> {}': {}"
- .format(src, dest, e)) from e
+ raise UtilError("Failed to copy '{} -> {}': {}".format(src, dest, e)) from e
-def safe_link(src: str, dest: str, *, result: Optional[FileListResult] = None, _unlink=False) -> None:
+def safe_link(
+ src: str, dest: str, *, result: Optional[FileListResult] = None, _unlink=False
+) -> None:
"""Try to create a hardlink, but resort to copying in the case of cross device links.
Args:
@@ -313,8 +318,9 @@ def safe_link(src: str, dest: str, *, result: Optional[FileListResult] = None, _
os.unlink(dest)
except OSError as e:
if e.errno != errno.ENOENT:
- raise UtilError("Failed to remove destination file '{}': {}"
- .format(dest, e)) from e
+ raise UtilError(
+ "Failed to remove destination file '{}': {}".format(dest, e)
+ ) from e
# If we can't link it due to cross-device hardlink, copy
try:
@@ -326,8 +332,7 @@ def safe_link(src: str, dest: str, *, result: Optional[FileListResult] = None, _
elif e.errno == errno.EXDEV:
safe_copy(src, dest)
else:
- raise UtilError("Failed to link '{} -> {}': {}"
- .format(src, dest, e)) from e
+ raise UtilError("Failed to link '{} -> {}': {}".format(src, dest, e)) from e
def safe_remove(path: str) -> bool:
@@ -363,16 +368,17 @@ def safe_remove(path: str) -> bool:
# Path does not exist
return True
- raise UtilError("Failed to remove '{}': {}"
- .format(path, e))
+ raise UtilError("Failed to remove '{}': {}".format(path, e))
-def copy_files(src: str,
- dest: str,
- *,
- filter_callback: Optional[Callable[[str], bool]] = None,
- ignore_missing: bool = False,
- report_written: bool = False) -> FileListResult:
+def copy_files(
+ src: str,
+ dest: str,
+ *,
+ filter_callback: Optional[Callable[[str], bool]] = None,
+ ignore_missing: bool = False,
+ report_written: bool = False
+) -> FileListResult:
"""Copy files from source to destination.
Args:
@@ -401,22 +407,28 @@ def copy_files(src: str,
"""
result = FileListResult()
try:
- _process_list(src, dest, safe_copy, result,
- filter_callback=filter_callback,
- ignore_missing=ignore_missing,
- report_written=report_written)
+ _process_list(
+ src,
+ dest,
+ safe_copy,
+ result,
+ filter_callback=filter_callback,
+ ignore_missing=ignore_missing,
+ report_written=report_written,
+ )
except OSError as e:
- raise UtilError("Failed to copy '{} -> {}': {}"
- .format(src, dest, e))
+ raise UtilError("Failed to copy '{} -> {}': {}".format(src, dest, e))
return result
-def link_files(src: str,
- dest: str,
- *,
- filter_callback: Optional[Callable[[str], bool]] = None,
- ignore_missing: bool = False,
- report_written: bool = False) -> FileListResult:
+def link_files(
+ src: str,
+ dest: str,
+ *,
+ filter_callback: Optional[Callable[[str], bool]] = None,
+ ignore_missing: bool = False,
+ report_written: bool = False
+) -> FileListResult:
"""Hardlink files from source to destination.
Args:
@@ -450,13 +462,17 @@ def link_files(src: str,
"""
result = FileListResult()
try:
- _process_list(src, dest, safe_link, result,
- filter_callback=filter_callback,
- ignore_missing=ignore_missing,
- report_written=report_written)
+ _process_list(
+ src,
+ dest,
+ safe_link,
+ result,
+ filter_callback=filter_callback,
+ ignore_missing=ignore_missing,
+ report_written=report_written,
+ )
except OSError as e:
- raise UtilError("Failed to link '{} -> {}': {}"
- .format(src, dest, e))
+ raise UtilError("Failed to link '{} -> {}': {}".format(src, dest, e))
return result
@@ -473,11 +489,13 @@ def get_host_tool(name: str) -> str:
Raises:
:class:`.ProgramNotFoundError`
"""
- search_path = os.environ.get('PATH')
+ search_path = os.environ.get("PATH")
program_path = shutil.which(name, path=search_path)
if not program_path:
- raise ProgramNotFoundError("Did not find '{}' in PATH: {}".format(name, search_path))
+ raise ProgramNotFoundError(
+ "Did not find '{}' in PATH: {}".format(name, search_path)
+ )
return program_path
@@ -491,25 +509,35 @@ def get_bst_version() -> Tuple[int, int]:
"""
# Import this only conditionally, it's not resolved at bash complete time
from . import __version__ # pylint: disable=cyclic-import
- versions = __version__.split('.')[:2]
- if versions[0] == '0+untagged':
- raise UtilError("Your git repository has no tags - BuildStream can't "
- "determine its version. Please run `git fetch --tags`.")
+ versions = __version__.split(".")[:2]
+
+ if versions[0] == "0+untagged":
+ raise UtilError(
+ "Your git repository has no tags - BuildStream can't "
+ "determine its version. Please run `git fetch --tags`."
+ )
try:
return (int(versions[0]), int(versions[1]))
except IndexError:
- raise UtilError("Cannot detect Major and Minor parts of the version\n"
- "Version: {} not in XX.YY.whatever format"
- .format(__version__))
+ raise UtilError(
+ "Cannot detect Major and Minor parts of the version\n"
+ "Version: {} not in XX.YY.whatever format".format(__version__)
+ )
except ValueError:
- raise UtilError("Cannot convert version to integer numbers\n"
- "Version: {} not in Integer.Integer.whatever format"
- .format(__version__))
-
-
-def move_atomic(source: Union[Path, str], destination: Union[Path, str], *, ensure_parents: bool = True) -> None:
+ raise UtilError(
+ "Cannot convert version to integer numbers\n"
+ "Version: {} not in Integer.Integer.whatever format".format(__version__)
+ )
+
+
+def move_atomic(
+ source: Union[Path, str],
+ destination: Union[Path, str],
+ *,
+ ensure_parents: bool = True
+) -> None:
"""Move the source to the destination using atomic primitives.
This uses `os.rename` to move a file or directory to a new destination.
@@ -548,16 +576,18 @@ def move_atomic(source: Union[Path, str], destination: Union[Path, str], *, ensu
@contextmanager
-def save_file_atomic(filename: str,
- mode: str = 'w',
- *,
- buffering: int = -1,
- encoding: Optional[str] = None,
- errors: Optional[str] = None,
- newline: Optional[str] = None,
- closefd: bool = True,
- opener: Optional[Callable[[str, int], int]] = None,
- tempdir: Optional[str] = None) -> Iterator[IO]:
+def save_file_atomic(
+ filename: str,
+ mode: str = "w",
+ *,
+ buffering: int = -1,
+ encoding: Optional[str] = None,
+ errors: Optional[str] = None,
+ newline: Optional[str] = None,
+ closefd: bool = True,
+ opener: Optional[Callable[[str, int], int]] = None,
+ tempdir: Optional[str] = None
+) -> Iterator[IO]:
"""Save a file with a temporary name and rename it into place when ready.
This is a context manager which is meant for saving data to files.
@@ -583,14 +613,24 @@ def save_file_atomic(filename: str,
# This feature has been proposed for upstream Python in the past, e.g.:
# https://bugs.python.org/issue8604
- assert os.path.isabs(filename), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
+ assert os.path.isabs(
+ filename
+ ), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
if tempdir is None:
tempdir = os.path.dirname(filename)
fd, tempname = tempfile.mkstemp(dir=tempdir)
os.close(fd)
- f = open(tempname, mode=mode, buffering=buffering, encoding=encoding,
- errors=errors, newline=newline, closefd=closefd, opener=opener)
+ f = open(
+ tempname,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ errors=errors,
+ newline=newline,
+ closefd=closefd,
+ opener=opener,
+ )
def cleanup_tempfile():
f.close()
@@ -599,12 +639,14 @@ def save_file_atomic(filename: str,
except FileNotFoundError:
pass
except OSError as e:
- raise UtilError("Failed to cleanup temporary file {}: {}".format(tempname, e)) from e
+ raise UtilError(
+ "Failed to cleanup temporary file {}: {}".format(tempname, e)
+ ) from e
try:
with _signals.terminator(cleanup_tempfile):
# Disable type-checking since "IO[Any]" has no attribute "real_filename"
- f.real_filename = filename # type: ignore
+ f.real_filename = filename # type: ignore
yield f
f.close()
# This operation is atomic, at least on platforms we care about:
@@ -660,8 +702,9 @@ def _get_volume_size(path):
try:
usage = shutil.disk_usage(path)
except OSError as e:
- raise UtilError("Failed to retrieve stats on volume for path '{}': {}"
- .format(path, e)) from e
+ raise UtilError(
+ "Failed to retrieve stats on volume for path '{}': {}".format(path, e)
+ ) from e
return usage.total, usage.free
@@ -685,16 +728,16 @@ def _get_volume_size(path):
# UtilError if the string is not a valid data size.
#
def _parse_size(size, volume):
- if size == 'infinity':
+ if size == "infinity":
return None
- matches = re.fullmatch(r'([0-9]+\.?[0-9]*)([KMGT%]?)', size)
+ matches = re.fullmatch(r"([0-9]+\.?[0-9]*)([KMGT%]?)", size)
if matches is None:
raise UtilError("{} is not a valid data size.".format(size))
num, unit = matches.groups()
- if unit == '%':
+ if unit == "%":
num = float(num)
if num > 100:
raise UtilError("{}% is not a valid percentage value.".format(num))
@@ -703,8 +746,8 @@ def _parse_size(size, volume):
return disk_size * (num / 100)
- units = ('', 'K', 'M', 'G', 'T')
- return int(num) * 1024**units.index(unit)
+ units = ("", "K", "M", "G", "T")
+ return int(num) * 1024 ** units.index(unit)
# _pretty_size()
@@ -720,8 +763,8 @@ def _parse_size(size, volume):
# (str): The string representation of the number of bytes in the largest
def _pretty_size(size, dec_places=0):
psize = size
- unit = 'B'
- units = ('B', 'K', 'M', 'G', 'T')
+ unit = "B"
+ units = ("B", "K", "M", "G", "T")
for unit in units:
if psize < 1024:
break
@@ -746,19 +789,21 @@ def _force_rmtree(rootpath, **kwargs):
os.chmod(rootpath, 0o755)
for root, dirs, _ in os.walk(rootpath):
for d in dirs:
- path = os.path.join(root, d.lstrip('/'))
+ path = os.path.join(root, d.lstrip("/"))
if os.path.exists(path) and not os.path.islink(path):
try:
os.chmod(path, 0o755)
except OSError as e:
- raise UtilError("Failed to ensure write permission on file '{}': {}"
- .format(path, e))
+ raise UtilError(
+ "Failed to ensure write permission on file '{}': {}".format(
+ path, e
+ )
+ )
try:
shutil.rmtree(rootpath, **kwargs)
except OSError as e:
- raise UtilError("Failed to remove cache directory '{}': {}"
- .format(rootpath, e))
+ raise UtilError("Failed to remove cache directory '{}': {}".format(rootpath, e))
# Recursively make directories in target area
@@ -779,8 +824,10 @@ def _copy_directories(srcdir, destdir, target):
os.makedirs(new_dir)
yield (new_dir, mode)
else:
- raise UtilError('Source directory tree has file where '
- 'directory expected: {}'.format(old_dir))
+ raise UtilError(
+ "Source directory tree has file where "
+ "directory expected: {}".format(old_dir)
+ )
else:
if not os.access(new_dir, os.W_OK):
# If the destination directory is not writable, change permissions to make it
@@ -806,16 +853,18 @@ def _ensure_real_directory(root, path):
try:
deststat = os.lstat(destpath)
if not stat.S_ISDIR(deststat.st_mode):
- relpath = destpath[len(root):]
+ relpath = destpath[len(root) :]
if stat.S_ISLNK(deststat.st_mode):
- filetype = 'symlink'
+ filetype = "symlink"
elif stat.S_ISREG(deststat.st_mode):
- filetype = 'regular file'
+ filetype = "regular file"
else:
- filetype = 'special file'
+ filetype = "special file"
- raise UtilError('Destination is a {}, not a directory: {}'.format(filetype, relpath))
+ raise UtilError(
+ "Destination is a {}, not a directory: {}".format(filetype, relpath)
+ )
except FileNotFoundError:
os.makedirs(destpath)
@@ -836,9 +885,15 @@ def _ensure_real_directory(root, path):
 # ignore_missing: Don't raise any error if a source file is missing
#
#
-def _process_list(srcdir, destdir, actionfunc, result,
- filter_callback=None,
- ignore_missing=False, report_written=False):
+def _process_list(
+ srcdir,
+ destdir,
+ actionfunc,
+ result,
+ filter_callback=None,
+ ignore_missing=False,
+ report_written=False,
+):
# Keep track of directory permissions, since these need to be set
# *after* files have been written.
@@ -921,7 +976,9 @@ def _process_list(srcdir, destdir, actionfunc, result,
else:
# Unsupported type.
- raise UtilError('Cannot extract {} into staging-area. Unsupported type.'.format(srcpath))
+ raise UtilError(
+ "Cannot extract {} into staging-area. Unsupported type.".format(srcpath)
+ )
# Write directory permissions now that all files have been written
for d, perms in permissions:
@@ -1028,15 +1085,18 @@ def _tempdir(suffix="", prefix="tmp", dir=None): # pylint: disable=redefined-bu
# on SIGTERM.
#
@contextmanager
-def _tempnamedfile(suffix="", prefix="tmp", dir=None): # pylint: disable=redefined-builtin
+def _tempnamedfile(
+ suffix="", prefix="tmp", dir=None
+): # pylint: disable=redefined-builtin
temp = None
def close_tempfile():
if temp is not None:
temp.close()
- with _signals.terminator(close_tempfile), \
- tempfile.NamedTemporaryFile(suffix=suffix, prefix=prefix, dir=dir) as temp:
+ with _signals.terminator(close_tempfile), tempfile.NamedTemporaryFile(
+ suffix=suffix, prefix=prefix, dir=dir
+ ) as temp:
yield temp
@@ -1145,13 +1205,13 @@ def _kill_process_tree(pid):
#
def _call(*popenargs, terminate=False, **kwargs):
- kwargs['start_new_session'] = True
+ kwargs["start_new_session"] = True
process = None
- old_preexec_fn = kwargs.get('preexec_fn')
- if 'preexec_fn' in kwargs:
- del kwargs['preexec_fn']
+ old_preexec_fn = kwargs.get("preexec_fn")
+ if "preexec_fn" in kwargs:
+ del kwargs["preexec_fn"]
def preexec_fn():
os.umask(stat.S_IWGRP | stat.S_IWOTH)
@@ -1201,9 +1261,12 @@ def _call(*popenargs, terminate=False, **kwargs):
group_id = os.getpgid(process.pid)
os.killpg(group_id, signal.SIGCONT)
- with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(kill_proc):
+ with _signals.suspendable(suspend_proc, resume_proc), _signals.terminator(
+ kill_proc
+ ):
process = subprocess.Popen( # pylint: disable=subprocess-popen-preexec-fn
- *popenargs, preexec_fn=preexec_fn, universal_newlines=True, **kwargs)
+ *popenargs, preexec_fn=preexec_fn, universal_newlines=True, **kwargs
+ )
output, _ = process.communicate()
exit_code = process.poll()
@@ -1237,44 +1300,44 @@ def _call(*popenargs, terminate=False, **kwargs):
#
def _glob2re(pat):
i, n = 0, len(pat)
- res = '(?ms)'
+ res = "(?ms)"
while i < n:
c = pat[i]
i = i + 1
- if c == '*':
+ if c == "*":
# fnmatch.translate() simply uses the '.*' separator here,
# we only want that for double asterisk (bash 'globstar' behavior)
#
- if i < n and pat[i] == '*':
- res = res + '.*'
+ if i < n and pat[i] == "*":
+ res = res + ".*"
i = i + 1
else:
- res = res + '[^/]*'
- elif c == '?':
+ res = res + "[^/]*"
+ elif c == "?":
# fnmatch.translate() simply uses the '.' wildcard here, but
        # we don't want to match path separators here
- res = res + '[^/]'
- elif c == '[':
+ res = res + "[^/]"
+ elif c == "[":
j = i
- if j < n and pat[j] == '!':
+ if j < n and pat[j] == "!":
j = j + 1
- if j < n and pat[j] == ']':
+ if j < n and pat[j] == "]":
j = j + 1
- while j < n and pat[j] != ']':
+ while j < n and pat[j] != "]":
j = j + 1
if j >= n:
- res = res + '\\['
+ res = res + "\\["
else:
- stuff = pat[i:j].replace('\\', '\\\\')
+ stuff = pat[i:j].replace("\\", "\\\\")
i = j + 1
- if stuff[0] == '!':
- stuff = '^' + stuff[1:]
- elif stuff[0] == '^':
- stuff = '\\' + stuff
- res = '{}[{}]'.format(res, stuff)
+ if stuff[0] == "!":
+ stuff = "^" + stuff[1:]
+ elif stuff[0] == "^":
+ stuff = "\\" + stuff
+ res = "{}[{}]".format(res, stuff)
else:
res = res + re.escape(c)
- return res + r'\Z'
+ return res + r"\Z"
# _deduplicate()
@@ -1392,7 +1455,7 @@ def _deterministic_umask():
#
#
def _get_compression(tar):
- mapped_extensions = {'.tar': '', '.gz': 'gz', '.xz': 'xz', '.bz2': 'bz2'}
+ mapped_extensions = {".tar": "", ".gz": "gz", ".xz": "xz", ".bz2": "bz2"}
name, ext = os.path.splitext(tar)
@@ -1403,12 +1466,16 @@ def _get_compression(tar):
# If so, we assume we have been given an unsupported extension,
# which expects compression. Raise an error
_, suffix = os.path.splitext(name)
- if suffix == '.tar':
- raise UtilError("Expected compression with unknown file extension ('{}'), "
- "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')".format(ext))
+ if suffix == ".tar":
+ raise UtilError(
+ "Expected compression with unknown file extension ('{}'), "
+ "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')".format(
+ ext
+ )
+ )
# Assume just an unconventional name was provided, default to uncompressed
- return ''
+ return ""
# _is_single_threaded()
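
The save_file_atomic() helper reformatted above writes to a temporary file and renames it into place when the context manager exits cleanly. A minimal usage sketch; the destination path is illustrative and, as the assertion above enforces, must be absolute.

    import os
    from buildstream import utils

    destination = os.path.abspath("example-project.conf")
    with utils.save_file_atomic(destination, "w") as f:
        f.write("name: example\n")
    # On success the temporary file has been renamed over `destination`;
    # on an exception it is cleaned up instead.
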
diff --git a/tests/artifactcache/artifactservice.py b/tests/artifactcache/artifactservice.py
index 51f128c32..dafbc8fc9 100644
--- a/tests/artifactcache/artifactservice.py
+++ b/tests/artifactcache/artifactservice.py
@@ -22,11 +22,15 @@ from urllib.parse import urlparse
import grpc
import pytest
-from buildstream._protos.buildstream.v2.artifact_pb2 \
- import Artifact, GetArtifactRequest, UpdateArtifactRequest
+from buildstream._protos.buildstream.v2.artifact_pb2 import (
+ Artifact,
+ GetArtifactRequest,
+ UpdateArtifactRequest,
+)
from buildstream._protos.buildstream.v2.artifact_pb2_grpc import ArtifactServiceStub
-from buildstream._protos.build.bazel.remote.execution.v2 \
- import remote_execution_pb2 as re_pb2
+from buildstream._protos.build.bazel.remote.execution.v2 import (
+ remote_execution_pb2 as re_pb2,
+)
from buildstream import utils
from tests.testutils.artifactshare import create_artifact_share
@@ -96,9 +100,14 @@ def test_update_artifact(tmpdir, files):
except grpc.RpcError as e:
assert e.code() == grpc.StatusCode.FAILED_PRECONDITION
if files == "absent":
- assert e.details() == "Artifact files specified but no files found"
+ assert (
+ e.details() == "Artifact files specified but no files found"
+ )
elif files == "invalid":
- assert e.details() == "Artifact files specified but directory not found"
+ assert (
+ e.details()
+ == "Artifact files specified but directory not found"
+ )
return
# If we uploaded the artifact check GetArtifact
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 8b01a9ebe..d2df0fd79 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -19,31 +19,27 @@ from tests.testutils import dummy_context
DATA_DIR = os.path.dirname(os.path.realpath(__file__))
-cache1 = RemoteSpec(url='https://example.com/cache1', push=True)
-cache2 = RemoteSpec(url='https://example.com/cache2', push=False)
-cache3 = RemoteSpec(url='https://example.com/cache3', push=False)
-cache4 = RemoteSpec(url='https://example.com/cache4', push=False)
-cache5 = RemoteSpec(url='https://example.com/cache5', push=False)
-cache6 = RemoteSpec(url='https://example.com/cache6',
- push=True,
- type=RemoteType.ALL)
-cache7 = RemoteSpec(url='https://index.example.com/cache1',
- push=True,
- type=RemoteType.INDEX)
-cache8 = RemoteSpec(url='https://storage.example.com/cache1',
- push=True,
- type=RemoteType.STORAGE)
+cache1 = RemoteSpec(url="https://example.com/cache1", push=True)
+cache2 = RemoteSpec(url="https://example.com/cache2", push=False)
+cache3 = RemoteSpec(url="https://example.com/cache3", push=False)
+cache4 = RemoteSpec(url="https://example.com/cache4", push=False)
+cache5 = RemoteSpec(url="https://example.com/cache5", push=False)
+cache6 = RemoteSpec(url="https://example.com/cache6", push=True, type=RemoteType.ALL)
+cache7 = RemoteSpec(
+ url="https://index.example.com/cache1", push=True, type=RemoteType.INDEX
+)
+cache8 = RemoteSpec(
+ url="https://storage.example.com/cache1", push=True, type=RemoteType.STORAGE
+)
# Generate cache configuration fragments for the user config and project config files.
#
-def configure_remote_caches(override_caches,
- project_caches=None,
- user_caches=None):
+def configure_remote_caches(override_caches, project_caches=None, user_caches=None):
type_strings = {
- RemoteType.INDEX: 'index',
- RemoteType.STORAGE: 'storage',
- RemoteType.ALL: 'all'
+ RemoteType.INDEX: "index",
+ RemoteType.STORAGE: "storage",
+ RemoteType.ALL: "all",
}
if project_caches is None:
@@ -54,39 +50,37 @@ def configure_remote_caches(override_caches,
user_config = {}
if len(user_caches) == 1:
- user_config['artifacts'] = {
- 'url': user_caches[0].url,
- 'push': user_caches[0].push,
- 'type': type_strings[user_caches[0].type]
+ user_config["artifacts"] = {
+ "url": user_caches[0].url,
+ "push": user_caches[0].push,
+ "type": type_strings[user_caches[0].type],
}
elif len(user_caches) > 1:
- user_config['artifacts'] = [
- {
- 'url': cache.url,
- 'push': cache.push,
- 'type': type_strings[cache.type]
- } for cache in user_caches
+ user_config["artifacts"] = [
+ {"url": cache.url, "push": cache.push, "type": type_strings[cache.type]}
+ for cache in user_caches
]
if len(override_caches) == 1:
- user_config['projects'] = {
- 'test': {
- 'artifacts': {
- 'url': override_caches[0].url,
- 'push': override_caches[0].push,
- 'type': type_strings[override_caches[0].type]
+ user_config["projects"] = {
+ "test": {
+ "artifacts": {
+ "url": override_caches[0].url,
+ "push": override_caches[0].push,
+ "type": type_strings[override_caches[0].type],
}
}
}
elif len(override_caches) > 1:
- user_config['projects'] = {
- 'test': {
- 'artifacts': [
+ user_config["projects"] = {
+ "test": {
+ "artifacts": [
{
- 'url': cache.url,
- 'push': cache.push,
- 'type': type_strings[cache.type]
- } for cache in override_caches
+ "url": cache.url,
+ "push": cache.push,
+ "type": type_strings[cache.type],
+ }
+ for cache in override_caches
]
}
}
@@ -94,23 +88,28 @@ def configure_remote_caches(override_caches,
project_config = {}
if project_caches:
if len(project_caches) == 1:
- project_config.update({
- 'artifacts': {
- 'url': project_caches[0].url,
- 'push': project_caches[0].push,
- 'type': type_strings[project_caches[0].type],
+ project_config.update(
+ {
+ "artifacts": {
+ "url": project_caches[0].url,
+ "push": project_caches[0].push,
+ "type": type_strings[project_caches[0].type],
+ }
}
- })
+ )
elif len(project_caches) > 1:
- project_config.update({
- 'artifacts': [
- {
- 'url': cache.url,
- 'push': cache.push,
- 'type': type_strings[cache.type]
- } for cache in project_caches
- ]
- })
+ project_config.update(
+ {
+ "artifacts": [
+ {
+ "url": cache.url,
+ "push": cache.push,
+ "type": type_strings[cache.type],
+ }
+ for cache in project_caches
+ ]
+ }
+ )
return user_config, project_config
@@ -118,27 +117,38 @@ def configure_remote_caches(override_caches,
# Test that parsing the remote artifact cache locations produces the
# expected results.
@pytest.mark.parametrize(
- 'override_caches, project_caches, user_caches',
+ "override_caches, project_caches, user_caches",
[
# The leftmost cache is the highest priority one in all cases here.
- pytest.param([], [], [], id='empty-config'),
- pytest.param([], [], [cache1, cache2], id='user-config'),
- pytest.param([], [cache1, cache2], [cache3], id='project-config'),
- pytest.param([cache1], [cache2], [cache3], id='project-override-in-user-config'),
- pytest.param([cache1, cache2], [cache3, cache4], [cache5, cache6], id='list-order'),
- pytest.param([cache1, cache2, cache1], [cache2], [cache2, cache1], id='duplicates'),
- pytest.param([cache7, cache8], [], [cache1], id='split-caches')
- ])
-def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user_caches):
+ pytest.param([], [], [], id="empty-config"),
+ pytest.param([], [], [cache1, cache2], id="user-config"),
+ pytest.param([], [cache1, cache2], [cache3], id="project-config"),
+ pytest.param(
+ [cache1], [cache2], [cache3], id="project-override-in-user-config"
+ ),
+ pytest.param(
+ [cache1, cache2], [cache3, cache4], [cache5, cache6], id="list-order"
+ ),
+ pytest.param(
+ [cache1, cache2, cache1], [cache2], [cache2, cache1], id="duplicates"
+ ),
+ pytest.param([cache7, cache8], [], [cache1], id="split-caches"),
+ ],
+)
+def test_artifact_cache_precedence(
+ tmpdir, override_caches, project_caches, user_caches
+):
# Produce a fake user and project config with the cache configuration.
- user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
- project_config['name'] = 'test'
+ user_config, project_config = configure_remote_caches(
+ override_caches, project_caches, user_caches
+ )
+ project_config["name"] = "test"
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ user_config_file = str(tmpdir.join("buildstream.conf"))
_yaml.roundtrip_dump(user_config, file=user_config_file)
- project_dir = tmpdir.mkdir('project')
- project_config_file = str(project_dir.join('project.conf'))
+ project_dir = tmpdir.mkdir("project")
+ project_config_file = str(project_dir.join("project.conf"))
_yaml.roundtrip_dump(project_config, file=project_config_file)
with dummy_context(config=user_config_file) as context:
@@ -146,10 +156,14 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
project.ensure_fully_loaded()
# Use the helper from the artifactcache module to parse our configuration.
- parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(context, project)
+ parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(
+ context, project
+ )
# Verify that it was correctly read.
- expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
+ expected_cache_specs = list(
+ _deduplicate(itertools.chain(override_caches, project_caches, user_caches))
+ )
assert parsed_cache_specs == expected_cache_specs
@@ -157,29 +171,28 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
# without specifying its counterpart, we get a comprehensive LoadError
# instead of an unhandled exception.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize('config_key, config_value', [
- ('client-cert', 'client.crt'),
- ('client-key', 'client.key')
-])
+@pytest.mark.parametrize(
+ "config_key, config_value",
+ [("client-cert", "client.crt"), ("client-key", "client.key")],
+)
def test_missing_certs(cli, datafiles, config_key, config_value):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "missing-certs")
project_conf = {
- 'name': 'test',
-
- 'artifacts': {
- 'url': 'https://cache.example.com:12345',
- 'push': 'true',
- config_key: config_value
- }
+ "name": "test",
+ "artifacts": {
+ "url": "https://cache.example.com:12345",
+ "push": "true",
+ config_key: config_value,
+ },
}
- project_conf_file = os.path.join(project, 'project.conf')
+ project_conf_file = os.path.join(project, "project.conf")
_yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
# This does not happen for a simple `bst show`.
- result = cli.run(project=project, args=['artifact', 'pull', 'element.bst'])
+ result = cli.run(project=project, args=["artifact", "pull", "element.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -187,30 +200,33 @@ def test_missing_certs(cli, datafiles, config_key, config_value):
# only one type of storage.
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
- 'override_caches, project_caches, user_caches',
+ "override_caches, project_caches, user_caches",
[
# The leftmost cache is the highest priority one in all cases here.
- pytest.param([], [], [cache7], id='index-user'),
- pytest.param([], [], [cache8], id='storage-user'),
- pytest.param([], [cache7], [], id='index-project'),
- pytest.param([], [cache8], [], id='storage-project'),
- pytest.param([cache7], [], [], id='index-override'),
- pytest.param([cache8], [], [], id='storage-override'),
- ])
+ pytest.param([], [], [cache7], id="index-user"),
+ pytest.param([], [], [cache8], id="storage-user"),
+ pytest.param([], [cache7], [], id="index-project"),
+ pytest.param([], [cache8], [], id="storage-project"),
+ pytest.param([cache7], [], [], id="index-override"),
+ pytest.param([cache8], [], [], id="storage-override"),
+ ],
+)
def test_only_one(cli, datafiles, override_caches, project_caches, user_caches):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'only-one')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "only-one")
# Produce a fake user and project config with the cache configuration.
- user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
- project_config['name'] = 'test'
+ user_config, project_config = configure_remote_caches(
+ override_caches, project_caches, user_caches
+ )
+ project_config["name"] = "test"
cli.configure(user_config)
- project_config_file = os.path.join(project, 'project.conf')
+ project_config_file = os.path.join(project, "project.conf")
_yaml.roundtrip_dump(project_config, file=project_config_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
# This does not happen for a simple `bst show`.
- result = cli.run(project=project, args=['artifact', 'pull', 'element.bst'])
+ result = cli.run(project=project, args=["artifact", "pull", "element.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
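
For reference, the configuration fragments assembled by configure_remote_caches() above take roughly this shape for a single user-level cache plus one project override; the URLs are illustrative.

    user_config = {
        "artifacts": {
            "url": "https://example.com/cache1",
            "push": True,
            "type": "all",
        },
        "projects": {
            "test": {
                "artifacts": {
                    "url": "https://index.example.com/cache1",
                    "push": True,
                    "type": "index",
                }
            }
        },
    }
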
diff --git a/tests/artifactcache/expiry.py b/tests/artifactcache/expiry.py
index 9ede1a8d3..1bc7b9781 100644
--- a/tests/artifactcache/expiry.py
+++ b/tests/artifactcache/expiry.py
@@ -32,10 +32,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
from tests.testutils import create_element_size, wait_for_cache_granularity
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "expiry"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "expiry")
def get_cache_usage(directory):
@@ -59,103 +56,97 @@ def get_cache_usage(directory):
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_expires(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
- cli.configure({
- 'cache': {
- 'quota': 10000000,
- }
- })
+ cli.configure({"cache": {"quota": 10000000,}})
# Create an element that uses almost the entire cache (an empty
# ostree cache starts at about ~10KiB, so we need a bit of a
# buffer)
- create_element_size('target.bst', project, element_path, [], 6000000)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], 6000000)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Our cache should now be almost full. Let's create another
# artifact and see if we can cause buildstream to delete the old
# one.
- create_element_size('target2.bst', project, element_path, [], 6000000)
- res = cli.run(project=project, args=['build', 'target2.bst'])
+ create_element_size("target2.bst", project, element_path, [], 6000000)
+ res = cli.run(project=project, args=["build", "target2.bst"])
res.assert_success()
# Check that the correct element remains in the cache
- states = cli.get_element_states(project, ['target.bst', 'target2.bst'])
- assert states['target.bst'] != 'cached'
- assert states['target2.bst'] == 'cached'
+ states = cli.get_element_states(project, ["target.bst", "target2.bst"])
+ assert states["target.bst"] != "cached"
+ assert states["target2.bst"] == "cached"
# Ensure that we don't end up deleting the whole cache (or worse) if
# we try to store an artifact that is too large to fit in the quota.
-@pytest.mark.parametrize('size', [
- # Test an artifact that is obviously too large
- (500000),
- # Test an artifact that might be too large due to slight overhead
- # of storing stuff in ostree
- (399999)
-])
+@pytest.mark.parametrize(
+ "size",
+ [
+ # Test an artifact that is obviously too large
+ (500000),
+ # Test an artifact that might be too large due to slight overhead
+ # of storing stuff in ostree
+ (399999),
+ ],
+)
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_too_large(cli, datafiles, size):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
- cli.configure({
- 'cache': {
- 'quota': 400000
- }
- })
+ cli.configure({"cache": {"quota": 400000}})
# Create an element whose artifact is too large
- create_element_size('target.bst', project, element_path, [], size)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], size)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_main_error(ErrorDomain.STREAM, None)
- res.assert_task_error(ErrorDomain.CAS, 'cache-too-full')
+ res.assert_task_error(ErrorDomain.CAS, "cache-too-full")
@pytest.mark.datafiles(DATA_DIR)
def test_expiry_order(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
- checkout = os.path.join(project, 'workspace')
+ element_path = "elements"
+ checkout = os.path.join(project, "workspace")
- cli.configure({
- 'cache': {
- 'quota': 9000000
- }
- })
+ cli.configure({"cache": {"quota": 9000000}})
# Create an artifact
- create_element_size('dep.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'dep.bst'])
+ create_element_size("dep.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "dep.bst"])
res.assert_success()
# Create another artifact
- create_element_size('unrelated.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'unrelated.bst'])
+ create_element_size("unrelated.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "unrelated.bst"])
res.assert_success()
# And build something else
- create_element_size('target.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
- create_element_size('target2.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'target2.bst'])
+ create_element_size("target2.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "target2.bst"])
res.assert_success()
wait_for_cache_granularity()
# Now extract dep.bst
- res = cli.run(project=project, args=['artifact', 'checkout', 'dep.bst', '--directory', checkout])
+ res = cli.run(
+ project=project,
+ args=["artifact", "checkout", "dep.bst", "--directory", checkout],
+ )
res.assert_success()
# Finally, build something that will cause the cache to overflow
- create_element_size('expire.bst', project, element_path, [], 2000000)
- res = cli.run(project=project, args=['build', 'expire.bst'])
+ create_element_size("expire.bst", project, element_path, [], 2000000)
+ res = cli.run(project=project, args=["build", "expire.bst"])
res.assert_success()
# While dep.bst was the first element to be created, it should not
@@ -163,11 +154,20 @@ def test_expiry_order(cli, datafiles):
# Note that buildstream will reduce the cache to 50% of the
# original size - we therefore remove multiple elements.
check_elements = [
- 'unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst'
+ "unrelated.bst",
+ "target.bst",
+ "target2.bst",
+ "dep.bst",
+ "expire.bst",
]
states = cli.get_element_states(project, check_elements)
- assert (tuple(states[element] for element in check_elements) ==
- ('buildable', 'buildable', 'buildable', 'cached', 'cached', ))
+ assert tuple(states[element] for element in check_elements) == (
+ "buildable",
+ "buildable",
+ "buildable",
+ "cached",
+ "cached",
+ )
# Ensure that we don't accidentally remove an artifact from something
@@ -176,28 +176,24 @@ def test_expiry_order(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_keep_dependencies(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
- cli.configure({
- 'cache': {
- 'quota': 10000000
- }
- })
+ cli.configure({"cache": {"quota": 10000000}})
# Create a pretty big dependency
- create_element_size('dependency.bst', project, element_path, [], 5000000)
- res = cli.run(project=project, args=['build', 'dependency.bst'])
+ create_element_size("dependency.bst", project, element_path, [], 5000000)
+ res = cli.run(project=project, args=["build", "dependency.bst"])
res.assert_success()
# Now create some other unrelated artifact
- create_element_size('unrelated.bst', project, element_path, [], 4000000)
- res = cli.run(project=project, args=['build', 'unrelated.bst'])
+ create_element_size("unrelated.bst", project, element_path, [], 4000000)
+ res = cli.run(project=project, args=["build", "unrelated.bst"])
res.assert_success()
# Check that the correct element remains in the cache
- states = cli.get_element_states(project, ['dependency.bst', 'unrelated.bst'])
- assert states['dependency.bst'] == 'cached'
- assert states['unrelated.bst'] == 'cached'
+ states = cli.get_element_states(project, ["dependency.bst", "unrelated.bst"])
+ assert states["dependency.bst"] == "cached"
+ assert states["unrelated.bst"] == "cached"
# We try to build an element which depends on the LRU artifact,
# and could therefore fail if we didn't make sure dependencies
@@ -207,54 +203,49 @@ def test_keep_dependencies(cli, datafiles):
# duplicating artifacts (bad!) we need to make this equal in size
# or smaller than half the size of its dependencies.
#
- create_element_size('target.bst', project,
- element_path, ['dependency.bst'], 2000000)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size(
+ "target.bst", project, element_path, ["dependency.bst"], 2000000
+ )
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
- states = cli.get_element_states(project, ['target.bst', 'unrelated.bst'])
- assert states['target.bst'] == 'cached'
- assert states['dependency.bst'] == 'cached'
- assert states['unrelated.bst'] != 'cached'
+ states = cli.get_element_states(project, ["target.bst", "unrelated.bst"])
+ assert states["target.bst"] == "cached"
+ assert states["dependency.bst"] == "cached"
+ assert states["unrelated.bst"] != "cached"
# Assert that we never delete a dependency required for a build tree
@pytest.mark.datafiles(DATA_DIR)
def test_never_delete_required(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
-
- cli.configure({
- 'cache': {
- 'quota': 10000000
- },
- 'scheduler': {
- 'fetchers': 1,
- 'builders': 1
- }
- })
+ element_path = "elements"
+
+ cli.configure(
+ {"cache": {"quota": 10000000}, "scheduler": {"fetchers": 1, "builders": 1}}
+ )
# Create a linear build tree
- create_element_size('dep1.bst', project, element_path, [], 8000000)
- create_element_size('dep2.bst', project, element_path, ['dep1.bst'], 8000000)
- create_element_size('dep3.bst', project, element_path, ['dep2.bst'], 8000000)
- create_element_size('target.bst', project, element_path, ['dep3.bst'], 8000000)
+ create_element_size("dep1.bst", project, element_path, [], 8000000)
+ create_element_size("dep2.bst", project, element_path, ["dep1.bst"], 8000000)
+ create_element_size("dep3.bst", project, element_path, ["dep2.bst"], 8000000)
+ create_element_size("target.bst", project, element_path, ["dep3.bst"], 8000000)
# Build dep1.bst, which should fit into the cache.
- res = cli.run(project=project, args=['build', 'dep1.bst'])
+ res = cli.run(project=project, args=["build", "dep1.bst"])
res.assert_success()
# We try to build this pipeline, but it's too big for the
# cache. Since all elements are required, the build should fail.
- res = cli.run(project=project, args=['build', 'target.bst'])
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_main_error(ErrorDomain.STREAM, None)
- res.assert_task_error(ErrorDomain.CAS, 'cache-too-full')
+ res.assert_task_error(ErrorDomain.CAS, "cache-too-full")
- states = cli.get_element_states(project, ['target.bst'])
- assert states['dep1.bst'] == 'cached'
- assert states['dep2.bst'] != 'cached'
- assert states['dep3.bst'] != 'cached'
- assert states['target.bst'] != 'cached'
+ states = cli.get_element_states(project, ["target.bst"])
+ assert states["dep1.bst"] == "cached"
+ assert states["dep2.bst"] != "cached"
+ assert states["dep3.bst"] != "cached"
+ assert states["target.bst"] != "cached"
# Ensure that only valid cache quotas make it through the loading
@@ -267,32 +258,33 @@ def test_never_delete_required(cli, datafiles):
#
# If err_domain is 'success', then err_reason is unused.
#
-@pytest.mark.parametrize("quota,err_domain,err_reason", [
- # Valid configurations
- ("1", 'success', None),
- ("1K", 'success', None),
- ("50%", 'success', None),
- ("infinity", 'success', None),
- ("0", 'success', None),
- # Invalid configurations
- ("-1", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
- ("pony", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
- ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
-])
+@pytest.mark.parametrize(
+ "quota,err_domain,err_reason",
+ [
+ # Valid configurations
+ ("1", "success", None),
+ ("1K", "success", None),
+ ("50%", "success", None),
+ ("infinity", "success", None),
+ ("0", "success", None),
+ # Invalid configurations
+ ("-1", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
+ ("pony", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
+ ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
+ ],
+)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_cache_quota(cli, datafiles, quota, err_domain, err_reason):
project = str(datafiles)
- os.makedirs(os.path.join(project, 'elements'))
+ os.makedirs(os.path.join(project, "elements"))
- cli.configure({
- 'cache': {
- 'quota': quota,
- },
- })
+ cli.configure(
+ {"cache": {"quota": quota,},}
+ )
- res = cli.run(project=project, args=['workspace', 'list'])
+ res = cli.run(project=project, args=["workspace", "list"])
- if err_domain == 'success':
+ if err_domain == "success":
res.assert_success()
else:
res.assert_main_error(err_domain, err_reason)
@@ -304,59 +296,49 @@ def test_invalid_cache_quota(cli, datafiles, quota, err_domain, err_reason):
@pytest.mark.datafiles(DATA_DIR)
def test_cleanup_first(cli, datafiles):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
- cli.configure({
- 'cache': {
- 'quota': 10000000,
- }
- })
+ cli.configure({"cache": {"quota": 10000000,}})
# Create an element that uses almost the entire cache (an empty
# ostree cache starts at about ~10KiB, so we need a bit of a
# buffer)
- create_element_size('target.bst', project, element_path, [], 8000000)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], 8000000)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Now configure with a smaller quota, create a situation
# where the cache must be cleaned up before building anything else.
#
# Fix the fetchers and builders just to ensure a predictable
    # sequence of events (although it does not affect this test)
- cli.configure({
- 'cache': {
- 'quota': 5000000,
- },
- 'scheduler': {
- 'fetchers': 1,
- 'builders': 1
- }
- })
+ cli.configure(
+ {"cache": {"quota": 5000000,}, "scheduler": {"fetchers": 1, "builders": 1}}
+ )
    # Our cache is now more than full; BuildStream must clean it up before building target2.bst
- create_element_size('target2.bst', project, element_path, [], 4000000)
- res = cli.run(project=project, args=['build', 'target2.bst'])
+ create_element_size("target2.bst", project, element_path, [], 4000000)
+ res = cli.run(project=project, args=["build", "target2.bst"])
res.assert_success()
# Check that the correct element remains in the cache
- states = cli.get_element_states(project, ['target.bst', 'target2.bst'])
- assert states['target.bst'] != 'cached'
- assert states['target2.bst'] == 'cached'
+ states = cli.get_element_states(project, ["target.bst", "target2.bst"])
+ assert states["target.bst"] != "cached"
+ assert states["target2.bst"] == "cached"
@pytest.mark.datafiles(DATA_DIR)
def test_cache_usage_monitor(cli, tmpdir, datafiles):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
assert get_cache_usage(cli.directory) == 0
ELEMENT_SIZE = 1000000
- create_element_size('target.bst', project, element_path, [], ELEMENT_SIZE)
- res = cli.run(project=project, args=['build', 'target.bst'])
+ create_element_size("target.bst", project, element_path, [], ELEMENT_SIZE)
+ res = cli.run(project=project, args=["build", "target.bst"])
res.assert_success()
assert get_cache_usage(cli.directory) >= ELEMENT_SIZE
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index dab69ea8d..91cc01fff 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -11,187 +11,208 @@ from buildstream.testing import cli # pylint: disable=unused-import
from tests.testutils import create_artifact_share, assert_shared, assert_not_shared
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "junctions",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "junctions",)
def project_set_artifacts(project, url):
- project_conf_file = os.path.join(project, 'project.conf')
+ project_conf_file = os.path.join(project, "project.conf")
project_config = _yaml.load(project_conf_file)
- project_config['artifacts'] = {
- 'url': url,
- 'push': True
- }
+ project_config["artifacts"] = {"url": url, "push": True}
_yaml.roundtrip_dump(project_config.strip_node_info(), file=project_conf_file)
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'parent')
- base_project = os.path.join(str(project), 'base')
+ project = os.path.join(str(datafiles), "parent")
+ base_project = os.path.join(str(project), "base")
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-parent')) as share,\
- create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-base')) as base_share:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare-parent")
+ ) as share, create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare-base")
+ ) as base_share:
# First build it without the artifact cache configured
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
assert result.exit_code == 0
# Assert that we are now cached locally
- state = cli.get_element_state(project, 'target.bst')
- assert state == 'cached'
- state = cli.get_element_state(base_project, 'base-element.bst')
- assert state == 'cached'
+ state = cli.get_element_state(project, "target.bst")
+ assert state == "cached"
+ state = cli.get_element_state(base_project, "base-element.bst")
+ assert state == "cached"
project_set_artifacts(project, share.repo)
project_set_artifacts(base_project, base_share.repo)
# Now try bst artifact push
- result = cli.run(project=project, args=['artifact', 'push', '--deps', 'all', 'target.bst'])
+ result = cli.run(
+ project=project, args=["artifact", "push", "--deps", "all", "target.bst"]
+ )
assert result.exit_code == 0
# And finally assert that the artifacts are in the right shares
#
# In the parent project's cache
- assert_shared(cli, share, project, 'target.bst', project_name='parent')
- assert_shared(cli, share, project, 'app.bst', project_name='parent')
- assert_not_shared(cli, share, base_project, 'base-element.bst', project_name='base')
+ assert_shared(cli, share, project, "target.bst", project_name="parent")
+ assert_shared(cli, share, project, "app.bst", project_name="parent")
+ assert_not_shared(
+ cli, share, base_project, "base-element.bst", project_name="base"
+ )
# In the junction project's cache
- assert_not_shared(cli, base_share, project, 'target.bst', project_name='parent')
- assert_not_shared(cli, base_share, project, 'app.bst', project_name='parent')
- assert_shared(cli, base_share, base_project, 'base-element.bst', project_name='base')
+ assert_not_shared(cli, base_share, project, "target.bst", project_name="parent")
+ assert_not_shared(cli, base_share, project, "app.bst", project_name="parent")
+ assert_shared(
+ cli, base_share, base_project, "base-element.bst", project_name="base"
+ )
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- cas = os.path.join(cli.directory, 'cas')
+ cas = os.path.join(cli.directory, "cas")
shutil.rmtree(cas)
- artifact_dir = os.path.join(cli.directory, 'artifacts')
+ artifact_dir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifact_dir)
# Assert that nothing is cached locally anymore
- state = cli.get_element_state(project, 'target.bst')
- assert state != 'cached'
- state = cli.get_element_state(base_project, 'base-element.bst')
- assert state != 'cached'
+ state = cli.get_element_state(project, "target.bst")
+ assert state != "cached"
+ state = cli.get_element_state(base_project, "base-element.bst")
+ assert state != "cached"
# Now try bst artifact pull
- result = cli.run(project=project, args=['artifact', 'pull', '--deps', 'all', 'target.bst'])
+ result = cli.run(
+ project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
+ )
assert result.exit_code == 0
# And assert that they are again in the local cache, without having built
- state = cli.get_element_state(project, 'target.bst')
- assert state == 'cached'
- state = cli.get_element_state(base_project, 'base-element.bst')
- assert state == 'cached'
+ state = cli.get_element_state(project, "target.bst")
+ assert state == "cached"
+ state = cli.get_element_state(base_project, "base-element.bst")
+ assert state == "cached"
@pytest.mark.datafiles(DATA_DIR)
def test_caching_junction_elements(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'parent')
- base_project = os.path.join(str(project), 'base')
+ project = os.path.join(str(datafiles), "parent")
+ base_project = os.path.join(str(project), "base")
# Load the junction element
- junction_element = os.path.join(project, 'base.bst')
+ junction_element = os.path.join(project, "base.bst")
junction_data = _yaml.roundtrip_load(junction_element)
# Add the "cache-junction-elements" boolean to the junction Element
- junction_data['config'] = {"cache-junction-elements": True}
+ junction_data["config"] = {"cache-junction-elements": True}
_yaml.roundtrip_dump(junction_data, junction_element)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-parent')) as share,\
- create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-base')) as base_share:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare-parent")
+ ) as share, create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare-base")
+ ) as base_share:
# First build it without the artifact cache configured
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
assert result.exit_code == 0
# Assert that we are now cached locally
- state = cli.get_element_state(project, 'target.bst')
- assert state == 'cached'
- state = cli.get_element_state(base_project, 'base-element.bst')
- assert state == 'cached'
+ state = cli.get_element_state(project, "target.bst")
+ assert state == "cached"
+ state = cli.get_element_state(base_project, "base-element.bst")
+ assert state == "cached"
project_set_artifacts(project, share.repo)
project_set_artifacts(base_project, base_share.repo)
# Now try bst artifact push
- result = cli.run(project=project, args=['artifact', 'push', '--deps', 'all', 'target.bst'])
+ result = cli.run(
+ project=project, args=["artifact", "push", "--deps", "all", "target.bst"]
+ )
assert result.exit_code == 0
# And finally assert that the artifacts are in the right shares
#
# The parent project's cache should *also* contain elements from the junction
- assert_shared(cli, share, project, 'target.bst', project_name='parent')
- assert_shared(cli, share, project, 'app.bst', project_name='parent')
- assert_shared(cli, share, base_project, 'base-element.bst', project_name='base')
+ assert_shared(cli, share, project, "target.bst", project_name="parent")
+ assert_shared(cli, share, project, "app.bst", project_name="parent")
+ assert_shared(cli, share, base_project, "base-element.bst", project_name="base")
# The junction project's cache should only contain elements in the junction project
- assert_not_shared(cli, base_share, project, 'target.bst', project_name='parent')
- assert_not_shared(cli, base_share, project, 'app.bst', project_name='parent')
- assert_shared(cli, base_share, base_project, 'base-element.bst', project_name='base')
+ assert_not_shared(cli, base_share, project, "target.bst", project_name="parent")
+ assert_not_shared(cli, base_share, project, "app.bst", project_name="parent")
+ assert_shared(
+ cli, base_share, base_project, "base-element.bst", project_name="base"
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_ignore_junction_remotes(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'parent')
- base_project = os.path.join(str(project), 'base')
+ project = os.path.join(str(datafiles), "parent")
+ base_project = os.path.join(str(project), "base")
# Load the junction element
- junction_element = os.path.join(project, 'base.bst')
+ junction_element = os.path.join(project, "base.bst")
junction_data = _yaml.roundtrip_load(junction_element)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-parent')) as share,\
- create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-base')) as base_share:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare-parent")
+ ) as share, create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare-base")
+ ) as base_share:
# Immediately declare the artifact caches in the appropriate project configs
project_set_artifacts(project, share.repo)
project_set_artifacts(base_project, base_share.repo)
# Build and populate the project remotes with their respective elements
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
assert result.exit_code == 0
# And finally assert that the artifacts are in the right shares
#
# The parent project's cache should only contain project elements
- assert_shared(cli, share, project, 'target.bst', project_name='parent')
- assert_shared(cli, share, project, 'app.bst', project_name='parent')
- assert_not_shared(cli, share, base_project, 'base-element.bst', project_name='base')
+ assert_shared(cli, share, project, "target.bst", project_name="parent")
+ assert_shared(cli, share, project, "app.bst", project_name="parent")
+ assert_not_shared(
+ cli, share, base_project, "base-element.bst", project_name="base"
+ )
# The junction project's cache should only contain elements in the junction project
- assert_not_shared(cli, base_share, project, 'target.bst', project_name='parent')
- assert_not_shared(cli, base_share, project, 'app.bst', project_name='parent')
- assert_shared(cli, base_share, base_project, 'base-element.bst', project_name='base')
+ assert_not_shared(cli, base_share, project, "target.bst", project_name="parent")
+ assert_not_shared(cli, base_share, project, "app.bst", project_name="parent")
+ assert_shared(
+ cli, base_share, base_project, "base-element.bst", project_name="base"
+ )
# Ensure that, from now on, we ignore junction element remotes
- junction_data['config'] = {"ignore-junction-remotes": True}
+ junction_data["config"] = {"ignore-junction-remotes": True}
_yaml.roundtrip_dump(junction_data, junction_element)
# Now delete everything from the local cache and try to
# redownload from the shares.
#
- cas = os.path.join(cli.directory, 'cas')
+ cas = os.path.join(cli.directory, "cas")
shutil.rmtree(cas)
- artifact_dir = os.path.join(cli.directory, 'artifacts')
+ artifact_dir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifact_dir)
# Assert that nothing is cached locally anymore
- state = cli.get_element_state(project, 'target.bst')
- assert state != 'cached'
- state = cli.get_element_state(base_project, 'base-element.bst')
- assert state != 'cached'
+ state = cli.get_element_state(project, "target.bst")
+ assert state != "cached"
+ state = cli.get_element_state(base_project, "base-element.bst")
+ assert state != "cached"
# Now try bst artifact pull
- result = cli.run(project=project, args=['artifact', 'pull', '--deps', 'all', 'target.bst'])
+ result = cli.run(
+ project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
+ )
assert result.exit_code == 0
# And assert that they are again in the local cache, without having built
- state = cli.get_element_state(project, 'target.bst')
- assert state == 'cached'
+ state = cli.get_element_state(project, "target.bst")
+ assert state == "cached"
# We shouldn't be able to download base-element!
- state = cli.get_element_state(base_project, 'base-element.bst')
- assert state != 'cached'
+ state = cli.get_element_state(base_project, "base-element.bst")
+ assert state != "cached"
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index a42a01bf7..6c4134b0b 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -14,10 +14,7 @@ from tests.testutils import create_artifact_share, dummy_context
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
def tree_maker(cas, tree, directory):
@@ -27,7 +24,7 @@ def tree_maker(cas, tree, directory):
for directory_node in directory.directories:
child_directory = tree.children.add()
- with open(cas.objpath(directory_node.digest), 'rb') as f:
+ with open(cas.objpath(directory_node.digest), "rb") as f:
child_directory.ParseFromString(f.read())
tree_maker(cas, tree, child_directory)
@@ -38,19 +35,14 @@ def test_pull(cli, tmpdir, datafiles):
project_dir = str(datafiles)
# Set up an artifact cache.
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Configure artifact share
- cache_dir = os.path.join(str(tmpdir), 'cache')
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ cache_dir = os.path.join(str(tmpdir), "cache")
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': cache_dir
+ "scheduler": {"pushers": 1},
+ "artifacts": {"url": share.repo, "push": True,},
+ "cachedir": cache_dir,
}
# Write down the user configuration file
@@ -59,19 +51,21 @@ def test_pull(cli, tmpdir, datafiles):
cli.configure(user_config)
# First build the project with the artifact cache configured
- result = cli.run(project=project_dir, args=['build', 'target.bst'])
+ result = cli.run(project=project_dir, args=["build", "target.bst"])
result.assert_success()
# Assert that we are now cached locally
- assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
+ assert cli.get_element_state(project_dir, "target.bst") == "cached"
# Assert that we shared/pushed the cached artifact
- assert share.get_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst'))
+ assert share.get_artifact(
+ cli.get_artifact_name(project_dir, "test", "target.bst")
+ )
# Delete the artifact locally
- cli.remove_artifact_from_cache(project_dir, 'target.bst')
+ cli.remove_artifact_from_cache(project_dir, "target.bst")
# Assert that we are not cached locally anymore
- assert cli.get_element_state(project_dir, 'target.bst') != 'cached'
+ assert cli.get_element_state(project_dir, "target.bst") != "cached"
with dummy_context(config=user_config_file) as context:
# Load the project
@@ -79,13 +73,13 @@ def test_pull(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
# Assert that the element's artifact is **not** cached
- element = project.load_elements(['target.bst'])[0]
- element_key = cli.get_element_key(project_dir, 'target.bst')
+ element = project.load_elements(["target.bst"])[0]
+ element_key = cli.get_element_key(project_dir, "target.bst")
assert not cli.artifact.is_cached(cache_dir, element, element_key)
context.cachedir = cache_dir
- context.casdir = os.path.join(cache_dir, 'cas')
- context.tmpdir = os.path.join(cache_dir, 'tmp')
+ context.casdir = os.path.join(cache_dir, "cas")
+ context.tmpdir = os.path.join(cache_dir, "tmp")
# Load the project manually
project = Project(project_dir, context)
@@ -97,8 +91,9 @@ def test_pull(cli, tmpdir, datafiles):
# Manually setup the CAS remote
artifactcache.setup_remotes(use_config=True)
- assert artifactcache.has_push_remotes(plugin=element), \
- "No remote configured for element target.bst"
+ assert artifactcache.has_push_remotes(
+ plugin=element
+ ), "No remote configured for element target.bst"
assert artifactcache.pull(element, element_key), "Pull operation failed"
assert cli.artifact.is_cached(cache_dir, element, element_key)
@@ -109,19 +104,14 @@ def test_pull_tree(cli, tmpdir, datafiles):
project_dir = str(datafiles)
# Set up an artifact cache.
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Configure artifact share
- rootcache_dir = os.path.join(str(tmpdir), 'cache')
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ rootcache_dir = os.path.join(str(tmpdir), "cache")
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': rootcache_dir
+ "scheduler": {"pushers": 1},
+ "artifacts": {"url": share.repo, "push": True,},
+ "cachedir": rootcache_dir,
}
# Write down the user configuration file
@@ -130,13 +120,15 @@ def test_pull_tree(cli, tmpdir, datafiles):
cli.configure(user_config)
# First build the project with the artifact cache configured
- result = cli.run(project=project_dir, args=['build', 'target.bst'])
+ result = cli.run(project=project_dir, args=["build", "target.bst"])
result.assert_success()
# Assert that we are now cached locally
- assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
+ assert cli.get_element_state(project_dir, "target.bst") == "cached"
# Assert that we shared/pushed the cached artifact
- assert share.get_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst'))
+ assert share.get_artifact(
+ cli.get_artifact_name(project_dir, "test", "target.bst")
+ )
with dummy_context(config=user_config_file) as context:
# Load the project and CAS cache
@@ -145,12 +137,14 @@ def test_pull_tree(cli, tmpdir, datafiles):
cas = context.get_cascache()
# Assert that the element's artifact is cached
- element = project.load_elements(['target.bst'])[0]
- element_key = cli.get_element_key(project_dir, 'target.bst')
+ element = project.load_elements(["target.bst"])[0]
+ element_key = cli.get_element_key(project_dir, "target.bst")
assert cli.artifact.is_cached(rootcache_dir, element, element_key)
# Retrieve the Directory object from the cached artifact
- artifact_digest = cli.artifact.get_digest(rootcache_dir, element, element_key)
+ artifact_digest = cli.artifact.get_digest(
+ rootcache_dir, element, element_key
+ )
artifactcache = context.artifactcache
# Manually setup the CAS remote
@@ -159,7 +153,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
directory = remote_execution_pb2.Directory()
- with open(cas.objpath(artifact_digest), 'rb') as f:
+ with open(cas.objpath(artifact_digest), "rb") as f:
directory.ParseFromString(f.read())
# Build the Tree object while we are still cached
@@ -172,25 +166,30 @@ def test_pull_tree(cli, tmpdir, datafiles):
assert tree_hash and tree_size
# Now delete the artifact locally
- cli.remove_artifact_from_cache(project_dir, 'target.bst')
+ cli.remove_artifact_from_cache(project_dir, "target.bst")
# Assert that we are not cached locally anymore
artifactcache.close_grpc_channels()
cas.close_grpc_channels()
- assert cli.get_element_state(project_dir, 'target.bst') != 'cached'
+ assert cli.get_element_state(project_dir, "target.bst") != "cached"
- tree_digest = remote_execution_pb2.Digest(hash=tree_hash,
- size_bytes=tree_size)
+ tree_digest = remote_execution_pb2.Digest(
+ hash=tree_hash, size_bytes=tree_size
+ )
# Pull the artifact using the Tree object
directory_digest = artifactcache.pull_tree(project, artifact_digest)
- directory_hash, directory_size = directory_digest.hash, directory_digest.size_bytes
+ directory_hash, directory_size = (
+ directory_digest.hash,
+ directory_digest.size_bytes,
+ )
# Directory size now zero with AaaP and stack element commit #1cbc5e63dc
assert directory_hash and not directory_size
- directory_digest = remote_execution_pb2.Digest(hash=directory_hash,
- size_bytes=directory_size)
+ directory_digest = remote_execution_pb2.Digest(
+ hash=directory_hash, size_bytes=directory_size
+ )
    # Ensure the entire Tree structure has been pulled
assert os.path.exists(cas.objpath(directory_digest))
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 62c443d61..dded57563 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -14,10 +14,7 @@ from tests.testutils import create_artifact_share, create_split_share, dummy_con
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# Push the given element and return its artifact key for assertions.
@@ -28,8 +25,8 @@ def _push(cli, cache_dir, project_dir, config_file, target):
project.ensure_fully_loaded()
# Assert that the element's artifact is cached
- element = project.load_elements(['target.bst'])[0]
- element_key = cli.get_element_key(project_dir, 'target.bst')
+ element = project.load_elements(["target.bst"])[0]
+ element_key = cli.get_element_key(project_dir, "target.bst")
assert cli.artifact.is_cached(cache_dir, element, element_key)
# Create a local artifact cache handle
@@ -46,8 +43,9 @@ def _push(cli, cache_dir, project_dir, config_file, target):
artifactcache.setup_remotes(use_config=True)
artifactcache.initialize_remotes()
- assert artifactcache.has_push_remotes(plugin=element), \
- "No remote configured for element target.bst"
+ assert artifactcache.has_push_remotes(
+ plugin=element
+ ), "No remote configured for element target.bst"
assert element._push(), "Push operation failed"
return element_key
@@ -58,32 +56,33 @@ def test_push(cli, tmpdir, datafiles):
project_dir = str(datafiles)
# First build the project without the artifact cache configured
- result = cli.run(project=project_dir, args=['build', 'target.bst'])
+ result = cli.run(project=project_dir, args=["build", "target.bst"])
result.assert_success()
# Assert that we are now cached locally
- assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
+ assert cli.get_element_state(project_dir, "target.bst") == "cached"
# Set up an artifact cache.
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Configure artifact share
- rootcache_dir = os.path.join(str(tmpdir), 'cache')
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ rootcache_dir = os.path.join(str(tmpdir), "cache")
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': rootcache_dir
+ "scheduler": {"pushers": 1},
+ "artifacts": {"url": share.repo, "push": True,},
+ "cachedir": rootcache_dir,
}
# Write down the user configuration file
_yaml.roundtrip_dump(user_config, file=user_config_file)
- element_key = _push(cli, rootcache_dir, project_dir, user_config_file, 'target.bst')
- assert share.get_artifact(cli.get_artifact_name(project_dir, 'test', 'target.bst', cache_key=element_key))
+ element_key = _push(
+ cli, rootcache_dir, project_dir, user_config_file, "target.bst"
+ )
+ assert share.get_artifact(
+ cli.get_artifact_name(
+ project_dir, "test", "target.bst", cache_key=element_key
+ )
+ )
@pytest.mark.datafiles(DATA_DIR)
@@ -91,41 +90,35 @@ def test_push_split(cli, tmpdir, datafiles):
project_dir = str(datafiles)
# First build the project without the artifact cache configured
- result = cli.run(project=project_dir, args=['build', 'target.bst'])
+ result = cli.run(project=project_dir, args=["build", "target.bst"])
result.assert_success()
# Assert that we are now cached locally
- assert cli.get_element_state(project_dir, 'target.bst') == 'cached'
+ assert cli.get_element_state(project_dir, "target.bst") == "cached"
- indexshare = os.path.join(str(tmpdir), 'indexshare')
- storageshare = os.path.join(str(tmpdir), 'storageshare')
+ indexshare = os.path.join(str(tmpdir), "indexshare")
+ storageshare = os.path.join(str(tmpdir), "storageshare")
# Set up an artifact cache.
with create_split_share(indexshare, storageshare) as (index, storage):
- rootcache_dir = os.path.join(str(tmpdir), 'cache')
+ rootcache_dir = os.path.join(str(tmpdir), "cache")
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': [{
- 'url': index.repo,
- 'push': True,
- 'type': 'index'
- }, {
- 'url': storage.repo,
- 'push': True,
- 'type': 'storage'
- }],
- 'cachedir': rootcache_dir
+ "scheduler": {"pushers": 1},
+ "artifacts": [
+ {"url": index.repo, "push": True, "type": "index"},
+ {"url": storage.repo, "push": True, "type": "storage"},
+ ],
+ "cachedir": rootcache_dir,
}
- config_path = str(tmpdir.join('buildstream.conf'))
+ config_path = str(tmpdir.join("buildstream.conf"))
_yaml.roundtrip_dump(user_config, file=config_path)
- element_key = _push(cli, rootcache_dir, project_dir, config_path, 'target.bst')
- proto = index.get_artifact_proto(cli.get_artifact_name(project_dir,
- 'test',
- 'target.bst',
- cache_key=element_key))
+ element_key = _push(cli, rootcache_dir, project_dir, config_path, "target.bst")
+ proto = index.get_artifact_proto(
+ cli.get_artifact_name(
+ project_dir, "test", "target.bst", cache_key=element_key
+ )
+ )
assert storage.get_cas_files(proto) is not None
@@ -134,20 +127,15 @@ def test_push_message(tmpdir, datafiles):
project_dir = str(datafiles)
# Set up an artifact cache.
- artifactshare = os.path.join(str(tmpdir), 'artifactshare')
+ artifactshare = os.path.join(str(tmpdir), "artifactshare")
with create_artifact_share(artifactshare) as share:
# Configure artifact share
- rootcache_dir = os.path.join(str(tmpdir), 'cache')
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ rootcache_dir = os.path.join(str(tmpdir), "cache")
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': rootcache_dir
+ "scheduler": {"pushers": 1},
+ "artifacts": {"url": share.repo, "push": True,},
+ "cachedir": rootcache_dir,
}
# Write down the user configuration file
@@ -166,15 +154,18 @@ def test_push_message(tmpdir, datafiles):
artifactcache.initialize_remotes()
assert artifactcache.has_push_remotes()
- command = remote_execution_pb2.Command(arguments=['/usr/bin/gcc', '--help'],
- working_directory='/buildstream-build',
- output_directories=['/buildstream-install'])
+ command = remote_execution_pb2.Command(
+ arguments=["/usr/bin/gcc", "--help"],
+ working_directory="/buildstream-build",
+ output_directories=["/buildstream-install"],
+ )
# Push the message object
command_digest = artifactcache.push_message(project, command)
message_hash, message_size = command_digest.hash, command_digest.size_bytes
assert message_hash and message_size
- message_digest = remote_execution_pb2.Digest(hash=message_hash,
- size_bytes=message_size)
+ message_digest = remote_execution_pb2.Digest(
+ hash=message_hash, size_bytes=message_size
+ )
assert share.has_object(message_digest)
diff --git a/tests/cachekey/cachekey.py b/tests/cachekey/cachekey.py
index fa93f5746..882d07240 100644
--- a/tests/cachekey/cachekey.py
+++ b/tests/cachekey/cachekey.py
@@ -62,7 +62,7 @@ def element_filename(project_dir, element_name, alt_suffix=None):
if alt_suffix:
# Just in case...
- assert element_name.endswith('.bst')
+ assert element_name.endswith(".bst")
# Chop off the 'bst' in '.bst' and add the new suffix
element_name = element_name[:-3]
@@ -93,18 +93,20 @@ def load_expected_keys(project_dir, actual_keys, raise_error=True):
expected_keys = OrderedDict()
for element_name in actual_keys:
- expected = element_filename(project_dir, element_name, 'expected')
+ expected = element_filename(project_dir, element_name, "expected")
try:
- with open(expected, 'r') as f:
+ with open(expected, "r") as f:
expected_key = f.read()
expected_key = expected_key.strip()
except FileNotFoundError:
expected_key = None
if raise_error:
- raise Exception("Cache key test needs update, " +
- "expected file {} not found.\n\n".format(expected) +
- "Use tests/cachekey/update.py to automatically " +
- "update this test case")
+ raise Exception(
+ "Cache key test needs update, "
+ + "expected file {} not found.\n\n".format(expected)
+ + "Use tests/cachekey/update.py to automatically "
+ + "update this test case"
+ )
expected_keys[element_name] = expected_key
@@ -127,13 +129,17 @@ def assert_cache_keys(project_dir, output):
if mismatches:
info = ""
for element_name in mismatches:
- info += " Element: {}\n".format(element_name) + \
- " Expected: {}\n".format(expected_keys[element_name]) + \
- " Actual: {}\n".format(actual_keys[element_name])
+ info += (
+ " Element: {}\n".format(element_name)
+ + " Expected: {}\n".format(expected_keys[element_name])
+ + " Actual: {}\n".format(actual_keys[element_name])
+ )
- raise AssertionError("Cache key mismatches occurred:\n{}\n".format(info) +
- "Use tests/cachekey/update.py to automatically " +
- "update this test case")
+ raise AssertionError(
+ "Cache key mismatches occurred:\n{}\n".format(info)
+ + "Use tests/cachekey/update.py to automatically "
+ + "update this test case"
+ )
##############################################
@@ -141,18 +147,16 @@ def assert_cache_keys(project_dir, output):
##############################################
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# The cache key test uses a project which exercises all plugins,
# so we can't run it at all if we don't have them installed.
#
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Cache keys depend on architecture')
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.skipif(
+ MACHINE_ARCH != "x86-64", reason="Cache keys depend on architecture"
+)
+@pytest.mark.skipif(not IS_LINUX, reason="Only available on linux")
@pytest.mark.skipif(HAVE_BZR is False, reason="bzr is not available")
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@pytest.mark.datafiles(DATA_DIR)
@@ -163,55 +167,61 @@ def test_cache_key(datafiles, cli):
# versions of setuptools fail to preserve symbolic links
# when creating a source distribution, causing this test
# to fail from a dist tarball.
- goodbye_link = os.path.join(project, 'files', 'local',
- 'usr', 'bin', 'goodbye')
+ goodbye_link = os.path.join(project, "files", "local", "usr", "bin", "goodbye")
os.unlink(goodbye_link)
- os.symlink('hello', goodbye_link)
+ os.symlink("hello", goodbye_link)
# pytest-datafiles does not copy mode bits
# https://github.com/omarkohl/pytest-datafiles/issues/11
os.chmod(goodbye_link, 0o755)
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--format', '%{name}::%{full-key}',
- 'target.bst'
- ])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--format", "%{name}::%{full-key}", "target.bst"],
+ )
result.assert_success()
assert_cache_keys(project, result.output)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("first_warnings, second_warnings, identical_keys", [
- [[], [], True],
- [[], [CoreWarnings.REF_NOT_IN_TRACK], False],
- [[CoreWarnings.REF_NOT_IN_TRACK], [], False],
- [[CoreWarnings.REF_NOT_IN_TRACK], [CoreWarnings.REF_NOT_IN_TRACK], True],
- [[CoreWarnings.REF_NOT_IN_TRACK, CoreWarnings.OVERLAPS],
- [CoreWarnings.OVERLAPS, CoreWarnings.REF_NOT_IN_TRACK], True],
-])
-def test_cache_key_fatal_warnings(cli, tmpdir, first_warnings, second_warnings, identical_keys):
+@pytest.mark.parametrize(
+ "first_warnings, second_warnings, identical_keys",
+ [
+ [[], [], True],
+ [[], [CoreWarnings.REF_NOT_IN_TRACK], False],
+ [[CoreWarnings.REF_NOT_IN_TRACK], [], False],
+ [[CoreWarnings.REF_NOT_IN_TRACK], [CoreWarnings.REF_NOT_IN_TRACK], True],
+ [
+ [CoreWarnings.REF_NOT_IN_TRACK, CoreWarnings.OVERLAPS],
+ [CoreWarnings.OVERLAPS, CoreWarnings.REF_NOT_IN_TRACK],
+ True,
+ ],
+ ],
+)
+def test_cache_key_fatal_warnings(
+ cli, tmpdir, first_warnings, second_warnings, identical_keys
+):
# Builds project, Runs bst show, gathers cache keys
def run_get_cache_key(project_name, warnings):
config = {
- 'name': project_name,
- 'element-path': 'elements',
- 'fatal-warnings': warnings
+ "name": project_name,
+ "element-path": "elements",
+ "fatal-warnings": warnings,
}
project_dir = tmpdir.mkdir(project_name)
- project_config_file = str(project_dir.join('project.conf'))
+ project_config_file = str(project_dir.join("project.conf"))
_yaml.roundtrip_dump(config, file=project_config_file)
- elem_dir = project_dir.mkdir('elements')
- element_file = str(elem_dir.join('stack.bst'))
- _yaml.roundtrip_dump({'kind': 'stack'}, file=element_file)
+ elem_dir = project_dir.mkdir("elements")
+ element_file = str(elem_dir.join("stack.bst"))
+ _yaml.roundtrip_dump({"kind": "stack"}, file=element_file)
- result = cli.run(project=str(project_dir), args=[
- 'show',
- '--format', '%{name}::%{full-key}',
- 'stack.bst'
- ])
+ result = cli.run(
+ project=str(project_dir),
+ args=["show", "--format", "%{name}::%{full-key}", "stack.bst"],
+ )
return result.output
# Returns true if all keys are identical
@@ -226,34 +236,28 @@ def test_cache_key_fatal_warnings(cli, tmpdir, first_warnings, second_warnings,
@pytest.mark.datafiles(DATA_DIR)
def test_keys_stable_over_targets(cli, datafiles):
- root_element = 'elements/key-stability/top-level.bst'
- target1 = 'elements/key-stability/t1.bst'
- target2 = 'elements/key-stability/t2.bst'
+ root_element = "elements/key-stability/top-level.bst"
+ target1 = "elements/key-stability/t1.bst"
+ target2 = "elements/key-stability/t2.bst"
project = str(datafiles)
- full_graph_result = cli.run(project=project, args=[
- 'show',
- '--format', '%{name}::%{full-key}',
- root_element
- ])
+ full_graph_result = cli.run(
+ project=project, args=["show", "--format", "%{name}::%{full-key}", root_element]
+ )
full_graph_result.assert_success()
all_cache_keys = parse_output_keys(full_graph_result.output)
- ordering1_result = cli.run(project=project, args=[
- 'show',
- '--format', '%{name}::%{full-key}',
- target1,
- target2
- ])
+ ordering1_result = cli.run(
+ project=project,
+ args=["show", "--format", "%{name}::%{full-key}", target1, target2],
+ )
ordering1_result.assert_success()
ordering1_cache_keys = parse_output_keys(ordering1_result.output)
- ordering2_result = cli.run(project=project, args=[
- 'show',
- '--format', '%{name}::%{full-key}',
- target2,
- target1
- ])
+ ordering2_result = cli.run(
+ project=project,
+ args=["show", "--format", "%{name}::%{full-key}", target2, target1],
+ )
ordering2_result.assert_success()
ordering2_cache_keys = parse_output_keys(ordering2_result.output)
diff --git a/tests/cachekey/update.py b/tests/cachekey/update.py
index feda5dbde..ae8b368c5 100755
--- a/tests/cachekey/update.py
+++ b/tests/cachekey/update.py
@@ -25,31 +25,34 @@ except ImportError:
from .cachekey import element_filename, parse_output_keys, load_expected_keys
# Project directory
-PROJECT_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+PROJECT_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
def write_expected_key(element_name, actual_key):
- expected_file = element_filename(PROJECT_DIR, element_name, 'expected')
- with open(expected_file, 'w') as f:
+ expected_file = element_filename(PROJECT_DIR, element_name, "expected")
+ with open(expected_file, "w") as f:
f.write(actual_key)
def update_keys():
with tempfile.TemporaryDirectory(dir=PROJECT_DIR) as tmpdir:
- directory = os.path.join(tmpdir, 'cache')
+ directory = os.path.join(tmpdir, "cache")
os.makedirs(directory)
cli = Cli(directory, verbose=True)
# Run bst show
- result = cli.run(project=PROJECT_DIR, silent=True, args=[
- '--no-colors',
- 'show', '--format', '%{name}::%{full-key}',
- 'target.bst'
- ])
+ result = cli.run(
+ project=PROJECT_DIR,
+ silent=True,
+ args=[
+ "--no-colors",
+ "show",
+ "--format",
+ "%{name}::%{full-key}",
+ "target.bst",
+ ],
+ )
# Load the actual keys, and the expected ones if they exist
if not result.output:
@@ -59,7 +62,7 @@ def update_keys():
expected_keys = load_expected_keys(PROJECT_DIR, actual_keys, raise_error=False)
for element_name in actual_keys:
- expected = element_filename(PROJECT_DIR, element_name, 'expected')
+ expected = element_filename(PROJECT_DIR, element_name, "expected")
if actual_keys[element_name] != expected_keys[element_name]:
if not expected_keys[element_name]:
@@ -70,10 +73,10 @@ def update_keys():
write_expected_key(element_name, actual_keys[element_name])
-if __name__ == '__main__':
+if __name__ == "__main__":
# patch the environment BST_TEST_SUITE value to something if it's not
# present. This avoids an exception thrown at the cli level
- bst = 'BST_TEST_SUITE'
- mock_bst = os.environ.get(bst, 'True')
+ bst = "BST_TEST_SUITE"
+ mock_bst = os.environ.get(bst, "True")
with mock.patch.dict(os.environ, {**os.environ, bst: mock_bst}):
update_keys()
diff --git a/tests/conftest.py b/tests/conftest.py
index 216c83893..05a4853f6 100755
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -24,8 +24,13 @@ import multiprocessing
import pytest
from buildstream.testing import register_repo_kind, sourcetests_collection_hook
-from buildstream.testing._fixtures import reset_global_node_state, thread_check # pylint: disable=unused-import
-from buildstream.testing.integration import integration_cache # pylint: disable=unused-import
+from buildstream.testing._fixtures import (
+ reset_global_node_state,
+ thread_check,
+) # pylint: disable=unused-import
+from buildstream.testing.integration import (
+ integration_cache,
+) # pylint: disable=unused-import
from tests.testutils.repo.git import Git
@@ -45,28 +50,36 @@ from tests.testutils.repo.zip import Zip
# Implement pytest option #
#################################################
def pytest_addoption(parser):
- parser.addoption('--integration', action='store_true', default=False,
- help='Run integration tests')
+ parser.addoption(
+ "--integration",
+ action="store_true",
+ default=False,
+ help="Run integration tests",
+ )
- parser.addoption('--remote-execution', action='store_true', default=False,
- help='Run remote-execution tests only')
+ parser.addoption(
+ "--remote-execution",
+ action="store_true",
+ default=False,
+ help="Run remote-execution tests only",
+ )
def pytest_runtest_setup(item):
# Without --integration: skip tests not marked with 'integration'
- if not item.config.getvalue('integration'):
- if item.get_closest_marker('integration'):
- pytest.skip('skipping integration test')
+ if not item.config.getvalue("integration"):
+ if item.get_closest_marker("integration"):
+ pytest.skip("skipping integration test")
# With --remote-execution: only run tests marked with 'remoteexecution'
- if item.config.getvalue('remote_execution'):
- if not item.get_closest_marker('remoteexecution'):
- pytest.skip('skipping non remote-execution test')
+ if item.config.getvalue("remote_execution"):
+ if not item.get_closest_marker("remoteexecution"):
+ pytest.skip("skipping non remote-execution test")
# Without --remote-execution: skip tests marked with 'remoteexecution'
else:
- if item.get_closest_marker('remoteexecution'):
- pytest.skip('skipping remote-execution test')
+ if item.get_closest_marker("remoteexecution"):
+ pytest.skip("skipping remote-execution test")
#################################################
@@ -75,30 +88,29 @@ def pytest_runtest_setup(item):
#
# This is returned by the `remote_services` fixture
#
-class RemoteServices():
-
+class RemoteServices:
def __init__(self, **kwargs):
- self.action_service = kwargs.get('action_service')
- self.artifact_service = kwargs.get('artifact_service')
- self.exec_service = kwargs.get('exec_service')
- self.source_service = kwargs.get('source_service')
- self.storage_service = kwargs.get('storage_service')
+ self.action_service = kwargs.get("action_service")
+ self.artifact_service = kwargs.get("artifact_service")
+ self.exec_service = kwargs.get("exec_service")
+ self.source_service = kwargs.get("source_service")
+ self.storage_service = kwargs.get("storage_service")
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def remote_services(request):
kwargs = {}
# Look for remote services configuration in environment.
- if 'ARTIFACT_CACHE_SERVICE' in os.environ:
- kwargs['artifact_service'] = os.environ.get('ARTIFACT_CACHE_SERVICE')
+ if "ARTIFACT_CACHE_SERVICE" in os.environ:
+ kwargs["artifact_service"] = os.environ.get("ARTIFACT_CACHE_SERVICE")
- if 'REMOTE_EXECUTION_SERVICE' in os.environ:
- kwargs['action_service'] = os.environ.get('REMOTE_EXECUTION_SERVICE')
- kwargs['exec_service'] = os.environ.get('REMOTE_EXECUTION_SERVICE')
- kwargs['storage_service'] = os.environ.get('REMOTE_EXECUTION_SERVICE')
+ if "REMOTE_EXECUTION_SERVICE" in os.environ:
+ kwargs["action_service"] = os.environ.get("REMOTE_EXECUTION_SERVICE")
+ kwargs["exec_service"] = os.environ.get("REMOTE_EXECUTION_SERVICE")
+ kwargs["storage_service"] = os.environ.get("REMOTE_EXECUTION_SERVICE")
- if 'SOURCE_CACHE_SERVICE' in os.environ:
- kwargs['source_service'] = os.environ.get('SOURCE_CACHE_SERVICE')
+ if "SOURCE_CACHE_SERVICE" in os.environ:
+ kwargs["source_service"] = os.environ.get("SOURCE_CACHE_SERVICE")
return RemoteServices(**kwargs)
@@ -106,10 +118,10 @@ def remote_services(request):
#################################################
# Setup for templated source tests #
#################################################
-register_repo_kind('git', Git, None)
-register_repo_kind('bzr', Bzr, None)
-register_repo_kind('tar', Tar, None)
-register_repo_kind('zip', Zip, None)
+register_repo_kind("git", Git, None)
+register_repo_kind("bzr", Bzr, None)
+register_repo_kind("tar", Tar, None)
+register_repo_kind("zip", Zip, None)
# This hook enables pytest to collect the templated source tests from
@@ -124,10 +136,10 @@ def pytest_sessionstart(session):
@pytest.fixture(scope="session", autouse=True)
def set_xdg_paths(pytestconfig):
for env_var, default in [
- ("HOME", "tmp"),
- ("XDG_CACHE_HOME", "tmp/cache"),
- ("XDG_CONFIG_HOME", "tmp/config"),
- ("XDG_DATA_HOME", "tmp/share"),
+ ("HOME", "tmp"),
+ ("XDG_CACHE_HOME", "tmp/cache"),
+ ("XDG_CONFIG_HOME", "tmp/config"),
+ ("XDG_DATA_HOME", "tmp/share"),
]:
value = os.environ.get("BST_TEST_{}".format(env_var))
if value is None:
@@ -141,10 +153,9 @@ def pytest_configure(config):
# possible. Note that some tests implicitly set the start method by using
# multiprocessing. If we wait for bst to do it, it will already be too
# late.
- if 'BST_FORCE_START_METHOD' in os.environ:
- start_method = os.environ['BST_FORCE_START_METHOD']
+ if "BST_FORCE_START_METHOD" in os.environ:
+ start_method = os.environ["BST_FORCE_START_METHOD"]
multiprocessing.set_start_method(start_method)
print(
- "Multiprocessing method set to:",
- start_method,
+ "Multiprocessing method set to:", start_method,
)
diff --git a/tests/elements/filter.py b/tests/elements/filter.py
index 99370052e..54ddf216a 100644
--- a/tests/elements/filter.py
+++ b/tests/elements/filter.py
@@ -12,188 +12,246 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
from buildstream._exceptions import ErrorDomain
from buildstream import _yaml
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'filter',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "filter",)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_include(datafiles, cli, tmpdir):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'output-include.bst'])
+ result = cli.run(project=project, args=["build", "output-include.bst"])
result.assert_success()
- checkout = os.path.join(tmpdir.dirname, tmpdir.basename, 'checkout')
- result = cli.run(project=project, args=['artifact', 'checkout', 'output-include.bst', '--directory', checkout])
+ checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "output-include.bst", "--directory", checkout],
+ )
result.assert_success()
assert os.path.exists(os.path.join(checkout, "foo"))
assert not os.path.exists(os.path.join(checkout, "bar"))
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_include_dynamic(datafiles, cli, tmpdir):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'output-dynamic-include.bst'])
+ result = cli.run(project=project, args=["build", "output-dynamic-include.bst"])
result.assert_success()
- checkout = os.path.join(tmpdir.dirname, tmpdir.basename, 'checkout')
- result = cli.run(project=project, args=['artifact', 'checkout', 'output-dynamic-include.bst',
- '--directory', checkout])
+ checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "output-dynamic-include.bst",
+ "--directory",
+ checkout,
+ ],
+ )
result.assert_success()
assert os.path.exists(os.path.join(checkout, "foo"))
assert not os.path.exists(os.path.join(checkout, "bar"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_exclude(datafiles, cli, tmpdir):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'output-exclude.bst'])
+ result = cli.run(project=project, args=["build", "output-exclude.bst"])
result.assert_success()
- checkout = os.path.join(tmpdir.dirname, tmpdir.basename, 'checkout')
- result = cli.run(project=project, args=['artifact', 'checkout', 'output-exclude.bst', '--directory', checkout])
+ checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "output-exclude.bst", "--directory", checkout],
+ )
result.assert_success()
assert not os.path.exists(os.path.join(checkout, "foo"))
assert os.path.exists(os.path.join(checkout, "bar"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_orphans(datafiles, cli, tmpdir):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'output-orphans.bst'])
+ result = cli.run(project=project, args=["build", "output-orphans.bst"])
result.assert_success()
- checkout = os.path.join(tmpdir.dirname, tmpdir.basename, 'checkout')
- result = cli.run(project=project, args=['artifact', 'checkout', 'output-orphans.bst', '--directory', checkout])
+ checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "output-orphans.bst", "--directory", checkout],
+ )
result.assert_success()
assert os.path.exists(os.path.join(checkout, "baz"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_deps_ok(datafiles, cli):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'deps-permitted.bst'])
+ result = cli.run(project=project, args=["build", "deps-permitted.bst"])
result.assert_success()
- result = cli.run(project=project,
- args=['show', '--deps=run', "--format='%{name}'", 'deps-permitted.bst'])
+ result = cli.run(
+ project=project,
+ args=["show", "--deps=run", "--format='%{name}'", "deps-permitted.bst"],
+ )
result.assert_success()
- assert 'output-exclude.bst' in result.output
- assert 'output-orphans.bst' in result.output
+ assert "output-exclude.bst" in result.output
+ assert "output-orphans.bst" in result.output
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_forbid_sources(datafiles, cli):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'forbidden-source.bst'])
- result.assert_main_error(ErrorDomain.ELEMENT, 'element-forbidden-sources')
+ result = cli.run(project=project, args=["build", "forbidden-source.bst"])
+ result.assert_main_error(ErrorDomain.ELEMENT, "element-forbidden-sources")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_forbid_multi_bdep(datafiles, cli):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'forbidden-multi-bdep.bst'])
- result.assert_main_error(ErrorDomain.ELEMENT, 'filter-bdepend-wrong-count')
+ result = cli.run(project=project, args=["build", "forbidden-multi-bdep.bst"])
+ result.assert_main_error(ErrorDomain.ELEMENT, "filter-bdepend-wrong-count")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_forbid_no_bdep(datafiles, cli):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'forbidden-no-bdep.bst'])
- result.assert_main_error(ErrorDomain.ELEMENT, 'filter-bdepend-wrong-count')
+ result = cli.run(project=project, args=["build", "forbidden-no-bdep.bst"])
+ result.assert_main_error(ErrorDomain.ELEMENT, "filter-bdepend-wrong-count")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_forbid_also_rdep(datafiles, cli):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'forbidden-also-rdep.bst'])
- result.assert_main_error(ErrorDomain.ELEMENT, 'filter-bdepend-also-rdepend')
+ result = cli.run(project=project, args=["build", "forbidden-also-rdep.bst"])
+ result.assert_main_error(ErrorDomain.ELEMENT, "filter-bdepend-also-rdepend")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_workspace_open(datafiles, cli, tmpdir):
project = str(datafiles)
workspace_dir = os.path.join(tmpdir.dirname, tmpdir.basename, "workspace")
- result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'deps-permitted.bst'])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace_dir, "deps-permitted.bst"],
+ )
result.assert_success()
assert os.path.exists(os.path.join(workspace_dir, "foo"))
assert os.path.exists(os.path.join(workspace_dir, "bar"))
assert os.path.exists(os.path.join(workspace_dir, "baz"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_workspace_open_multi(datafiles, cli):
project = str(datafiles)
- result = cli.run(cwd=project, project=project, args=['workspace', 'open', 'deps-permitted.bst',
- 'output-orphans.bst'])
+ result = cli.run(
+ cwd=project,
+ project=project,
+ args=["workspace", "open", "deps-permitted.bst", "output-orphans.bst"],
+ )
result.assert_success()
assert os.path.exists(os.path.join(project, "input"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_workspace_build(datafiles, cli, tmpdir):
project = str(datafiles)
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
workspace_dir = os.path.join(tempdir, "workspace")
- result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace_dir, "output-orphans.bst"],
+ )
result.assert_success()
src = os.path.join(workspace_dir, "foo")
dst = os.path.join(workspace_dir, "quux")
shutil.copyfile(src, dst)
- result = cli.run(project=project, args=['build', 'output-orphans.bst'])
+ result = cli.run(project=project, args=["build", "output-orphans.bst"])
result.assert_success()
checkout_dir = os.path.join(tempdir, "checkout")
- result = cli.run(project=project, args=['artifact', 'checkout', 'output-orphans.bst', '--directory', checkout_dir])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "output-orphans.bst",
+ "--directory",
+ checkout_dir,
+ ],
+ )
result.assert_success()
assert os.path.exists(os.path.join(checkout_dir, "quux"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_workspace_close(datafiles, cli, tmpdir):
project = str(datafiles)
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
workspace_dir = os.path.join(tempdir, "workspace")
- result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace_dir, "output-orphans.bst"],
+ )
result.assert_success()
src = os.path.join(workspace_dir, "foo")
dst = os.path.join(workspace_dir, "quux")
shutil.copyfile(src, dst)
- result = cli.run(project=project, args=['workspace', 'close', 'deps-permitted.bst'])
+ result = cli.run(project=project, args=["workspace", "close", "deps-permitted.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'output-orphans.bst'])
+ result = cli.run(project=project, args=["build", "output-orphans.bst"])
result.assert_success()
checkout_dir = os.path.join(tempdir, "checkout")
- result = cli.run(project=project, args=['artifact', 'checkout', 'output-orphans.bst', '--directory', checkout_dir])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "output-orphans.bst",
+ "--directory",
+ checkout_dir,
+ ],
+ )
result.assert_success()
assert not os.path.exists(os.path.join(checkout_dir, "quux"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_workspace_reset(datafiles, cli, tmpdir):
project = str(datafiles)
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
workspace_dir = os.path.join(tempdir, "workspace")
- result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace_dir, "output-orphans.bst"],
+ )
result.assert_success()
src = os.path.join(workspace_dir, "foo")
dst = os.path.join(workspace_dir, "quux")
shutil.copyfile(src, dst)
- result = cli.run(project=project, args=['workspace', 'reset', 'deps-permitted.bst'])
+ result = cli.run(project=project, args=["workspace", "reset", "deps-permitted.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'output-orphans.bst'])
+ result = cli.run(project=project, args=["build", "output-orphans.bst"])
result.assert_success()
checkout_dir = os.path.join(tempdir, "checkout")
- result = cli.run(project=project, args=['artifact', 'checkout', 'output-orphans.bst', '--directory', checkout_dir])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "output-orphans.bst",
+ "--directory",
+ checkout_dir,
+ ],
+ )
result.assert_success()
assert not os.path.exists(os.path.join(checkout_dir, "quux"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_track(datafiles, cli, tmpdir):
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(os.path.join(str(datafiles), "files"))
elements_dir = os.path.join(str(tmpdir), "elements")
project = str(tmpdir)
@@ -216,24 +274,20 @@ def test_filter_track(datafiles, cli, tmpdir):
filter1_config = {
"kind": "filter",
- "depends": [
- {"filename": input_name, "type": "build"}
- ]
+ "depends": [{"filename": input_name, "type": "build"}],
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
_yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
- "depends": [
- {"filename": "filter1.bst", "type": "build"}
- ]
+ "depends": [{"filename": "filter1.bst", "type": "build"}],
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
_yaml.roundtrip_dump(filter2_config, filter2_file)
# Assert that a fetch is needed
- assert cli.get_element_state(project, input_name) == 'no reference'
+ assert cli.get_element_state(project, input_name) == "no reference"
# Now try to track it
result = cli.run(project=project, args=["source", "track", "filter2.bst"])
@@ -241,14 +295,14 @@ def test_filter_track(datafiles, cli, tmpdir):
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = new_input.get_sequence('sources').mapping_at(0)
- new_input_ref = source_node.get_str('ref')
+ source_node = new_input.get_sequence("sources").mapping_at(0)
+ new_input_ref = source_node.get_str("ref")
assert new_input_ref == ref
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_track_excepted(datafiles, cli, tmpdir):
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(os.path.join(str(datafiles), "files"))
elements_dir = os.path.join(str(tmpdir), "elements")
project = str(tmpdir)
@@ -271,38 +325,37 @@ def test_filter_track_excepted(datafiles, cli, tmpdir):
filter1_config = {
"kind": "filter",
- "depends": [
- {"filename": input_name, "type": "build"}
- ]
+ "depends": [{"filename": input_name, "type": "build"}],
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
_yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
- "depends": [
- {"filename": "filter1.bst", "type": "build"}
- ]
+ "depends": [{"filename": "filter1.bst", "type": "build"}],
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
_yaml.roundtrip_dump(filter2_config, filter2_file)
# Assert that a fetch is needed
- assert cli.get_element_state(project, input_name) == 'no reference'
+ assert cli.get_element_state(project, input_name) == "no reference"
# Now try to track it
- result = cli.run(project=project, args=["source", "track", "filter2.bst", "--except", "input.bst"])
+ result = cli.run(
+ project=project,
+ args=["source", "track", "filter2.bst", "--except", "input.bst"],
+ )
result.assert_success()
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = new_input.get_sequence('sources').mapping_at(0)
- assert 'ref' not in source_node
+ source_node = new_input.get_sequence("sources").mapping_at(0)
+ assert "ref" not in source_node
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(os.path.join(str(datafiles), "files"))
elements_dir = os.path.join(str(tmpdir), "elements")
project = str(tmpdir)
@@ -325,39 +378,37 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir):
filter1_config = {
"kind": "filter",
- "depends": [
- {"filename": input_name, "type": "build"}
- ]
+ "depends": [{"filename": input_name, "type": "build"}],
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
_yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
- "depends": [
- {"filename": input_name, "type": "build"}
- ]
+ "depends": [{"filename": input_name, "type": "build"}],
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
_yaml.roundtrip_dump(filter2_config, filter2_file)
# Assert that a fetch is needed
- assert cli.get_element_state(project, input_name) == 'no reference'
+ assert cli.get_element_state(project, input_name) == "no reference"
# Now try to track it
- result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
+ result = cli.run(
+ project=project, args=["source", "track", "filter1.bst", "filter2.bst"]
+ )
result.assert_success()
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = new_input.get_sequence('sources').mapping_at(0)
- new_ref = source_node.get_str('ref')
+ source_node = new_input.get_sequence("sources").mapping_at(0)
+ new_ref = source_node.get_str("ref")
assert new_ref == ref
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_track_multi(datafiles, cli, tmpdir):
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(os.path.join(str(datafiles), "files"))
elements_dir = os.path.join(str(tmpdir), "elements")
project = str(tmpdir)
@@ -385,18 +436,14 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
filter1_config = {
"kind": "filter",
- "depends": [
- {"filename": input_name, "type": "build"}
- ]
+ "depends": [{"filename": input_name, "type": "build"}],
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
_yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
- "depends": [
- {"filename": input2_name, "type": "build"}
- ]
+ "depends": [{"filename": input2_name, "type": "build"}],
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
_yaml.roundtrip_dump(filter2_config, filter2_file)
@@ -410,24 +457,26 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
}
# Now try to track it
- result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
+ result = cli.run(
+ project=project, args=["source", "track", "filter1.bst", "filter2.bst"]
+ )
result.assert_success()
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = new_input.get_sequence('sources').mapping_at(0)
- new_ref = source_node.get_str('ref')
+ source_node = new_input.get_sequence("sources").mapping_at(0)
+ new_ref = source_node.get_str("ref")
assert new_ref == ref
new_input2 = _yaml.load(input2_file)
- source_node2 = new_input2.get_sequence('sources').mapping_at(0)
- new_ref2 = source_node2.get_str('ref')
+ source_node2 = new_input2.get_sequence("sources").mapping_at(0)
+ new_ref2 = source_node2.get_str("ref")
assert new_ref2 == ref
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(os.path.join(str(datafiles), "files"))
elements_dir = os.path.join(str(tmpdir), "elements")
project = str(tmpdir)
@@ -455,18 +504,14 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
filter1_config = {
"kind": "filter",
- "depends": [
- {"filename": input_name, "type": "build"}
- ]
+ "depends": [{"filename": input_name, "type": "build"}],
}
filter1_file = os.path.join(elements_dir, "filter1.bst")
_yaml.roundtrip_dump(filter1_config, filter1_file)
filter2_config = {
"kind": "filter",
- "depends": [
- {"filename": input2_name, "type": "build"}
- ]
+ "depends": [{"filename": input2_name, "type": "build"}],
}
filter2_file = os.path.join(elements_dir, "filter2.bst")
_yaml.roundtrip_dump(filter2_config, filter2_file)
@@ -479,30 +524,42 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
}
# Now try to track it
- result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst", "--except", input_name])
+ result = cli.run(
+ project=project,
+ args=["source", "track", "filter1.bst", "filter2.bst", "--except", input_name],
+ )
result.assert_success()
# Now check that a ref field exists
new_input = _yaml.load(input_file)
- source_node = new_input.get_sequence('sources').mapping_at(0)
- assert 'ref' not in source_node
+ source_node = new_input.get_sequence("sources").mapping_at(0)
+ assert "ref" not in source_node
new_input2 = _yaml.load(input2_file)
- source_node2 = new_input2.get_sequence('sources').mapping_at(0)
- new_ref2 = source_node2.get_str('ref')
+ source_node2 = new_input2.get_sequence("sources").mapping_at(0)
+ new_ref2 = source_node2.get_str("ref")
assert new_ref2 == ref
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_include_with_indirect_deps(datafiles, cli, tmpdir):
project = str(datafiles)
- result = cli.run(project=project, args=[
- 'build', 'output-include-with-indirect-deps.bst'])
+ result = cli.run(
+ project=project, args=["build", "output-include-with-indirect-deps.bst"]
+ )
result.assert_success()
- checkout = os.path.join(tmpdir.dirname, tmpdir.basename, 'checkout')
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'output-include-with-indirect-deps.bst', '--directory', checkout])
+ checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "output-include-with-indirect-deps.bst",
+ "--directory",
+ checkout,
+ ],
+ )
result.assert_success()
# direct dependencies should be staged and filtered
@@ -513,46 +570,66 @@ def test_filter_include_with_indirect_deps(datafiles, cli, tmpdir):
assert not os.path.exists(os.path.join(checkout, "bar"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_fails_for_nonexisting_domain(datafiles, cli):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'output-include-nonexistent-domain.bst'])
+ result = cli.run(
+ project=project, args=["build", "output-include-nonexistent-domain.bst"]
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
error = "Unknown domains were used in output-include-nonexistent-domain.bst [line 7 column 2]"
assert error in result.stderr
- assert '- unknown_file' in result.stderr
+ assert "- unknown_file" in result.stderr
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_pass_integration(datafiles, cli):
project = str(datafiles)
# Explicitly not passing integration commands should be fine
- result = cli.run(project=project, args=['build', 'no-pass-integration.bst'])
+ result = cli.run(project=project, args=["build", "no-pass-integration.bst"])
result.assert_success()
# Passing integration commands should build nicely
- result = cli.run(project=project, args=['build', 'pass-integration.bst'])
+ result = cli.run(project=project, args=["build", "pass-integration.bst"])
result.assert_success()
# Checking out elements which don't pass integration commands should still work
- checkout_dir = os.path.join(project, 'no-pass')
- result = cli.run(project=project, args=['artifact', 'checkout', '--integrate',
- '--directory', checkout_dir, 'no-pass-integration.bst'])
+ checkout_dir = os.path.join(project, "no-pass")
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "--integrate",
+ "--directory",
+ checkout_dir,
+ "no-pass-integration.bst",
+ ],
+ )
result.assert_success()
# Checking out the artifact should fail if we run integration commands, as
# the staged artifacts don't have a shell
- checkout_dir = os.path.join(project, 'pass')
- result = cli.run(project=project, args=['artifact', 'checkout', '--integrate',
- '--directory', checkout_dir, 'pass-integration.bst'])
+ checkout_dir = os.path.join(project, "pass")
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "--integrate",
+ "--directory",
+ checkout_dir,
+ "pass-integration.bst",
+ ],
+ )
result.assert_main_error(ErrorDomain.STREAM, "missing-command")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_filter_stack_depend_failure(datafiles, cli):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'forbidden-stack-dep.bst'])
+ result = cli.run(project=project, args=["build", "forbidden-stack-dep.bst"])
result.assert_main_error(ErrorDomain.ELEMENT, "filter-bdepend-no-artifact")
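# Illustrative sketch (not part of this commit): every hunk above is the same
# mechanical transformation -- Black normalises string quotes to double quotes and
# explodes calls that exceed the configured line length, one argument per line with a
# trailing comma. A minimal demonstration of the quote normalisation, assuming the
# 'black' package is installed; the sample source line is the one from the hunk above:
import black

src = "result = cli.run(project=project, args=['build', 'forbidden-stack-dep.bst'])\n"
print(black.format_str(src, mode=black.FileMode()))
# -> result = cli.run(project=project, args=["build", "forbidden-stack-dep.bst"])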
diff --git a/tests/elements/filter/basic/element_plugins/dynamic.py b/tests/elements/filter/basic/element_plugins/dynamic.py
index fe83d7295..6cd6b1093 100644
--- a/tests/elements/filter/basic/element_plugins/dynamic.py
+++ b/tests/elements/filter/basic/element_plugins/dynamic.py
@@ -4,14 +4,17 @@ from buildstream import Element, Scope
# Copies files from the dependent element but inserts split-rules using dynamic data
class DynamicElement(Element):
def configure(self, node):
- node.validate_keys(['split-rules'])
- self.split_rules = {key: value.as_str_list() for key, value in node.get_mapping('split-rules').items()}
+ node.validate_keys(["split-rules"])
+ self.split_rules = {
+ key: value.as_str_list()
+ for key, value in node.get_mapping("split-rules").items()
+ }
def preflight(self):
pass
def get_unique_key(self):
- return {'split-rules': self.split_rules}
+ return {"split-rules": self.split_rules}
def configure_sandbox(self, sandbox):
pass
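# Illustrative sketch (not part of this commit): a hypothetical element configuration
# that the DynamicElement plugin above would accept -- configure() validates a single
# 'split-rules' key holding a mapping of string lists. The element name, dependency and
# split domains below are assumptions, written the same way the filter tests above
# generate .bst files.
from buildstream import _yaml

dynamic_config = {
    "kind": "dynamic",
    "depends": [{"filename": "input.bst", "type": "build"}],
    "config": {
        "split-rules": {
            "foo": ["/foo"],
            "bar": ["/bar"],
        },
    },
}
_yaml.roundtrip_dump(dynamic_config, "dynamic.bst")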
diff --git a/tests/examples/autotools.py b/tests/examples/autotools.py
index 45783b30c..e684fd43c 100644
--- a/tests/examples/autotools.py
+++ b/tests/examples/autotools.py
@@ -11,47 +11,71 @@ from buildstream.testing._utils.site import IS_LINUX, MACHINE_ARCH, HAVE_SANDBOX
pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'autotools'
+ os.path.dirname(os.path.realpath(__file__)),
+ "..",
+ "..",
+ "doc",
+ "examples",
+ "autotools",
)
# Tests a build of the autotools amhello project on an alpine-linux base runtime
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with sandbox')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This test is not meant to work with chroot sandbox')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot",
+ reason="This test is not meant to work with chroot sandbox",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
# Check that the project can be built correctly.
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'hello.bst', '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "hello.bst", "--directory", checkout],
+ )
result.assert_success()
- assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
- '/usr/share',
- '/usr/bin/hello',
- '/usr/share/doc', '/usr/share/doc/amhello',
- '/usr/share/doc/amhello/README'])
+ assert_contains(
+ checkout,
+ [
+ "/usr",
+ "/usr/lib",
+ "/usr/bin",
+ "/usr/share",
+ "/usr/bin/hello",
+ "/usr/share/doc",
+ "/usr/share/doc/amhello",
+ "/usr/share/doc/amhello/README",
+ ],
+ )
# Test running an executable built with autotools.
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This test is not meant to work with chroot sandbox')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot",
+ reason="This test is not meant to work with chroot sandbox",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_run(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
result.assert_success()
- result = cli.run(project=project, args=['shell', 'hello.bst', 'hello'])
+ result = cli.run(project=project, args=["shell", "hello.bst", "hello"])
result.assert_success()
- assert result.output == 'Hello World!\nThis is amhello 1.0.\n'
+ assert result.output == "Hello World!\nThis is amhello 1.0.\n"
diff --git a/tests/examples/developing.py b/tests/examples/developing.py
index 53a554b86..df6e82623 100644
--- a/tests/examples/developing.py
+++ b/tests/examples/developing.py
@@ -12,92 +12,119 @@ import tests.testutils.patch as patch
pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'developing'
+ os.path.dirname(os.path.realpath(__file__)),
+ "..",
+ "..",
+ "doc",
+ "examples",
+ "developing",
)
# Test that the project builds successfully
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with SANDBOX')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This is not meant to work with chroot')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with SANDBOX"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot", reason="This is not meant to work with chroot"
+)
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
# Check that the project can be built correctly.
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'hello.bst', '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "hello.bst", "--directory", checkout],
+ )
result.assert_success()
- assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
- '/usr/share',
- '/usr/bin/hello'])
+ assert_contains(
+ checkout, ["/usr", "/usr/lib", "/usr/bin", "/usr/share", "/usr/bin/hello"]
+ )
# Test the unmodified hello command works as expected.
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with SANDBOX')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This is not meant to work with chroot')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with SANDBOX"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot", reason="This is not meant to work with chroot"
+)
@pytest.mark.datafiles(DATA_DIR)
def test_run_unmodified_hello(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
result.assert_success()
- result = cli.run(project=project, args=['shell', 'hello.bst', 'hello'])
+ result = cli.run(project=project, args=["shell", "hello.bst", "hello"])
result.assert_success()
- assert result.output == 'Hello World\n'
+ assert result.output == "Hello World\n"
# Test opening a workspace
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.skipif(not IS_LINUX, reason="Only available on linux")
@pytest.mark.datafiles(DATA_DIR)
def test_open_workspace(cli, tmpdir, datafiles):
project = str(datafiles)
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
- result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst', ])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "-f", "--directory", workspace_dir, "hello.bst",],
+ )
result.assert_success()
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_success()
- result = cli.run(project=project, args=['workspace', 'close', '--remove-dir', 'hello.bst'])
+ result = cli.run(
+ project=project, args=["workspace", "close", "--remove-dir", "hello.bst"]
+ )
result.assert_success()
# Test making a change using the workspace
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with SANDBOX')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This is not meant to work with chroot')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with SANDBOX"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot", reason="This is not meant to work with chroot"
+)
@pytest.mark.datafiles(DATA_DIR)
def test_make_change_in_workspace(cli, tmpdir, datafiles):
project = str(datafiles)
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
- result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst'])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "-f", "--directory", workspace_dir, "hello.bst"],
+ )
result.assert_success()
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_success()
patch_target = os.path.join(workspace_dir, "hello.c")
patch_source = os.path.join(project, "update.patch")
patch.apply(patch_target, patch_source)
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
result.assert_success()
- result = cli.run(project=project, args=['shell', 'hello.bst', '--', 'hello'])
+ result = cli.run(project=project, args=["shell", "hello.bst", "--", "hello"])
result.assert_success()
- assert result.output == 'Hello World\nWe can use workspaces!\n'
+ assert result.output == "Hello World\nWe can use workspaces!\n"
- result = cli.run(project=project, args=['workspace', 'close', '--remove-dir', 'hello.bst'])
+ result = cli.run(
+ project=project, args=["workspace", "close", "--remove-dir", "hello.bst"]
+ )
result.assert_success()
diff --git a/tests/examples/first-project.py b/tests/examples/first-project.py
index 84ab7aa61..4a378df62 100644
--- a/tests/examples/first-project.py
+++ b/tests/examples/first-project.py
@@ -13,20 +13,28 @@ pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'first-project'
+ os.path.dirname(os.path.realpath(__file__)),
+ "..",
+ "..",
+ "doc",
+ "examples",
+ "first-project",
)
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.skipif(not IS_LINUX, reason="Only available on linux")
@pytest.mark.datafiles(DATA_DIR)
def test_first_project_build_checkout(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', 'hello.bst', '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "hello.bst", "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/hello.world'])
+ assert_contains(checkout, ["/hello.world"])
diff --git a/tests/examples/flatpak-autotools.py b/tests/examples/flatpak-autotools.py
index 2418807c0..4e7a9e36f 100644
--- a/tests/examples/flatpak-autotools.py
+++ b/tests/examples/flatpak-autotools.py
@@ -13,12 +13,20 @@ pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'flatpak-autotools'
+ os.path.dirname(os.path.realpath(__file__)),
+ "..",
+ "..",
+ "doc",
+ "examples",
+ "flatpak-autotools",
)
try:
- from bst_plugins_experimental.sources import _ostree # pylint: disable=unused-import
+ from bst_plugins_experimental.sources import (
+ _ostree,
+ ) # pylint: disable=unused-import
+
# Even when we have the plugin, it might be missing dependencies. This requires
# bst_plugins_experimental to be fully installed, with host ostree dependencies
HAVE_OSTREE_PLUGIN = True
@@ -33,9 +41,15 @@ except (ImportError, ValueError):
def workaround_setuptools_bug(project):
os.makedirs(os.path.join(project, "files", "links"), exist_ok=True)
try:
- os.symlink(os.path.join("usr", "lib"), os.path.join(project, "files", "links", "lib"))
- os.symlink(os.path.join("usr", "bin"), os.path.join(project, "files", "links", "bin"))
- os.symlink(os.path.join("usr", "etc"), os.path.join(project, "files", "links", "etc"))
+ os.symlink(
+ os.path.join("usr", "lib"), os.path.join(project, "files", "links", "lib")
+ )
+ os.symlink(
+ os.path.join("usr", "bin"), os.path.join(project, "files", "links", "bin")
+ )
+ os.symlink(
+ os.path.join("usr", "etc"), os.path.join(project, "files", "links", "etc")
+ )
except FileExistsError:
# If the files exist, we're running from a git checkout and
# not a source distribution; there is no need to complain
@@ -44,40 +58,53 @@ def workaround_setuptools_bug(project):
# Test that a build upon flatpak runtime 'works' - we use the autotools sample
# amhello project for this.
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_OSTREE_PLUGIN, reason='Only available on linux with ostree')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_OSTREE_PLUGIN, reason="Only available on linux with ostree"
+)
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
workaround_setuptools_bug(project)
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', 'hello.bst', '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "hello.bst", "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
- '/usr/share',
- '/usr/bin/hello', '/usr/share/doc',
- '/usr/share/doc/amhello',
- '/usr/share/doc/amhello/README'])
+ assert_contains(
+ checkout,
+ [
+ "/usr",
+ "/usr/lib",
+ "/usr/bin",
+ "/usr/share",
+ "/usr/bin/hello",
+ "/usr/share/doc",
+ "/usr/share/doc/amhello",
+ "/usr/share/doc/amhello/README",
+ ],
+ )
# Test running an executable built with autotools
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_OSTREE_PLUGIN, reason='Only available on linux with ostree')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_OSTREE_PLUGIN, reason="Only available on linux with ostree"
+)
@pytest.mark.datafiles(DATA_DIR)
def test_autotools_run(cli, datafiles):
project = str(datafiles)
workaround_setuptools_bug(project)
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
- result = cli.run(project=project, args=['shell', 'hello.bst', '/usr/bin/hello'])
+ result = cli.run(project=project, args=["shell", "hello.bst", "/usr/bin/hello"])
assert result.exit_code == 0
- assert result.output == 'Hello World!\nThis is amhello 1.0.\n'
+ assert result.output == "Hello World!\nThis is amhello 1.0.\n"
diff --git a/tests/examples/integration-commands.py b/tests/examples/integration-commands.py
index 1ed888b5d..fac45fd22 100644
--- a/tests/examples/integration-commands.py
+++ b/tests/examples/integration-commands.py
@@ -10,34 +10,49 @@ from buildstream.testing._utils.site import IS_LINUX, MACHINE_ARCH, HAVE_SANDBOX
pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'integration-commands'
+ os.path.dirname(os.path.realpath(__file__)),
+ "..",
+ "..",
+ "doc",
+ "examples",
+ "integration-commands",
)
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with sandbox')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This test is not meant to work with chroot sandbox')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot",
+ reason="This test is not meant to work with chroot sandbox",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_integration_commands_build(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
# Test running the executable
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with sandbox')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This test is not meant to work with chroot sandbox')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot",
+ reason="This test is not meant to work with chroot sandbox",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_integration_commands_run(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
- result = cli.run(project=project, args=['shell', 'hello.bst', '--', 'hello', 'pony'])
+ result = cli.run(
+ project=project, args=["shell", "hello.bst", "--", "hello", "pony"]
+ )
assert result.exit_code == 0
- assert result.output == 'Hello pony\n'
+ assert result.output == "Hello pony\n"
diff --git a/tests/examples/junctions.py b/tests/examples/junctions.py
index 18bf4da4f..e93db8a68 100644
--- a/tests/examples/junctions.py
+++ b/tests/examples/junctions.py
@@ -10,51 +10,77 @@ from buildstream.testing._utils.site import IS_LINUX, MACHINE_ARCH, HAVE_SANDBOX
pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'junctions'
+ os.path.dirname(os.path.realpath(__file__)),
+ "..",
+ "..",
+ "doc",
+ "examples",
+ "junctions",
)
# Test that the project builds successfully
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with bubblewrap')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This test is not meant to work with chroot sandbox')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with bubblewrap"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot",
+ reason="This test is not meant to work with chroot sandbox",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_build(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'callHello.bst'])
+ result = cli.run(project=project, args=["build", "callHello.bst"])
result.assert_success()
# Test the callHello script works as expected.
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with bubblewrap')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This test is not meant to work with chroot sandbox')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with bubblewrap"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot",
+ reason="This test is not meant to work with chroot sandbox",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_shell_call_hello(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'callHello.bst'])
+ result = cli.run(project=project, args=["build", "callHello.bst"])
result.assert_success()
- result = cli.run(project=project, args=['shell', 'callHello.bst', '--', '/bin/sh', 'callHello.sh'])
+ result = cli.run(
+ project=project,
+ args=["shell", "callHello.bst", "--", "/bin/sh", "callHello.sh"],
+ )
result.assert_success()
- assert result.output == 'Calling hello:\nHello World!\nThis is amhello 1.0.\n'
+ assert result.output == "Calling hello:\nHello World!\nThis is amhello 1.0.\n"
# Test opening a cross-junction workspace
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.skipif(not IS_LINUX, reason="Only available on linux")
@pytest.mark.datafiles(DATA_DIR)
def test_open_cross_junction_workspace(cli, tmpdir, datafiles):
project = str(datafiles)
workspace_dir = os.path.join(str(tmpdir), "workspace_hello_junction")
- result = cli.run(project=project,
- args=['workspace', 'open', '--directory', workspace_dir, 'hello-junction.bst:hello.bst'])
+ result = cli.run(
+ project=project,
+ args=[
+ "workspace",
+ "open",
+ "--directory",
+ workspace_dir,
+ "hello-junction.bst:hello.bst",
+ ],
+ )
result.assert_success()
- result = cli.run(project=project,
- args=['workspace', 'close', '--remove-dir', 'hello-junction.bst:hello.bst'])
+ result = cli.run(
+ project=project,
+ args=["workspace", "close", "--remove-dir", "hello-junction.bst:hello.bst"],
+ )
result.assert_success()
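# Illustrative sketch (not part of this commit): the 'hello-junction.bst:hello.bst'
# target above addresses an element through a junction. A hypothetical junction
# element, generated the same way other tests write .bst files; the URL and ref are
# placeholders, the real example project ships its own junction definition.
from buildstream import _yaml

junction_config = {
    "kind": "junction",
    "sources": [
        {
            "kind": "git",
            "url": "https://example.com/subproject.git",
            "ref": "0123456789abcdef0123456789abcdef01234567",
        }
    ],
}
_yaml.roundtrip_dump(junction_config, "hello-junction.bst")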
diff --git a/tests/examples/running-commands.py b/tests/examples/running-commands.py
index 1c419d524..177f4e3cc 100644
--- a/tests/examples/running-commands.py
+++ b/tests/examples/running-commands.py
@@ -10,34 +10,47 @@ from buildstream.testing._utils.site import IS_LINUX, MACHINE_ARCH, HAVE_SANDBOX
pytestmark = pytest.mark.integration
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), '..', '..', 'doc', 'examples', 'running-commands'
+ os.path.dirname(os.path.realpath(__file__)),
+ "..",
+ "..",
+ "doc",
+ "examples",
+ "running-commands",
)
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with sandbox')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This test is not meant to work with chroot sandbox')
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot",
+ reason="This test is not meant to work with chroot sandbox",
+)
def test_running_commands_build(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
# Test running the executable
-@pytest.mark.skipif(MACHINE_ARCH != 'x86-64',
- reason='Examples are written for x86-64')
-@pytest.mark.skipif(not IS_LINUX or not HAVE_SANDBOX, reason='Only available on linux with sandbox')
-@pytest.mark.skipif(HAVE_SANDBOX == 'chroot', reason='This test is not meant to work with chroot sandbox')
+@pytest.mark.skipif(MACHINE_ARCH != "x86-64", reason="Examples are written for x86-64")
+@pytest.mark.skipif(
+ not IS_LINUX or not HAVE_SANDBOX, reason="Only available on linux with sandbox"
+)
+@pytest.mark.skipif(
+ HAVE_SANDBOX == "chroot",
+ reason="This test is not meant to work with chroot sandbox",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_running_commands_run(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'hello.bst'])
+ result = cli.run(project=project, args=["build", "hello.bst"])
assert result.exit_code == 0
- result = cli.run(project=project, args=['shell', 'hello.bst', '--', 'hello'])
+ result = cli.run(project=project, args=["shell", "hello.bst", "--", "hello"])
assert result.exit_code == 0
- assert result.output == 'Hello World\n'
+ assert result.output == "Hello World\n"
diff --git a/tests/external_plugins.py b/tests/external_plugins.py
index f6f67707b..3e5684ea5 100644
--- a/tests/external_plugins.py
+++ b/tests/external_plugins.py
@@ -16,7 +16,7 @@ import pytest
# test_match_patterns (list[str]): A list of shell style globs which may be
# used to specify a subset of test files from the repository to run.
# These must be specified relative to the root of the repository.
-class ExternalPluginRepo():
+class ExternalPluginRepo:
def __init__(self, name, url, ref, test_match_patterns=None):
self.name = name
self.url = url
@@ -30,17 +30,23 @@ class ExternalPluginRepo():
def clone(self, location):
self._clone_location = os.path.join(location, self.name)
- subprocess.run(['git', 'clone',
- '--single-branch',
- '--branch', self.ref,
- '--depth', '1',
- self.url,
- self._clone_location,
- ])
+ subprocess.run(
+ [
+ "git",
+ "clone",
+ "--single-branch",
+ "--branch",
+ self.ref,
+ "--depth",
+ "1",
+ self.url,
+ self._clone_location,
+ ]
+ )
return self._clone_location
def install(self):
- subprocess.run(['pip3', 'install', self._clone_location])
+ subprocess.run(["pip3", "install", self._clone_location])
def test(self, args):
test_files = self._match_test_patterns()
@@ -55,7 +61,9 @@ class ExternalPluginRepo():
match_list.extend(matches)
if not match_list:
- raise ValueError("No matches found for patterns {}".format(self._test_match_patterns))
+ raise ValueError(
+ "No matches found for patterns {}".format(self._test_match_patterns)
+ )
return match_list
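# Illustrative sketch (not part of this commit): how the ExternalPluginRepo helper
# above might be driven. The repository name, URL, ref and glob pattern are
# hypothetical placeholders, not values used by the test suite.
repo = ExternalPluginRepo(
    name="bst-plugins-sample",
    url="https://example.com/bst-plugins-sample.git",
    ref="master",
    test_match_patterns=["tests/*.py"],
)
clone_dir = repo.clone("/tmp/external-plugin-tests")
repo.install()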
diff --git a/tests/format/assertion.py b/tests/format/assertion.py
index 7e87977cb..67436250a 100644
--- a/tests/format/assertion.py
+++ b/tests/format/assertion.py
@@ -7,30 +7,41 @@ from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream.testing.runcli import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'assertion'
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "assertion")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target,opt_pony,opt_horsy,assertion", [
- # Test an unconditional (!) directly in the element
- ('raw-assertion.bst', 'False', 'False', 'Raw assertion boogey'),
- # Test an assertion in a conditional
- ('conditional-assertion.bst', 'True', 'False', "It's not pony time yet"),
- # Test that we get the first composited assertion
- ('ordered-assertion.bst', 'True', 'True', "It's not horsy time yet"),
-])
+@pytest.mark.parametrize(
+ "target,opt_pony,opt_horsy,assertion",
+ [
+ # Test an unconditional (!) directly in the element
+ ("raw-assertion.bst", "False", "False", "Raw assertion boogey"),
+ # Test an assertion in a conditional
+ ("conditional-assertion.bst", "True", "False", "It's not pony time yet"),
+ # Test that we get the first composited assertion
+ ("ordered-assertion.bst", "True", "True", "It's not horsy time yet"),
+ ],
+)
def test_assertion_cli(cli, datafiles, target, opt_pony, opt_horsy, assertion):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=[
- '--option', 'pony', opt_pony,
- '--option', 'horsy', opt_horsy,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "pony",
+ opt_pony,
+ "--option",
+ "horsy",
+ opt_horsy,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ target,
+ ],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.USER_ASSERTION)
# Assert that the assertion text provided by the user
diff --git a/tests/format/dependencies.py b/tests/format/dependencies.py
index f92b89afa..e54b9b2d5 100644
--- a/tests/format/dependencies.py
+++ b/tests/format/dependencies.py
@@ -15,125 +15,125 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
#
@pytest.mark.datafiles(DATA_DIR)
def test_two_files(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
+ project = os.path.join(str(datafiles), "dependencies1")
- elements = cli.get_pipeline(project, ['target.bst'])
- assert elements == ['firstdep.bst', 'target.bst']
+ elements = cli.get_pipeline(project, ["target.bst"])
+ assert elements == ["firstdep.bst", "target.bst"]
@pytest.mark.datafiles(DATA_DIR)
def test_shared_dependency(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
+ project = os.path.join(str(datafiles), "dependencies1")
- elements = cli.get_pipeline(project, ['shareddeptarget.bst'])
- assert elements == ['firstdep.bst', 'shareddep.bst', 'shareddeptarget.bst']
+ elements = cli.get_pipeline(project, ["shareddeptarget.bst"])
+ assert elements == ["firstdep.bst", "shareddep.bst", "shareddeptarget.bst"]
@pytest.mark.datafiles(DATA_DIR)
def test_dependency_dict(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
- elements = cli.get_pipeline(project, ['target-depdict.bst'])
- assert elements == ['firstdep.bst', 'target-depdict.bst']
+ project = os.path.join(str(datafiles), "dependencies1")
+ elements = cli.get_pipeline(project, ["target-depdict.bst"])
+ assert elements == ["firstdep.bst", "target-depdict.bst"]
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_dependency_declaration(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
- result = cli.run(project=project, args=['show', 'invaliddep.bst'])
+ project = os.path.join(str(datafiles), "dependencies1")
+ result = cli.run(project=project, args=["show", "invaliddep.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_dependency_type(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
- result = cli.run(project=project, args=['show', 'invaliddeptype.bst'])
+ project = os.path.join(str(datafiles), "dependencies1")
+ result = cli.run(project=project, args=["show", "invaliddeptype.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_strict_dependency(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
- result = cli.run(project=project, args=['show', 'invalidstrict.bst'])
+ project = os.path.join(str(datafiles), "dependencies1")
+ result = cli.run(project=project, args=["show", "invalidstrict.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_non_strict_dependency(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
- result = cli.run(project=project, args=['show', 'invalidnonstrict.bst'])
+ project = os.path.join(str(datafiles), "dependencies1")
+ result = cli.run(project=project, args=["show", "invalidnonstrict.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_circular_dependency(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
- result = cli.run(project=project, args=['show', 'circulartarget.bst'])
+ project = os.path.join(str(datafiles), "dependencies1")
+ result = cli.run(project=project, args=["show", "circulartarget.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.CIRCULAR_DEPENDENCY)
@pytest.mark.datafiles(DATA_DIR)
def test_build_dependency(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
+ project = os.path.join(str(datafiles), "dependencies1")
- elements = cli.get_pipeline(project, ['builddep.bst'], scope='run')
- assert elements == ['builddep.bst']
+ elements = cli.get_pipeline(project, ["builddep.bst"], scope="run")
+ assert elements == ["builddep.bst"]
- elements = cli.get_pipeline(project, ['builddep.bst'], scope='build')
- assert elements == ['firstdep.bst']
+ elements = cli.get_pipeline(project, ["builddep.bst"], scope="build")
+ assert elements == ["firstdep.bst"]
@pytest.mark.datafiles(DATA_DIR)
def test_runtime_dependency(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
- elements = cli.get_pipeline(project, ['runtimedep.bst'], scope='build')
+ project = os.path.join(str(datafiles), "dependencies1")
+ elements = cli.get_pipeline(project, ["runtimedep.bst"], scope="build")
# FIXME: The empty line should probably never happen here when there are no results.
- assert elements == ['']
- elements = cli.get_pipeline(project, ['runtimedep.bst'], scope='run')
- assert elements == ['firstdep.bst', 'runtimedep.bst']
+ assert elements == [""]
+ elements = cli.get_pipeline(project, ["runtimedep.bst"], scope="run")
+ assert elements == ["firstdep.bst", "runtimedep.bst"]
@pytest.mark.datafiles(DATA_DIR)
def test_all_dependency(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
+ project = os.path.join(str(datafiles), "dependencies1")
- elements = cli.get_pipeline(project, ['alldep.bst'], scope='build')
- assert elements == ['firstdep.bst']
+ elements = cli.get_pipeline(project, ["alldep.bst"], scope="build")
+ assert elements == ["firstdep.bst"]
- elements = cli.get_pipeline(project, ['alldep.bst'], scope='run')
- assert elements == ['firstdep.bst', 'alldep.bst']
+ elements = cli.get_pipeline(project, ["alldep.bst"], scope="run")
+ assert elements == ["firstdep.bst", "alldep.bst"]
@pytest.mark.datafiles(DATA_DIR)
def test_list_build_dependency(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
+ project = os.path.join(str(datafiles), "dependencies1")
# Check that the pipeline includes the build dependency
- deps = cli.get_pipeline(project, ['builddep-list.bst'], scope="build")
+ deps = cli.get_pipeline(project, ["builddep-list.bst"], scope="build")
assert "firstdep.bst" in deps
@pytest.mark.datafiles(DATA_DIR)
def test_list_runtime_dependency(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
+ project = os.path.join(str(datafiles), "dependencies1")
# Check that the pipeline includes the runtime dependency
- deps = cli.get_pipeline(project, ['runtimedep-list.bst'], scope="run")
+ deps = cli.get_pipeline(project, ["runtimedep-list.bst"], scope="run")
assert "firstdep.bst" in deps
@pytest.mark.datafiles(DATA_DIR)
def test_list_dependencies_combined(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
+ project = os.path.join(str(datafiles), "dependencies1")
# Check that runtime deps get combined
- rundeps = cli.get_pipeline(project, ['list-combine.bst'], scope="run")
+ rundeps = cli.get_pipeline(project, ["list-combine.bst"], scope="run")
assert "firstdep.bst" not in rundeps
assert "seconddep.bst" in rundeps
assert "thirddep.bst" in rundeps
# Check that build deps get combined
- builddeps = cli.get_pipeline(project, ['list-combine.bst'], scope="build")
+ builddeps = cli.get_pipeline(project, ["list-combine.bst"], scope="build")
assert "firstdep.bst" in builddeps
assert "seconddep.bst" not in builddeps
assert "thirddep.bst" in builddeps
@@ -141,12 +141,12 @@ def test_list_dependencies_combined(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_list_overlap(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies1')
+ project = os.path.join(str(datafiles), "dependencies1")
# Check that dependencies get merged
- rundeps = cli.get_pipeline(project, ['list-overlap.bst'], scope="run")
+ rundeps = cli.get_pipeline(project, ["list-overlap.bst"], scope="run")
assert "firstdep.bst" in rundeps
- builddeps = cli.get_pipeline(project, ['list-overlap.bst'], scope="build")
+ builddeps = cli.get_pipeline(project, ["list-overlap.bst"], scope="build")
assert "firstdep.bst" in builddeps
@@ -156,10 +156,10 @@ def test_list_overlap(cli, datafiles):
#
@pytest.mark.datafiles(DATA_DIR)
def test_scope_all(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies2')
- elements = ['target.bst']
+ project = os.path.join(str(datafiles), "dependencies2")
+ elements = ["target.bst"]
- element_list = cli.get_pipeline(project, elements, scope='all')
+ element_list = cli.get_pipeline(project, elements, scope="all")
assert element_list == [
"build-build.bst",
@@ -174,10 +174,10 @@ def test_scope_all(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_scope_run(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies2')
- elements = ['target.bst']
+ project = os.path.join(str(datafiles), "dependencies2")
+ elements = ["target.bst"]
- element_list = cli.get_pipeline(project, elements, scope='run')
+ element_list = cli.get_pipeline(project, elements, scope="run")
assert element_list == [
"dep-one.bst",
@@ -189,69 +189,71 @@ def test_scope_run(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_scope_build(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies2')
- elements = ['target.bst']
+ project = os.path.join(str(datafiles), "dependencies2")
+ elements = ["target.bst"]
- element_list = cli.get_pipeline(project, elements, scope='build')
+ element_list = cli.get_pipeline(project, elements, scope="build")
assert element_list == ["dep-one.bst", "run.bst", "dep-two.bst"]
@pytest.mark.datafiles(DATA_DIR)
def test_scope_build_of_child(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies2')
- elements = ['target.bst']
+ project = os.path.join(str(datafiles), "dependencies2")
+ elements = ["target.bst"]
- element_list = cli.get_pipeline(project, elements, scope='build')
+ element_list = cli.get_pipeline(project, elements, scope="build")
# First pass, lets check dep-two
element = element_list[2]
# Pass two, let's look at these
- element_list = cli.get_pipeline(project, [element], scope='build')
+ element_list = cli.get_pipeline(project, [element], scope="build")
assert element_list == ["run-build.bst", "build.bst"]
@pytest.mark.datafiles(DATA_DIR)
def test_no_recurse(cli, datafiles):
- project = os.path.join(str(datafiles), 'dependencies2')
- elements = ['target.bst']
+ project = os.path.join(str(datafiles), "dependencies2")
+ elements = ["target.bst"]
# We abuse the 'plan' scope here to ensure that we call
# element.dependencies() with recurse=False - currently, no `bst
# show` option does this directly.
- element_list = cli.get_pipeline(project, elements, scope='plan')
+ element_list = cli.get_pipeline(project, elements, scope="plan")
assert element_list == [
- 'build-build.bst',
- 'run-build.bst',
- 'build.bst',
- 'dep-one.bst',
- 'run.bst',
- 'dep-two.bst',
- 'target.bst',
+ "build-build.bst",
+ "run-build.bst",
+ "build.bst",
+ "dep-one.bst",
+ "run.bst",
+ "dep-two.bst",
+ "target.bst",
]
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize(("element", "asserts"), [
- ('build-runtime', False),
- ('build-build', True),
- ('build-all', True),
- ('runtime-runtime', True),
- ('runtime-all', True),
- ('all-all', True),
-])
+@pytest.mark.parametrize(
+ ("element", "asserts"),
+ [
+ ("build-runtime", False),
+ ("build-build", True),
+ ("build-all", True),
+ ("runtime-runtime", True),
+ ("runtime-all", True),
+ ("all-all", True),
+ ],
+)
def test_duplicate_deps(cli, datafiles, element, asserts):
- project = os.path.join(str(datafiles), 'dependencies3')
+ project = os.path.join(str(datafiles), "dependencies3")
- result = cli.run(project=project, args=['show', '{}.bst'.format(element)])
+ result = cli.run(project=project, args=["show", "{}.bst".format(element)])
if asserts:
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.DUPLICATE_DEPENDENCY)
- assert '[line 10 column 2]' in result.stderr
- assert '[line 8 column 2]' in result.stderr
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.DUPLICATE_DEPENDENCY)
+ assert "[line 10 column 2]" in result.stderr
+ assert "[line 8 column 2]" in result.stderr
else:
result.assert_success()
diff --git a/tests/format/include.py b/tests/format/include.py
index 434a94d1f..9aec83ff5 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -12,97 +12,109 @@ from tests.testutils import generate_junction
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'include'
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "include")
@pytest.mark.datafiles(DATA_DIR)
def test_include_project_file(cli, datafiles):
- project = os.path.join(str(datafiles), 'file')
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(str(datafiles), "file")
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_bool('included')
+ assert loaded.get_bool("included")
def test_include_missing_file(cli, tmpdir):
- tmpdir.join('project.conf').write('{"name": "test"}')
- element = tmpdir.join('include_missing_file.bst')
+ tmpdir.join("project.conf").write('{"name": "test"}')
+ element = tmpdir.join("include_missing_file.bst")
# Normally we would use dicts and _yaml.roundtrip_dump to write such things, but here
# we want to be sure of a stable line and column number.
- element.write(textwrap.dedent("""
+ element.write(
+ textwrap.dedent(
+ """
kind: manual
"(@)":
- nosuch.yaml
- """).strip())
+ """
+ ).strip()
+ )
- result = cli.run(project=str(tmpdir), args=['show', str(element.basename)])
+ result = cli.run(project=str(tmpdir), args=["show", str(element.basename)])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
# Make sure the root cause provenance is in the output.
- assert 'line 4 column 2' in result.stderr
+ assert "line 4 column 2" in result.stderr
def test_include_dir(cli, tmpdir):
- tmpdir.join('project.conf').write('{"name": "test"}')
- tmpdir.mkdir('subdir')
- element = tmpdir.join('include_dir.bst')
+ tmpdir.join("project.conf").write('{"name": "test"}')
+ tmpdir.mkdir("subdir")
+ element = tmpdir.join("include_dir.bst")
# Normally we would use dicts and _yaml.roundtrip_dump to write such things, but here
# we want to be sure of a stable line and column number.
- element.write(textwrap.dedent("""
+ element.write(
+ textwrap.dedent(
+ """
kind: manual
"(@)":
- subdir/
- """).strip())
+ """
+ ).strip()
+ )
- result = cli.run(project=str(tmpdir), args=['show', str(element.basename)])
- result.assert_main_error(
- ErrorDomain.LOAD, LoadErrorReason.LOADING_DIRECTORY)
+ result = cli.run(project=str(tmpdir), args=["show", str(element.basename)])
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.LOADING_DIRECTORY)
# Make sure the root cause provenance is in the output.
- assert 'line 4 column 2' in result.stderr
+ assert "line 4 column 2" in result.stderr
@pytest.mark.datafiles(DATA_DIR)
def test_include_junction_file(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'junction')
-
- generate_junction(tmpdir,
- os.path.join(project, 'subproject'),
- os.path.join(project, 'junction.bst'),
- store_ref=True)
-
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(str(datafiles), "junction")
+
+ generate_junction(
+ tmpdir,
+ os.path.join(project, "subproject"),
+ os.path.join(project, "junction.bst"),
+ store_ref=True,
+ )
+
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_bool('included')
+ assert loaded.get_bool("included")
@pytest.mark.datafiles(DATA_DIR)
def test_include_junction_options(cli, datafiles):
- project = os.path.join(str(datafiles), 'options')
-
- result = cli.run(project=project, args=[
- '-o', 'build_arch', 'x86_64',
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(str(datafiles), "options")
+
+ result = cli.run(
+ project=project,
+ args=[
+ "-o",
+ "build_arch",
+ "x86_64",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('build_arch') == 'x86_64'
+ assert loaded.get_str("build_arch") == "x86_64"
@pytest.mark.datafiles(DATA_DIR)
@@ -111,31 +123,25 @@ def test_junction_element_partial_project_project(cli, tmpdir, datafiles):
Junction elements never depend on fully include processed project.
"""
- project = os.path.join(str(datafiles), 'junction')
+ project = os.path.join(str(datafiles), "junction")
- subproject_path = os.path.join(project, 'subproject')
- junction_path = os.path.join(project, 'junction.bst')
+ subproject_path = os.path.join(project, "subproject")
+ junction_path = os.path.join(project, "junction.bst")
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(subproject_path)
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
+ element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, junction_path)
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'junction.bst'])
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('included', default=None) is None
+ assert loaded.get_str("included", default=None) is None
@pytest.mark.datafiles(DATA_DIR)
@@ -144,170 +150,177 @@ def test_junction_element_not_partial_project_file(cli, tmpdir, datafiles):
Junction elements never depend on fully include processed project.
"""
- project = os.path.join(str(datafiles), 'file_with_subproject')
+ project = os.path.join(str(datafiles), "file_with_subproject")
- subproject_path = os.path.join(project, 'subproject')
- junction_path = os.path.join(project, 'junction.bst')
+ subproject_path = os.path.join(project, "subproject")
+ junction_path = os.path.join(project, "junction.bst")
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(subproject_path)
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
+ element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, junction_path)
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'junction.bst'])
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('included', default=None) is not None
+ assert loaded.get_str("included", default=None) is not None
@pytest.mark.datafiles(DATA_DIR)
def test_include_element_overrides(cli, datafiles):
- project = os.path.join(str(datafiles), 'overrides')
+ project = os.path.join(str(datafiles), "overrides")
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('manual_main_override', default=None) is not None
- assert loaded.get_str('manual_included_override', default=None) is not None
+ assert loaded.get_str("manual_main_override", default=None) is not None
+ assert loaded.get_str("manual_included_override", default=None) is not None
@pytest.mark.datafiles(DATA_DIR)
def test_include_element_overrides_composition(cli, datafiles):
- project = os.path.join(str(datafiles), 'overrides')
+ project = os.path.join(str(datafiles), "overrides")
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{config}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{config}", "element.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str_list('build-commands') == ['first', 'second']
+ assert loaded.get_str_list("build-commands") == ["first", "second"]
@pytest.mark.datafiles(DATA_DIR)
def test_list_overide_does_not_fail_upon_first_composition(cli, datafiles):
- project = os.path.join(str(datafiles), 'eventual_overrides')
+ project = os.path.join(str(datafiles), "eventual_overrides")
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{public}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{public}", "element.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
# Assert that the explicitly overwritten public data is present
- bst = loaded.get_mapping('bst')
- assert 'foo-commands' in bst
- assert bst.get_str_list('foo-commands') == ['need', 'this']
+ bst = loaded.get_mapping("bst")
+ assert "foo-commands" in bst
+ assert bst.get_str_list("foo-commands") == ["need", "this"]
@pytest.mark.datafiles(DATA_DIR)
def test_include_element_overrides_sub_include(cli, datafiles):
- project = os.path.join(str(datafiles), 'sub-include')
+ project = os.path.join(str(datafiles), "sub-include")
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('included', default=None) is not None
+ assert loaded.get_str("included", default=None) is not None
@pytest.mark.datafiles(DATA_DIR)
def test_junction_do_not_use_included_overrides(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'overrides-junction')
-
- generate_junction(tmpdir,
- os.path.join(project, 'subproject'),
- os.path.join(project, 'junction.bst'),
- store_ref=True)
-
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'junction.bst'])
+ project = os.path.join(str(datafiles), "overrides-junction")
+
+ generate_junction(
+ tmpdir,
+ os.path.join(project, "subproject"),
+ os.path.join(project, "junction.bst"),
+ store_ref=True,
+ )
+
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{vars}", "junction.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('main_override', default=None) is not None
- assert loaded.get_str('included_override', default=None) is None
+ assert loaded.get_str("main_override", default=None) is not None
+ assert loaded.get_str("included_override", default=None) is None
@pytest.mark.datafiles(DATA_DIR)
def test_conditional_in_fragment(cli, datafiles):
- project = os.path.join(str(datafiles), 'conditional')
-
- result = cli.run(project=project, args=[
- '-o', 'build_arch', 'x86_64',
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(str(datafiles), "conditional")
+
+ result = cli.run(
+ project=project,
+ args=[
+ "-o",
+ "build_arch",
+ "x86_64",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('size') == '8'
+ assert loaded.get_str("size") == "8"
@pytest.mark.datafiles(DATA_DIR)
def test_inner(cli, datafiles):
- project = os.path.join(str(datafiles), 'inner')
- result = cli.run(project=project, args=[
- '-o', 'build_arch', 'x86_64',
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(str(datafiles), "inner")
+ result = cli.run(
+ project=project,
+ args=[
+ "-o",
+ "build_arch",
+ "x86_64",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('build_arch') == 'x86_64'
+ assert loaded.get_str("build_arch") == "x86_64"
@pytest.mark.datafiles(DATA_DIR)
def test_recursive_include(cli, datafiles):
- project = os.path.join(str(datafiles), 'recursive')
+ project = os.path.join(str(datafiles), "recursive")
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.RECURSIVE_INCLUDE)
- assert 'line 2 column 2' in result.stderr
+ assert "line 2 column 2" in result.stderr
@pytest.mark.datafiles(DATA_DIR)
def test_local_to_junction(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'local_to_junction')
-
- generate_junction(tmpdir,
- os.path.join(project, 'subproject'),
- os.path.join(project, 'junction.bst'),
- store_ref=True)
-
- result = cli.run(project=project, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(str(datafiles), "local_to_junction")
+
+ generate_junction(
+ tmpdir,
+ os.path.join(project, "subproject"),
+ os.path.join(project, "junction.bst"),
+ store_ref=True,
+ )
+
+ result = cli.run(
+ project=project,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_bool('included')
+ assert loaded.get_bool("included")
diff --git a/tests/format/include_composition.py b/tests/format/include_composition.py
index e10e28bc0..a840b6bad 100644
--- a/tests/format/include_composition.py
+++ b/tests/format/include_composition.py
@@ -11,7 +11,7 @@ from tests.testutils import dummy_context
@contextmanager
def make_includes(basedir):
- _yaml.roundtrip_dump({'name': 'test'}, os.path.join(basedir, 'project.conf'))
+ _yaml.roundtrip_dump({"name": "test"}, os.path.join(basedir, "project.conf"))
with dummy_context() as context:
project = Project(basedir, context)
loader = project.loader
@@ -21,131 +21,130 @@ def make_includes(basedir):
def test_main_has_priority(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump(
+ {"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml"))
+ )
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join("main.yml")))
- _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
includes.process(main)
- assert main.get_str_list('test') == ['main']
+ assert main.get_str_list("test") == ["main"]
def test_include_cannot_append(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': ['main']},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump(
+ {"(@)": ["a.yml"], "test": ["main"]}, str(tmpdir.join("main.yml"))
+ )
+ main = _yaml.load(str(tmpdir.join("main.yml")))
- _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
- str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({"test": {"(>)": ["a"]}}, str(tmpdir.join("a.yml")))
includes.process(main)
- assert main.get_str_list('test') == ['main']
+ assert main.get_str_list("test") == ["main"]
def test_main_can_append(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml'], 'test': {'(>)': ['main']}},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump(
+ {"(@)": ["a.yml"], "test": {"(>)": ["main"]}}, str(tmpdir.join("main.yml"))
+ )
+ main = _yaml.load(str(tmpdir.join("main.yml")))
- _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
includes.process(main)
- assert main.get_str_list('test') == ['a', 'main']
+ assert main.get_str_list("test") == ["a", "main"]
def test_sibling_cannot_append_backward(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")))
- _yaml.roundtrip_dump({'test': {'(>)': ['a']}},
- str(tmpdir.join('a.yml')))
- _yaml.roundtrip_dump({'test': ['b']},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({"test": {"(>)": ["a"]}}, str(tmpdir.join("a.yml")))
+ _yaml.roundtrip_dump({"test": ["b"]}, str(tmpdir.join("b.yml")))
includes.process(main)
- assert main.get_str_list('test') == ['b']
+ assert main.get_str_list("test") == ["b"]
def test_sibling_can_append_forward(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")))
- _yaml.roundtrip_dump({'test': ['a']},
- str(tmpdir.join('a.yml')))
- _yaml.roundtrip_dump({'test': {'(>)': ['b']}},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
+ _yaml.roundtrip_dump({"test": {"(>)": ["b"]}}, str(tmpdir.join("b.yml")))
includes.process(main)
- assert main.get_str_list('test') == ['a', 'b']
+ assert main.get_str_list("test") == ["a", "b"]
def test_lastest_sibling_has_priority(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml']},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
+ main = _yaml.load(str(tmpdir.join("main.yml")))
- _yaml.roundtrip_dump({'test': ['a']},
- str(tmpdir.join('a.yml')))
- _yaml.roundtrip_dump({'test': ['b']},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
+ _yaml.roundtrip_dump({"test": ["b"]}, str(tmpdir.join("b.yml")))
includes.process(main)
- assert main.get_str_list('test') == ['b']
+ assert main.get_str_list("test") == ["b"]
def test_main_keeps_keys(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml'], 'something': 'else'},
- str(tmpdir.join('main.yml')))
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump(
+ {"(@)": ["a.yml"], "something": "else"}, str(tmpdir.join("main.yml"))
+ )
+ main = _yaml.load(str(tmpdir.join("main.yml")))
- _yaml.roundtrip_dump({'test': ['a']}, str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
includes.process(main)
- assert main.get_str_list('test') == ['a']
- assert main.get_str('something') == 'else'
+ assert main.get_str_list("test") == ["a"]
+ assert main.get_str("something") == "else"
def test_overwrite_directive_on_later_composite(tmpdir):
with make_includes(str(tmpdir)) as includes:
- _yaml.roundtrip_dump({'(@)': ['a.yml', 'b.yml'], 'test': {'(=)': ['Overwritten']}},
- str(tmpdir.join('main.yml')))
+ _yaml.roundtrip_dump(
+ {"(@)": ["a.yml", "b.yml"], "test": {"(=)": ["Overwritten"]}},
+ str(tmpdir.join("main.yml")),
+ )
- main = _yaml.load(str(tmpdir.join('main.yml')))
+ main = _yaml.load(str(tmpdir.join("main.yml")))
# a.yml
- _yaml.roundtrip_dump({'test': ['some useless', 'list', 'to be overwritten'],
- 'foo': 'should not be present'},
- str(tmpdir.join('a.yml')))
+ _yaml.roundtrip_dump(
+ {
+ "test": ["some useless", "list", "to be overwritten"],
+ "foo": "should not be present",
+ },
+ str(tmpdir.join("a.yml")),
+ )
# b.yaml isn't going to have a 'test' node to overwrite
- _yaml.roundtrip_dump({'foo': 'should be present'},
- str(tmpdir.join('b.yml')))
+ _yaml.roundtrip_dump({"foo": "should be present"}, str(tmpdir.join("b.yml")))
includes.process(main)
- assert main.get_str_list('test') == ['Overwritten']
- assert main.get_str('foo') == 'should be present'
+ assert main.get_str_list("test") == ["Overwritten"]
+ assert main.get_str("foo") == "should be present"
diff --git a/tests/format/invalid_keys.py b/tests/format/invalid_keys.py
index 861cfeabd..40a7b7c34 100644
--- a/tests/format/invalid_keys.py
+++ b/tests/format/invalid_keys.py
@@ -7,22 +7,27 @@ from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream.testing.runcli import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'invalid-keys'
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "invalid-keys")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize(("element", "location"), [
- ("no-path-specified.bst", "line 4 column 4"),
- ("optional-source.bst", "line 6 column 10"),
- ("included-source.bst", "line 4 column 4"),
-])
+@pytest.mark.parametrize(
+ ("element", "location"),
+ [
+ ("no-path-specified.bst", "line 4 column 4"),
+ ("optional-source.bst", "line 6 column 10"),
+ ("included-source.bst", "line 4 column 4"),
+ ],
+)
def test_compositied_node_fails_usefully(cli, datafiles, element, location):
project = str(datafiles)
- result = cli.run(project=project, args=['show', element])
+ result = cli.run(project=project, args=["show", element])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
assert "synthetic node" not in result.stderr
- assert "{} [{}]: Dictionary did not contain expected key 'path'".format(element, location) in result.stderr
+ assert (
+ "{} [{}]: Dictionary did not contain expected key 'path'".format(
+ element, location
+ )
+ in result.stderr
+ )
diff --git a/tests/format/junctions.py b/tests/format/junctions.py
index a0af521a2..eedf4d69b 100644
--- a/tests/format/junctions.py
+++ b/tests/format/junctions.py
@@ -13,54 +13,57 @@ from buildstream.testing import create_repo
from buildstream.testing._utils.site import HAVE_GIT
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'junctions',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "junctions",)
def copy_subprojects(project, datafiles, subprojects):
for subproject in subprojects:
- shutil.copytree(os.path.join(str(datafiles), subproject), os.path.join(str(project), subproject))
+ shutil.copytree(
+ os.path.join(str(datafiles), subproject),
+ os.path.join(str(project), subproject),
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_simple_pipeline(cli, datafiles):
- project = os.path.join(str(datafiles), 'foo')
- copy_subprojects(project, datafiles, ['base'])
+ project = os.path.join(str(datafiles), "foo")
+ copy_subprojects(project, datafiles, ["base"])
# Check that the pipeline includes the subproject element
- element_list = cli.get_pipeline(project, ['target.bst'])
- assert 'base.bst:target.bst' in element_list
+ element_list = cli.get_pipeline(project, ["target.bst"])
+ assert "base.bst:target.bst" in element_list
@pytest.mark.datafiles(DATA_DIR)
def test_simple_build(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'foo')
- copy_subprojects(project, datafiles, ['base'])
+ project = os.path.join(str(datafiles), "foo")
+ copy_subprojects(project, datafiles, ["base"])
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the checkout contains the expected files from both projects
- assert os.path.exists(os.path.join(checkoutdir, 'base.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'foo.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "base.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "foo.txt"))
@pytest.mark.datafiles(DATA_DIR)
def test_junction_missing_project_conf(cli, datafiles):
- project = datafiles / 'foo'
- copy_subprojects(project, datafiles, ['base'])
+ project = datafiles / "foo"
+ copy_subprojects(project, datafiles, ["base"])
# TODO: see if datafiles can tidy this concat up
# py3.5 requires this str conversion.
- os.remove(str(project / 'base' / 'project.conf'))
+ os.remove(str(project / "base" / "project.conf"))
# Note that both 'foo' and 'base' projects have a 'target.bst'. The
# 'app.bst' in 'foo' depends on the 'target.bst' in 'base', i.e.:
@@ -78,7 +81,7 @@ def test_junction_missing_project_conf(cli, datafiles):
# That would lead to a 'circular dependency error' in this setup, when we
# expect an 'invalid junction'.
#
- result = cli.run(project=project, args=['build', 'app.bst'])
+ result = cli.run(project=project, args=["build", "app.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_JUNCTION)
# Assert that we have the expected provenance encoded into the error
@@ -89,20 +92,21 @@ def test_junction_missing_project_conf(cli, datafiles):
def test_workspaced_junction_missing_project_conf(cli, datafiles):
# See test_junction_missing_project_conf for some more background.
- project = datafiles / 'foo'
- workspace_dir = project / 'base_workspace'
- copy_subprojects(project, datafiles, ['base'])
+ project = datafiles / "foo"
+ workspace_dir = project / "base_workspace"
+ copy_subprojects(project, datafiles, ["base"])
result = cli.run(
project=project,
- args=['workspace', 'open', 'base.bst', '--directory', workspace_dir])
+ args=["workspace", "open", "base.bst", "--directory", workspace_dir],
+ )
print(result)
result.assert_success()
# py3.5 requires this str conversion.
- os.remove(str(workspace_dir / 'project.conf'))
+ os.remove(str(workspace_dir / "project.conf"))
- result = cli.run(project=project, args=['build', 'app.bst'])
+ result = cli.run(project=project, args=["build", "app.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_JUNCTION)
# Assert that we have the expected provenance encoded into the error
@@ -111,18 +115,18 @@ def test_workspaced_junction_missing_project_conf(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_build_of_same_junction_used_twice(cli, datafiles):
- project = os.path.join(str(datafiles), 'inconsistent-names')
+ project = os.path.join(str(datafiles), "inconsistent-names")
# Check we can build a project that contains the same junction
# that is used twice, but named differently
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_missing_file_in_subproject(cli, datafiles):
- project = os.path.join(str(datafiles), 'missing-element')
- result = cli.run(project=project, args=['show', 'target.bst'])
+ project = os.path.join(str(datafiles), "missing-element")
+ result = cli.run(project=project, args=["show", "target.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
# Assert that we have the expected provenance encoded into the error
@@ -131,8 +135,8 @@ def test_missing_file_in_subproject(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_missing_file_in_subsubproject(cli, datafiles):
- project = os.path.join(str(datafiles), 'missing-element')
- result = cli.run(project=project, args=['show', 'sub-target.bst'])
+ project = os.path.join(str(datafiles), "missing-element")
+ result = cli.run(project=project, args=["show", "sub-target.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
# Assert that we have the expected provenance encoded into the error
@@ -141,8 +145,8 @@ def test_missing_file_in_subsubproject(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_missing_junction_in_subproject(cli, datafiles):
- project = os.path.join(str(datafiles), 'missing-element')
- result = cli.run(project=project, args=['show', 'sub-target-bad-junction.bst'])
+ project = os.path.join(str(datafiles), "missing-element")
+ result = cli.run(project=project, args=["show", "sub-target-bad-junction.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
# Assert that we have the expected provenance encoded into the error
@@ -151,62 +155,68 @@ def test_missing_junction_in_subproject(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_nested_simple(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'foo')
- copy_subprojects(project, datafiles, ['base'])
+ project = os.path.join(str(datafiles), "foo")
+ copy_subprojects(project, datafiles, ["base"])
- project = os.path.join(str(datafiles), 'nested')
- copy_subprojects(project, datafiles, ['foo'])
+ project = os.path.join(str(datafiles), "nested")
+ copy_subprojects(project, datafiles, ["foo"])
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the checkout contains the expected files from all subprojects
- assert os.path.exists(os.path.join(checkoutdir, 'base.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'foo.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "base.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "foo.txt"))
@pytest.mark.datafiles(DATA_DIR)
def test_nested_double(cli, tmpdir, datafiles):
- project_foo = os.path.join(str(datafiles), 'foo')
- copy_subprojects(project_foo, datafiles, ['base'])
+ project_foo = os.path.join(str(datafiles), "foo")
+ copy_subprojects(project_foo, datafiles, ["base"])
- project_bar = os.path.join(str(datafiles), 'bar')
- copy_subprojects(project_bar, datafiles, ['base'])
+ project_bar = os.path.join(str(datafiles), "bar")
+ copy_subprojects(project_bar, datafiles, ["base"])
- project = os.path.join(str(datafiles), 'toplevel')
- copy_subprojects(project, datafiles, ['base', 'foo', 'bar'])
+ project = os.path.join(str(datafiles), "toplevel")
+ copy_subprojects(project, datafiles, ["base", "foo", "bar"])
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the checkout contains the expected files from all subprojects
- assert os.path.exists(os.path.join(checkoutdir, 'base.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'foo.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'bar.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "base.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "foo.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "bar.txt"))
@pytest.mark.datafiles(DATA_DIR)
def test_nested_conflict(cli, datafiles):
- project_foo = os.path.join(str(datafiles), 'foo')
- copy_subprojects(project_foo, datafiles, ['base'])
+ project_foo = os.path.join(str(datafiles), "foo")
+ copy_subprojects(project_foo, datafiles, ["base"])
- project_bar = os.path.join(str(datafiles), 'bar')
- copy_subprojects(project_bar, datafiles, ['base'])
+ project_bar = os.path.join(str(datafiles), "bar")
+ copy_subprojects(project_bar, datafiles, ["base"])
- project = os.path.join(str(datafiles), 'conflict')
- copy_subprojects(project, datafiles, ['foo', 'bar'])
+ project = os.path.join(str(datafiles), "conflict")
+ copy_subprojects(project, datafiles, ["foo", "bar"])
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.CONFLICTING_JUNCTION)
assert "bar.bst:target.bst [line 3 column 2]" in result.stderr
@@ -215,49 +225,49 @@ def test_nested_conflict(cli, datafiles):
# Test that we error correctly when the junction element itself is missing
@pytest.mark.datafiles(DATA_DIR)
def test_missing_junction(cli, datafiles):
- project = os.path.join(str(datafiles), 'invalid')
+ project = os.path.join(str(datafiles), "invalid")
- result = cli.run(project=project, args=['build', 'missing.bst'])
+ result = cli.run(project=project, args=["build", "missing.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
# Test that we error correctly when an element is not found in the subproject
@pytest.mark.datafiles(DATA_DIR)
def test_missing_subproject_element(cli, datafiles):
- project = os.path.join(str(datafiles), 'invalid')
- copy_subprojects(project, datafiles, ['base'])
+ project = os.path.join(str(datafiles), "invalid")
+ copy_subprojects(project, datafiles, ["base"])
- result = cli.run(project=project, args=['build', 'missing-element.bst'])
+ result = cli.run(project=project, args=["build", "missing-element.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
# Test that we error correctly when a junction itself has dependencies
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_with_deps(cli, datafiles):
- project = os.path.join(str(datafiles), 'invalid')
- copy_subprojects(project, datafiles, ['base'])
+ project = os.path.join(str(datafiles), "invalid")
+ copy_subprojects(project, datafiles, ["base"])
- result = cli.run(project=project, args=['build', 'junction-with-deps.bst'])
+ result = cli.run(project=project, args=["build", "junction-with-deps.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_JUNCTION)
# Test that we error correctly when a junction is directly depended on
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_junction_dep(cli, datafiles):
- project = os.path.join(str(datafiles), 'invalid')
- copy_subprojects(project, datafiles, ['base'])
+ project = os.path.join(str(datafiles), "invalid")
+ copy_subprojects(project, datafiles, ["base"])
- result = cli.run(project=project, args=['build', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["build", "junction-dep.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
# Test that we error correctly when we junction-depend on a non-junction
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_junctiondep_not_a_junction(cli, datafiles):
- project = os.path.join(str(datafiles), 'invalid')
- copy_subprojects(project, datafiles, ['base'])
+ project = os.path.join(str(datafiles), "invalid")
+ copy_subprojects(project, datafiles, ["base"])
- result = cli.run(project=project, args=['build', 'junctiondep-not-a-junction.bst'])
+ result = cli.run(project=project, args=["build", "junctiondep-not-a-junction.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
# Assert that we have the expected provenance encoded into the error
@@ -266,132 +276,129 @@ def test_invalid_junctiondep_not_a_junction(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_options_default(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'options-default')
- copy_subprojects(project, datafiles, ['options-base'])
+ project = os.path.join(str(datafiles), "options-default")
+ copy_subprojects(project, datafiles, ["options-base"])
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
- assert os.path.exists(os.path.join(checkoutdir, 'pony.txt'))
- assert not os.path.exists(os.path.join(checkoutdir, 'horsy.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "pony.txt"))
+ assert not os.path.exists(os.path.join(checkoutdir, "horsy.txt"))
@pytest.mark.datafiles(DATA_DIR)
def test_options(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'options')
- copy_subprojects(project, datafiles, ['options-base'])
+ project = os.path.join(str(datafiles), "options")
+ copy_subprojects(project, datafiles, ["options-base"])
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
- assert not os.path.exists(os.path.join(checkoutdir, 'pony.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'horsy.txt'))
+ assert not os.path.exists(os.path.join(checkoutdir, "pony.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "horsy.txt"))
@pytest.mark.datafiles(DATA_DIR)
def test_options_inherit(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'options-inherit')
- copy_subprojects(project, datafiles, ['options-base'])
+ project = os.path.join(str(datafiles), "options-inherit")
+ copy_subprojects(project, datafiles, ["options-base"])
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
- assert not os.path.exists(os.path.join(checkoutdir, 'pony.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'horsy.txt'))
+ assert not os.path.exists(os.path.join(checkoutdir, "pony.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "horsy.txt"))
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@pytest.mark.datafiles(DATA_DIR)
def test_git_show(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'foo')
+ project = os.path.join(str(datafiles), "foo")
# Create the repo from 'base' subdir
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(str(datafiles), 'base'))
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(str(datafiles), "base"))
# Write out junction element with git source
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'base.bst'))
+ element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "base.bst"))
# Check that bst show succeeds with implicit subproject fetching and the
# pipeline includes the subproject element
- element_list = cli.get_pipeline(project, ['target.bst'])
- assert 'base.bst:target.bst' in element_list
+ element_list = cli.get_pipeline(project, ["target.bst"])
+ assert "base.bst:target.bst" in element_list
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@pytest.mark.datafiles(DATA_DIR)
def test_git_build(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'foo')
+ project = os.path.join(str(datafiles), "foo")
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the repo from 'base' subdir
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(str(datafiles), 'base'))
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(str(datafiles), "base"))
# Write out junction element with git source
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'base.bst'))
+ element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "base.bst"))
# Build (with implicit fetch of subproject), checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the checkout contains the expected files from both projects
- assert os.path.exists(os.path.join(checkoutdir, 'base.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'foo.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "base.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "foo.txt"))
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@pytest.mark.datafiles(DATA_DIR)
def test_git_missing_project_conf(cli, tmpdir, datafiles):
- project = datafiles / 'foo'
+ project = datafiles / "foo"
# See test_junction_missing_project_conf for some more background.
# py3.5 requires this str conversion.
- os.remove(str(datafiles / 'base' / 'project.conf'))
+ os.remove(str(datafiles / "base" / "project.conf"))
# Create the repo from 'base' subdir
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(str(datafiles), 'base'))
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(str(datafiles), "base"))
# Write out junction element with git source
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element, str(project / 'base.bst'))
-
- result = cli.run(project=project, args=['build', 'app.bst'])
+ element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, str(project / "base.bst"))
+
+ result = cli.run(project=project, args=["build", "app.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_JUNCTION)
# Assert that we have the expected provenance encoded into the error
@@ -400,93 +407,108 @@ def test_git_missing_project_conf(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_cross_junction_names(cli, datafiles):
- project = os.path.join(str(datafiles), 'foo')
- copy_subprojects(project, datafiles, ['base'])
+ project = os.path.join(str(datafiles), "foo")
+ copy_subprojects(project, datafiles, ["base"])
- element_list = cli.get_pipeline(project, ['base.bst:target.bst'])
- assert 'base.bst:target.bst' in element_list
+ element_list = cli.get_pipeline(project, ["base.bst:target.bst"])
+ assert "base.bst:target.bst" in element_list
@pytest.mark.datafiles(DATA_DIR)
def test_build_git_cross_junction_names(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'foo')
+ project = os.path.join(str(datafiles), "foo")
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the repo from 'base' subdir
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(str(datafiles), 'base'))
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(str(datafiles), "base"))
# Write out junction element with git source
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'base.bst'))
+ element = {"kind": "junction", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "base.bst"))
print(element)
- print(cli.get_pipeline(project, ['base.bst']))
+ print(cli.get_pipeline(project, ["base.bst"]))
# Build (with implicit fetch of subproject), checkout
- result = cli.run(project=project, args=['build', 'base.bst:target.bst'])
+ result = cli.run(project=project, args=["build", "base.bst:target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'base.bst:target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "base.bst:target.bst",
+ "--directory",
+ checkoutdir,
+ ],
+ )
result.assert_success()
# Check that the checkout contains the expected files from both projects
- assert os.path.exists(os.path.join(checkoutdir, 'base.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "base.txt"))
@pytest.mark.datafiles(DATA_DIR)
def test_config_target(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'config-target')
- checkoutdir = os.path.join(str(tmpdir), 'checkout')
+ project = os.path.join(str(datafiles), "config-target")
+ checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the checkout contains the expected files from sub-sub-project
- assert os.path.exists(os.path.join(checkoutdir, 'hello.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "hello.txt"))
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_sources_and_target(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'config-target')
+ project = os.path.join(str(datafiles), "config-target")
- result = cli.run(project=project, args=['show', 'invalid-source-target.bst'])
+ result = cli.run(project=project, args=["show", "invalid-source-target.bst"])
result.assert_main_error(ErrorDomain.ELEMENT, None)
- assert "junction elements cannot define both 'sources' and 'target' config option" in result.stderr
+ assert (
+ "junction elements cannot define both 'sources' and 'target' config option"
+ in result.stderr
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_target_name(cli, tmpdir, datafiles):
- project = os.path.join(str(datafiles), 'config-target')
+ project = os.path.join(str(datafiles), "config-target")
# Rename our junction element to the same name as its target
- old_path = os.path.join(project, 'elements/subsubproject.bst')
- new_path = os.path.join(project, 'elements/subsubproject-junction.bst')
+ old_path = os.path.join(project, "elements/subsubproject.bst")
+ new_path = os.path.join(project, "elements/subsubproject-junction.bst")
os.rename(old_path, new_path)
# This should fail now
- result = cli.run(project=project, args=['show', 'subsubproject-junction.bst'])
+ result = cli.run(project=project, args=["show", "subsubproject-junction.bst"])
result.assert_main_error(ErrorDomain.ELEMENT, None)
- assert "junction elements cannot target an element with the same name" in result.stderr
+ assert (
+ "junction elements cannot target an element with the same name" in result.stderr
+ )
# We cannot exhaustively test all possible ways in which this can go wrong, so
# test a couple of common ways in which we expect this to go wrong.
-@pytest.mark.parametrize('target', ['no-junction.bst', 'nested-junction-target.bst'])
+@pytest.mark.parametrize("target", ["no-junction.bst", "nested-junction-target.bst"])
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_target_format(cli, tmpdir, datafiles, target):
- project = os.path.join(str(datafiles), 'config-target')
+ project = os.path.join(str(datafiles), "config-target")
- result = cli.run(project=project, args=['show', target])
+ result = cli.run(project=project, args=["show", target])
result.assert_main_error(ErrorDomain.ELEMENT, None)
- assert "'target' option must be in format '{junction-name}:{element-name}'" in result.stderr
+ assert (
+ "'target' option must be in format '{junction-name}:{element-name}'"
+ in result.stderr
+ )
diff --git a/tests/format/listdirectiveerrors.py b/tests/format/listdirectiveerrors.py
index 269b521a4..e17dd7e8c 100644
--- a/tests/format/listdirectiveerrors.py
+++ b/tests/format/listdirectiveerrors.py
@@ -12,38 +12,44 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
def test_project_error(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'list-directive-error-project')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "list-directive-error-project"
+ )
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.TRAILING_LIST_DIRECTIVE)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target", [
- ('variables.bst'), ('environment.bst'), ('config.bst'), ('public.bst')
-])
+@pytest.mark.parametrize(
+ "target", [("variables.bst"), ("environment.bst"), ("config.bst"), ("public.bst")]
+)
def test_element_error(cli, datafiles, target):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'list-directive-error-element')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "list-directive-error-element"
+ )
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", target],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.TRAILING_LIST_DIRECTIVE)
@pytest.mark.datafiles(DATA_DIR)
def test_project_composite_error(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'list-directive-type-error')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "list-directive-type-error"
+ )
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.ILLEGAL_COMPOSITE)
diff --git a/tests/format/optionarch.py b/tests/format/optionarch.py
index f347e27ae..69faee347 100644
--- a/tests/format/optionarch.py
+++ b/tests/format/optionarch.py
@@ -16,52 +16,47 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("machine,value,expected", [
- # Test explicitly provided arches
- ('arm', 'aarch32', 'Army'),
- ('arm', 'aarch64', 'Aarchy'),
-
- # Test automatically derived arches
- ('arm', None, 'Army'),
- ('aarch64', None, 'Aarchy'),
-
- # Test that explicitly provided arches dont error out
- # when the `uname` reported arch is not supported
- ('i386', 'aarch32', 'Army'),
- ('x86_64', 'aarch64', 'Aarchy'),
-])
+@pytest.mark.parametrize(
+ "machine,value,expected",
+ [
+ # Test explicitly provided arches
+ ("arm", "aarch32", "Army"),
+ ("arm", "aarch64", "Aarchy"),
+ # Test automatically derived arches
+ ("arm", None, "Army"),
+ ("aarch64", None, "Aarchy"),
+ # Test that explicitly provided arches don't error out
+ # when the `uname` reported arch is not supported
+ ("i386", "aarch32", "Army"),
+ ("x86_64", "aarch64", "Aarchy"),
+ ],
+)
def test_conditional(cli, datafiles, machine, value, expected):
with override_platform_uname(machine=machine):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-arch")
bst_args = []
if value is not None:
- bst_args += ['--option', 'machine_arch', value]
-
- bst_args += [
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'
- ]
+ bst_args += ["--option", "machine_arch", value]
+
+ bst_args += ["show", "--deps", "none", "--format", "%{vars}", "element.bst"]
result = cli.run(project=project, silent=True, args=bst_args)
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('result') == expected
+ assert loaded.get_str("result") == expected
@pytest.mark.datafiles(DATA_DIR)
def test_unsupported_arch(cli, datafiles):
with override_platform_uname(machine="x86_64"):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'
- ])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-arch")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -70,13 +65,14 @@ def test_unsupported_arch(cli, datafiles):
def test_alias(cli, datafiles):
with override_platform_uname(machine="arm"):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-alias')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'
- ])
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "option-arch-alias"
+ )
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_success()
@@ -85,13 +81,12 @@ def test_alias(cli, datafiles):
def test_unknown_host_arch(cli, datafiles):
with override_platform_uname(machine="x86_128"):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'
- ])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-arch")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.PLATFORM, None)
@@ -99,12 +94,11 @@ def test_unknown_host_arch(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_unknown_project_arch(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-unknown')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'
- ])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-arch-unknown")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optionbool.py b/tests/format/optionbool.py
index d772b483c..275be61cf 100644
--- a/tests/format/optionbool.py
+++ b/tests/format/optionbool.py
@@ -12,101 +12,101 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target,option,expected", [
- # Test 'foo' syntax, and valid values of 'True' / 'False'
- ('element.bst', 'True', 'a pony'),
- ('element.bst', 'true', 'a pony'),
- ('element.bst', 'False', 'not pony'),
- ('element.bst', 'false', 'not pony'),
-
- # Test 'not foo' syntax
- ('element-not.bst', 'False', 'not pony'),
- ('element-not.bst', 'True', 'a pony'),
-
- # Test 'foo == True' syntax
- ('element-equals.bst', 'False', 'not pony'),
- ('element-equals.bst', 'True', 'a pony'),
-
- # Test 'foo != True' syntax
- ('element-not-equals.bst', 'False', 'not pony'),
- ('element-not-equals.bst', 'True', 'a pony'),
-])
+@pytest.mark.parametrize(
+ "target,option,expected",
+ [
+ # Test 'foo' syntax, and valid values of 'True' / 'False'
+ ("element.bst", "True", "a pony"),
+ ("element.bst", "true", "a pony"),
+ ("element.bst", "False", "not pony"),
+ ("element.bst", "false", "not pony"),
+ # Test 'not foo' syntax
+ ("element-not.bst", "False", "not pony"),
+ ("element-not.bst", "True", "a pony"),
+ # Test 'foo == True' syntax
+ ("element-equals.bst", "False", "not pony"),
+ ("element-equals.bst", "True", "a pony"),
+ # Test 'foo != True' syntax
+ ("element-not-equals.bst", "False", "not pony"),
+ ("element-not-equals.bst", "True", "a pony"),
+ ],
+)
def test_conditional_cli(cli, datafiles, target, option, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-bool')
- result = cli.run(project=project, silent=True, args=[
- '--option', 'pony', option,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-bool")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "pony",
+ option,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ target,
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('thepony') == expected
+ assert loaded.get_str("thepony") == expected
# Test configuration of boolean option in the config file
#
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target,option,expected", [
- ('element.bst', True, 'a pony'),
- ('element.bst', False, 'not pony'),
-])
+@pytest.mark.parametrize(
+ "target,option,expected",
+ [("element.bst", True, "a pony"), ("element.bst", False, "not pony"),],
+)
def test_conditional_config(cli, datafiles, target, option, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-bool')
- cli.configure({
- 'projects': {
- 'test': {
- 'options': {
- 'pony': option
- }
- }
- }
- })
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-bool")
+ cli.configure({"projects": {"test": {"options": {"pony": option}}}})
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", target],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('thepony') == expected
+ assert loaded.get_str("thepony") == expected
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("cli_option", [
- ('falsey'), ('pony'), ('trUE')
-])
+@pytest.mark.parametrize("cli_option", [("falsey"), ("pony"), ("trUE")])
def test_invalid_value_cli(cli, datafiles, cli_option):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-bool')
- result = cli.run(project=project, silent=True, args=[
- '--option', 'pony', cli_option,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-bool")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "pony",
+ cli_option,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("config_option", [
- ('pony'), (['its', 'a', 'list']), ({'dic': 'tionary'})
-])
+@pytest.mark.parametrize(
+ "config_option", [("pony"), (["its", "a", "list"]), ({"dic": "tionary"})]
+)
def test_invalid_value_config(cli, datafiles, config_option):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-bool')
- cli.configure({
- 'projects': {
- 'test': {
- 'options': {
- 'pony': config_option
- }
- }
- }
- })
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-bool")
+ cli.configure({"projects": {"test": {"options": {"pony": config_option}}}})
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
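The optionbool hunks above show two recurring Black behaviours in this patch: deeply nested dict literals passed to cli.configure() are collapsed onto one line when they fit, and short parametrize lists are folded while keeping a trailing comma. As a quick aside, the collapsed literal carries exactly the same data as the original nested layout:

# Illustrative sketch, not part of this commit: the collapsed dict literal is
# the same data as the nested original, so cli.configure() sees no difference.
nested = {
    "projects": {
        "test": {
            "options": {
                "pony": True,
            },
        },
    },
}
collapsed = {"projects": {"test": {"options": {"pony": True}}}}
assert nested == collapsed
print("equal:", nested == collapsed)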
diff --git a/tests/format/optioneltmask.py b/tests/format/optioneltmask.py
index 75265fdd7..2530999bf 100644
--- a/tests/format/optioneltmask.py
+++ b/tests/format/optioneltmask.py
@@ -12,73 +12,91 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target,value,expected", [
- ('pony.bst', 'pony.bst', 'True'),
- ('horsy.bst', 'pony.bst, horsy.bst', 'True'),
- ('zebry.bst', 'pony.bst, horsy.bst', 'False'),
-])
+@pytest.mark.parametrize(
+ "target,value,expected",
+ [
+ ("pony.bst", "pony.bst", "True"),
+ ("horsy.bst", "pony.bst, horsy.bst", "True"),
+ ("zebry.bst", "pony.bst, horsy.bst", "False"),
+ ],
+)
def test_conditional_cli(cli, datafiles, target, value, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-element-mask')
- result = cli.run(project=project, silent=True, args=[
- '--option', 'debug_elements', value,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-element-mask")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "debug_elements",
+ value,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ target,
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('debug') == expected
+ assert loaded.get_str("debug") == expected
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target,value,expected", [
- ('pony.bst', ['pony.bst'], 'True'),
- ('horsy.bst', ['pony.bst', 'horsy.bst'], 'True'),
- ('zebry.bst', ['pony.bst', 'horsy.bst'], 'False'),
-])
+@pytest.mark.parametrize(
+ "target,value,expected",
+ [
+ ("pony.bst", ["pony.bst"], "True"),
+ ("horsy.bst", ["pony.bst", "horsy.bst"], "True"),
+ ("zebry.bst", ["pony.bst", "horsy.bst"], "False"),
+ ],
+)
def test_conditional_config(cli, datafiles, target, value, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-element-mask')
- cli.configure({
- 'projects': {
- 'test': {
- 'options': {
- 'debug_elements': value
- }
- }
- }
- })
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-element-mask")
+ cli.configure({"projects": {"test": {"options": {"debug_elements": value}}}})
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", target],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('debug') == expected
+ assert loaded.get_str("debug") == expected
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_declaration(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-element-mask-invalid')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'pony.bst'])
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "option-element-mask-invalid"
+ )
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "pony.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_value(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-element-mask')
- result = cli.run(project=project, silent=True, args=[
- '--option', 'debug_elements', 'kitten.bst',
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'pony.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-element-mask")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "debug_elements",
+ "kitten.bst",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "pony.bst",
+ ],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optionenum.py b/tests/format/optionenum.py
index f9aff503f..ee6a4fa0e 100644
--- a/tests/format/optionenum.py
+++ b/tests/format/optionenum.py
@@ -12,104 +12,112 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target,option,value,expected", [
- # Test 'var == "foo"' syntax
- ('element.bst', 'brother', 'pony', 'a pony'),
- ('element.bst', 'brother', 'zebry', 'a zebry'),
- ('element.bst', 'brother', 'horsy', 'a horsy'),
- # Test 'var1 == var2' syntax
- ('element-compare.bst', 'brother', 'horsy', 'different'),
- ('element-compare.bst', 'brother', 'zebry', 'same'),
- ('element-compare.bst', 'sister', 'pony', 'same'),
-])
+@pytest.mark.parametrize(
+ "target,option,value,expected",
+ [
+ # Test 'var == "foo"' syntax
+ ("element.bst", "brother", "pony", "a pony"),
+ ("element.bst", "brother", "zebry", "a zebry"),
+ ("element.bst", "brother", "horsy", "a horsy"),
+ # Test 'var1 == var2' syntax
+ ("element-compare.bst", "brother", "horsy", "different"),
+ ("element-compare.bst", "brother", "zebry", "same"),
+ ("element-compare.bst", "sister", "pony", "same"),
+ ],
+)
def test_conditional_cli(cli, datafiles, target, option, value, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-enum')
- result = cli.run(project=project, silent=True, args=[
- '--option', option, value,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-enum")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ option,
+ value,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ target,
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('result') == expected
+ assert loaded.get_str("result") == expected
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target,option,value,expected", [
- # Test 'var == "foo"' syntax
- ('element.bst', 'brother', 'pony', 'a pony'),
- ('element.bst', 'brother', 'zebry', 'a zebry'),
- ('element.bst', 'brother', 'horsy', 'a horsy'),
- # Test 'var1 == var2' syntax
- ('element-compare.bst', 'brother', 'horsy', 'different'),
- ('element-compare.bst', 'brother', 'zebry', 'same'),
- ('element-compare.bst', 'sister', 'pony', 'same'),
-])
+@pytest.mark.parametrize(
+ "target,option,value,expected",
+ [
+ # Test 'var == "foo"' syntax
+ ("element.bst", "brother", "pony", "a pony"),
+ ("element.bst", "brother", "zebry", "a zebry"),
+ ("element.bst", "brother", "horsy", "a horsy"),
+ # Test 'var1 == var2' syntax
+ ("element-compare.bst", "brother", "horsy", "different"),
+ ("element-compare.bst", "brother", "zebry", "same"),
+ ("element-compare.bst", "sister", "pony", "same"),
+ ],
+)
def test_conditional_config(cli, datafiles, target, option, value, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-enum')
- cli.configure({
- 'projects': {
- 'test': {
- 'options': {
- option: value
- }
- }
- }
- })
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-enum")
+ cli.configure({"projects": {"test": {"options": {option: value}}}})
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", target],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('result') == expected
+ assert loaded.get_str("result") == expected
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_value_cli(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-enum')
- result = cli.run(project=project, silent=True, args=[
- '--option', 'brother', 'giraffy',
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-enum")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "brother",
+ "giraffy",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("config_option", [
- ('giraffy'), (['its', 'a', 'list']), ({'dic': 'tionary'})
-])
+@pytest.mark.parametrize(
+ "config_option", [("giraffy"), (["its", "a", "list"]), ({"dic": "tionary"})]
+)
def test_invalid_value_config(cli, datafiles, config_option):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-enum')
- cli.configure({
- 'projects': {
- 'test': {
- 'options': {
- 'brother': config_option
- }
- }
- }
- })
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-enum")
+ cli.configure({"projects": {"test": {"options": {"brother": config_option}}}})
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_missing_values(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-enum-missing')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-enum-missing")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optionexports.py b/tests/format/optionexports.py
index 104abcf83..90bbace97 100644
--- a/tests/format/optionexports.py
+++ b/tests/format/optionexports.py
@@ -11,28 +11,37 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("option_name,option_value,var_name,var_value", [
-
- # Test boolean
- ('bool_export', 'False', 'exported-bool', '0'),
- ('bool_export', 'True', 'exported-bool', '1'),
-
- # Enum
- ('enum_export', 'pony', 'exported-enum', 'pony'),
- ('enum_export', 'horsy', 'exported-enum', 'horsy'),
-
- # Flags
- ('flags_export', 'pony', 'exported-flags', 'pony'),
- ('flags_export', 'pony, horsy', 'exported-flags', 'horsy,pony'),
-])
+@pytest.mark.parametrize(
+ "option_name,option_value,var_name,var_value",
+ [
+ # Test boolean
+ ("bool_export", "False", "exported-bool", "0"),
+ ("bool_export", "True", "exported-bool", "1"),
+ # Enum
+ ("enum_export", "pony", "exported-enum", "pony"),
+ ("enum_export", "horsy", "exported-enum", "horsy"),
+ # Flags
+ ("flags_export", "pony", "exported-flags", "pony"),
+ ("flags_export", "pony, horsy", "exported-flags", "horsy,pony"),
+ ],
+)
def test_export(cli, datafiles, option_name, option_value, var_name, var_value):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-exports')
- result = cli.run(project=project, silent=True, args=[
- '--option', option_name, option_value,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-exports")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ option_name,
+ option_value,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
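The optionexports hunks above keep the expected values unchanged, e.g. an input of "pony, horsy" still maps to an exported-flags value of "horsy,pony". Read from the test data alone, that expectation matches a comma-split, whitespace-strip, sort and re-join; the helper below is only a plausible reading of the fixture, not code taken from BuildStream:

# Illustrative sketch, not part of this commit and not BuildStream's own code:
# reproduces the expected "exported-flags" values from the test data above.
def normalise_flags(value):
    return ",".join(sorted(item.strip() for item in value.split(",")))


assert normalise_flags("pony, horsy") == "horsy,pony"
assert normalise_flags("pony") == "pony"
print(normalise_flags("zebry,horsy , pony"))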
diff --git a/tests/format/optionflags.py b/tests/format/optionflags.py
index 29bb7ec2c..72d175bf8 100644
--- a/tests/format/optionflags.py
+++ b/tests/format/optionflags.py
@@ -12,113 +12,126 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target,option,value,expected", [
- # Test (var == [ "foo" ]) syntax
- ('element.bst', 'farm', 'pony', 'a pony'),
- ('element.bst', 'farm', 'zebry', 'a zebry'),
- ('element.bst', 'farm', 'pony, horsy', 'a pony and a horsy'),
- ('element.bst', 'farm', 'zebry,horsy , pony', 'all the animals'),
-
- # Test ("literal" in var) syntax
- ('element-in.bst', 'farm', 'zebry, horsy, pony', 'a zebry'),
-
- # Test ("literal" not in var) syntax
- ('element-in.bst', 'farm', 'zebry, horsy', 'no pony'),
-
- # Test (var1 not in var2) syntax (where var1 is enum and var2 is flags)
- ('element-in.bst', 'farm', 'zebry, pony', 'no horsy'),
-])
+@pytest.mark.parametrize(
+ "target,option,value,expected",
+ [
+ # Test (var == [ "foo" ]) syntax
+ ("element.bst", "farm", "pony", "a pony"),
+ ("element.bst", "farm", "zebry", "a zebry"),
+ ("element.bst", "farm", "pony, horsy", "a pony and a horsy"),
+ ("element.bst", "farm", "zebry,horsy , pony", "all the animals"),
+ # Test ("literal" in var) syntax
+ ("element-in.bst", "farm", "zebry, horsy, pony", "a zebry"),
+ # Test ("literal" not in var) syntax
+ ("element-in.bst", "farm", "zebry, horsy", "no pony"),
+ # Test (var1 not in var2) syntax (where var1 is enum and var2 is flags)
+ ("element-in.bst", "farm", "zebry, pony", "no horsy"),
+ ],
+)
def test_conditional_cli(cli, datafiles, target, option, value, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-flags')
- result = cli.run(project=project, silent=True, args=[
- '--option', option, value,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-flags")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ option,
+ value,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ target,
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('result') == expected
+ assert loaded.get_str("result") == expected
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("target,option,value,expected", [
- # Test 'var == [ "foo" ]' syntax
- ('element.bst', 'farm', ['pony'], 'a pony'),
- ('element.bst', 'farm', ['zebry'], 'a zebry'),
- ('element.bst', 'farm', ['pony', 'horsy'], 'a pony and a horsy'),
- ('element.bst', 'farm', ['zebry', 'horsy', 'pony'], 'all the animals'),
-])
+@pytest.mark.parametrize(
+ "target,option,value,expected",
+ [
+ # Test 'var == [ "foo" ]' syntax
+ ("element.bst", "farm", ["pony"], "a pony"),
+ ("element.bst", "farm", ["zebry"], "a zebry"),
+ ("element.bst", "farm", ["pony", "horsy"], "a pony and a horsy"),
+ ("element.bst", "farm", ["zebry", "horsy", "pony"], "all the animals"),
+ ],
+)
def test_conditional_config(cli, datafiles, target, option, value, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-flags')
- cli.configure({
- 'projects': {
- 'test': {
- 'options': {
- option: value
- }
- }
- }
- })
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- target])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-flags")
+ cli.configure({"projects": {"test": {"options": {option: value}}}})
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", target],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('result') == expected
+ assert loaded.get_str("result") == expected
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("cli_option", [
- ('giraffy'), # Not a valid animal for the farm option
- ('horsy pony') # Does not include comma separators
-])
+@pytest.mark.parametrize(
+ "cli_option",
+ [
+ ("giraffy"), # Not a valid animal for the farm option
+ ("horsy pony"), # Does not include comma separators
+ ],
+)
def test_invalid_value_cli(cli, datafiles, cli_option):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-flags')
- result = cli.run(project=project, silent=True, args=[
- '--option', 'farm', cli_option,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-flags")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "farm",
+ cli_option,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("config_option", [
- ('pony'), # Not specified as a list
- (['horsy', 'pony', 'giraffy']), # Invalid giraffy animal for farm option
- ({'dic': 'tionary'}) # Dicts also dont make sense in the config for flags
-])
+@pytest.mark.parametrize(
+ "config_option",
+ [
+ ("pony"), # Not specified as a list
+ (["horsy", "pony", "giraffy"]), # Invalid giraffy animal for farm option
+ ({"dic": "tionary"}), # Dicts also dont make sense in the config for flags
+ ],
+)
def test_invalid_value_config(cli, datafiles, config_option):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-flags')
- cli.configure({
- 'projects': {
- 'test': {
- 'options': {
- 'farm': config_option
- }
- }
- }
- })
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-flags")
+ cli.configure({"projects": {"test": {"options": {"farm": config_option}}}})
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_missing_values(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-flags-missing')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "option-flags-missing"
+ )
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optionos.py b/tests/format/optionos.py
index f915d889e..cb75db71f 100644
--- a/tests/format/optionos.py
+++ b/tests/format/optionos.py
@@ -15,51 +15,46 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("system,value,expected", [
- # Test explicitly provided arches
- ('Darwin', 'Linux', 'Linuxy'),
- ('SunOS', 'FreeBSD', 'FreeBSDy'),
-
- # Test automatically derived arches
- ('Linux', None, 'Linuxy'),
- ('Darwin', None, 'Darwiny'),
-
- # Test that explicitly provided arches dont error out
- # when the `uname` reported arch is not supported
- ('AIX', 'Linux', 'Linuxy'),
- ('HaikuOS', 'SunOS', 'SunOSy'),
-])
+@pytest.mark.parametrize(
+ "system,value,expected",
+ [
+ # Test explicitly provided arches
+ ("Darwin", "Linux", "Linuxy"),
+ ("SunOS", "FreeBSD", "FreeBSDy"),
+ # Test automatically derived arches
+ ("Linux", None, "Linuxy"),
+ ("Darwin", None, "Darwiny"),
+ # Test that explicitly provided arches dont error out
+ # when the `uname` reported arch is not supported
+ ("AIX", "Linux", "Linuxy"),
+ ("HaikuOS", "SunOS", "SunOSy"),
+ ],
+)
def test_conditionals(cli, datafiles, system, value, expected):
with override_platform_uname(system=system):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-os')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-os")
bst_args = []
if value is not None:
- bst_args += ['--option', 'machine_os', value]
+ bst_args += ["--option", "machine_os", value]
- bst_args += [
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'
- ]
+ bst_args += ["show", "--deps", "none", "--format", "%{vars}", "element.bst"]
result = cli.run(project=project, silent=True, args=bst_args)
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('result') == expected
+ assert loaded.get_str("result") == expected
@pytest.mark.datafiles(DATA_DIR)
def test_unsupported_arch(cli, datafiles):
with override_platform_uname(system="AIX"):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-os')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'
- ])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-os")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/optionoverrides.py b/tests/format/optionoverrides.py
index d4ed257dd..ba12e751f 100644
--- a/tests/format/optionoverrides.py
+++ b/tests/format/optionoverrides.py
@@ -11,22 +11,17 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("arch", [('i686'), ('x86_64')])
+@pytest.mark.parametrize("arch", [("i686"), ("x86_64")])
def test_override(cli, datafiles, arch):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'option-overrides')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "option-overrides")
- bst_args = ['--option', 'arch', arch]
- bst_args += [
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'
- ]
+ bst_args = ["--option", "arch", arch]
+ bst_args += ["show", "--deps", "none", "--format", "%{vars}", "element.bst"]
result = cli.run(project=project, silent=True, args=bst_args)
result.assert_success()
# See the associated project.conf for the expected values
- expected_value = '--host={}-unknown-linux-gnu'.format(arch)
+ expected_value = "--host={}-unknown-linux-gnu".format(arch)
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('conf-global') == expected_value
+ assert loaded.get_str("conf-global") == expected_value
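A small note on the single-parameter parametrize lists in this patch, such as [("i686"), ("x86_64")] just above: the parentheses are redundant, so these entries are plain strings rather than one-element tuples, and Black simply preserves the existing style. A tiny check making that distinction explicit, offered only as an aside:

# Illustrative sketch, not part of this commit.  ("i686") is just the string
# "i686"; a one-element tuple needs a trailing comma.
assert ("i686") == "i686"
assert ("i686",) != "i686"
assert isinstance(("i686",), tuple)
print(type(("i686")).__name__, type(("i686",)).__name__)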
diff --git a/tests/format/options.py b/tests/format/options.py
index 9c0e043f9..c2f4584d4 100644
--- a/tests/format/options.py
+++ b/tests/format/options.py
@@ -8,244 +8,319 @@ from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream.testing.runcli import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'options'
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "options")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("project_dir", [
- ('invalid-name-spaces'),
- ('invalid-name-dashes'),
- ('invalid-name-plus'),
- ('invalid-name-leading-number'),
-])
+@pytest.mark.parametrize(
+ "project_dir",
+ [
+ ("invalid-name-spaces"),
+ ("invalid-name-dashes"),
+ ("invalid-name-plus"),
+ ("invalid-name-leading-number"),
+ ],
+)
def test_invalid_option_name(cli, datafiles, project_dir):
project = os.path.join(datafiles.dirname, datafiles.basename, project_dir)
- result = cli.run(project=project, silent=True, args=['show', 'element.bst'])
+ result = cli.run(project=project, silent=True, args=["show", "element.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_SYMBOL_NAME)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("project_dir", [
- ('invalid-variable-name-spaces'),
- ('invalid-variable-name-plus'),
-])
+@pytest.mark.parametrize(
+ "project_dir", [("invalid-variable-name-spaces"), ("invalid-variable-name-plus"),]
+)
def test_invalid_variable_name(cli, datafiles, project_dir):
project = os.path.join(datafiles.dirname, datafiles.basename, project_dir)
- result = cli.run(project=project, silent=True, args=['show', 'element.bst'])
+ result = cli.run(project=project, silent=True, args=["show", "element.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_SYMBOL_NAME)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_option_type(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'invalid-type')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "invalid-type")
# Test with the opt option set
- result = cli.run(project=project, silent=True, args=[
- '--option', 'opt', 'funny',
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "opt",
+ "funny",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_option_cli(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'simple-condition')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "simple-condition")
# Test with the opt option set
- result = cli.run(project=project, silent=True, args=[
- '--option', 'fart', 'funny',
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "fart",
+ "funny",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_option_config(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'simple-condition')
- cli.configure({
- 'projects': {
- 'test': {
- 'options': {
- 'fart': 'Hello'
- }
- }
- }
- })
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "simple-condition")
+ cli.configure({"projects": {"test": {"options": {"fart": "Hello"}}}})
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_expression(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'invalid-expression')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "invalid-expression")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.EXPRESSION_FAILED)
@pytest.mark.datafiles(DATA_DIR)
def test_undefined(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'undefined-variable')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "undefined-variable")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.EXPRESSION_FAILED)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_condition(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'invalid-condition')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "invalid-condition")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("opt_option,expected_prefix", [
- ('False', '/usr'),
- ('True', '/opt'),
-])
+@pytest.mark.parametrize(
+ "opt_option,expected_prefix", [("False", "/usr"), ("True", "/opt"),]
+)
def test_simple_conditional(cli, datafiles, opt_option, expected_prefix):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'simple-condition')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "simple-condition")
# Test with the opt option set
- result = cli.run(project=project, silent=True, args=[
- '--option', 'opt', opt_option,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "opt",
+ opt_option,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('prefix') == expected_prefix
+ assert loaded.get_str("prefix") == expected_prefix
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("debug,logging,expected", [
- ('False', 'False', 'False'),
- ('True', 'False', 'False'),
- ('False', 'True', 'False'),
- ('True', 'True', 'True'),
-])
+@pytest.mark.parametrize(
+ "debug,logging,expected",
+ [
+ ("False", "False", "False"),
+ ("True", "False", "False"),
+ ("False", "True", "False"),
+ ("True", "True", "True"),
+ ],
+)
def test_nested_conditional(cli, datafiles, debug, logging, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'nested-condition')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "nested-condition")
# Test with the opt option set
- result = cli.run(project=project, silent=True, args=[
- '--option', 'debug', debug,
- '--option', 'logging', logging,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "debug",
+ debug,
+ "--option",
+ "logging",
+ logging,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('debug') == expected
+ assert loaded.get_str("debug") == expected
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("debug,logging,expected", [
- ('False', 'False', 'False'),
- ('True', 'False', 'False'),
- ('False', 'True', 'False'),
- ('True', 'True', 'True'),
-])
+@pytest.mark.parametrize(
+ "debug,logging,expected",
+ [
+ ("False", "False", "False"),
+ ("True", "False", "False"),
+ ("False", "True", "False"),
+ ("True", "True", "True"),
+ ],
+)
def test_compound_and_conditional(cli, datafiles, debug, logging, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'compound-and-condition')
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "compound-and-condition"
+ )
# Test with the opt option set
- result = cli.run(project=project, silent=True, args=[
- '--option', 'debug', debug,
- '--option', 'logging', logging,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "debug",
+ debug,
+ "--option",
+ "logging",
+ logging,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('debug') == expected
+ assert loaded.get_str("debug") == expected
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("debug,logging,expected", [
- ('False', 'False', 'False'),
- ('True', 'False', 'True'),
- ('False', 'True', 'True'),
- ('True', 'True', 'True'),
-])
+@pytest.mark.parametrize(
+ "debug,logging,expected",
+ [
+ ("False", "False", "False"),
+ ("True", "False", "True"),
+ ("False", "True", "True"),
+ ("True", "True", "True"),
+ ],
+)
def test_compound_or_conditional(cli, datafiles, debug, logging, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'compound-or-condition')
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "compound-or-condition"
+ )
# Test with the opt option set
- result = cli.run(project=project, silent=True, args=[
- '--option', 'debug', debug,
- '--option', 'logging', logging,
- 'show',
- '--deps', 'none',
- '--format', '%{vars}',
- 'element.bst'])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "debug",
+ debug,
+ "--option",
+ "logging",
+ logging,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{vars}",
+ "element.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- assert loaded.get_str('logging') == expected
+ assert loaded.get_str("logging") == expected
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("option,expected", [
- ('False', 'horsy'),
- ('True', 'pony'),
-])
+@pytest.mark.parametrize("option,expected", [("False", "horsy"), ("True", "pony"),])
def test_deep_nesting_level1(cli, datafiles, option, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'deep-nesting')
- result = cli.run(project=project, silent=True, args=[
- '--option', 'pony', option,
- 'show',
- '--deps', 'none',
- '--format', '%{public}',
- 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "deep-nesting")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "pony",
+ option,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{public}",
+ "element.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- shallow_list = loaded.get_sequence('shallow-nest')
+ shallow_list = loaded.get_sequence("shallow-nest")
first_dict = shallow_list.mapping_at(0)
- assert first_dict.get_str('animal') == expected
+ assert first_dict.get_str("animal") == expected
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("option,expected", [
- ('False', 'horsy'),
- ('True', 'pony'),
-])
+@pytest.mark.parametrize("option,expected", [("False", "horsy"), ("True", "pony"),])
def test_deep_nesting_level2(cli, datafiles, option, expected):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'deep-nesting')
- result = cli.run(project=project, silent=True, args=[
- '--option', 'pony', option,
- 'show',
- '--deps', 'none',
- '--format', '%{public}',
- 'element-deeper.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "deep-nesting")
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "pony",
+ option,
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{public}",
+ "element-deeper.bst",
+ ],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- shallow_list = loaded.get_sequence('deep-nest')
+ shallow_list = loaded.get_sequence("deep-nest")
deeper_list = shallow_list.sequence_at(0)
first_dict = deeper_list.mapping_at(0)
- assert first_dict.get_str('animal') == expected
+ assert first_dict.get_str("animal") == expected
diff --git a/tests/format/project.py b/tests/format/project.py
index 2e0a729dc..b9171865e 100644
--- a/tests/format/project.py
+++ b/tests/format/project.py
@@ -11,30 +11,27 @@ from tests.testutils import filetypegenerator
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_missing_project_conf(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_PROJECT_CONF)
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_missing_project_name(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "missingname")
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_missing_element(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "missing-element")
- result = cli.run(project=project, args=['show', 'manual.bst'])
+ result = cli.run(project=project, args=["show", "manual.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
# Assert that we have the expected provenance encoded into the error
@@ -44,7 +41,7 @@ def test_missing_element(cli, datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_missing_junction(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "missing-junction")
- result = cli.run(project=project, args=['show', 'manual.bst'])
+ result = cli.run(project=project, args=["show", "manual.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
# Assert that we have the expected provenance encoded into the error
@@ -54,176 +51,166 @@ def test_missing_junction(cli, datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_empty_project_name(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "emptyname")
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_SYMBOL_NAME)
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_invalid_project_name(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "invalidname")
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_SYMBOL_NAME)
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_invalid_yaml(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "invalid-yaml")
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_YAML)
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_load_default_project(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "default")
- result = cli.run(project=project, args=[
- 'show', '--format', '%{env}', 'manual.bst'
- ])
+ result = cli.run(project=project, args=["show", "--format", "%{env}", "manual.bst"])
result.assert_success()
# Read back some of our project defaults from the env
env = _yaml.load_data(result.output)
- assert env.get_str('USER') == "tomjon"
- assert env.get_str('TERM') == "dumb"
+ assert env.get_str("USER") == "tomjon"
+ assert env.get_str("TERM") == "dumb"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_load_project_from_subdir(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'project-from-subdir')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "project-from-subdir")
result = cli.run(
project=project,
- cwd=os.path.join(project, 'subdirectory'),
- args=['show', '--format', '%{env}', 'manual.bst'])
+ cwd=os.path.join(project, "subdirectory"),
+ args=["show", "--format", "%{env}", "manual.bst"],
+ )
result.assert_success()
# Read back some of our project defaults from the env
env = _yaml.load_data(result.output)
- assert env.get_str('USER') == "tomjon"
- assert env.get_str('TERM') == "dumb"
+ assert env.get_str("USER") == "tomjon"
+ assert env.get_str("TERM") == "dumb"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_override_project_path(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "overridepath")
- result = cli.run(project=project, args=[
- 'show', '--format', '%{env}', 'manual.bst'
- ])
+ result = cli.run(project=project, args=["show", "--format", "%{env}", "manual.bst"])
result.assert_success()
# Read back the overridden path
env = _yaml.load_data(result.output)
- assert env.get_str('PATH') == "/bin:/sbin"
+ assert env.get_str("PATH") == "/bin:/sbin"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_project_unsupported(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "unsupported")
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.UNSUPPORTED_PROJECT)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'element-path'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "element-path"))
def test_missing_element_path_directory(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['workspace', 'list'])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.MISSING_FILE)
+ result = cli.run(project=project, args=["workspace", "list"])
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'element-path'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "element-path"))
def test_element_path_not_a_directory(cli, datafiles):
project = str(datafiles)
- path = os.path.join(project, 'elements')
+ path = os.path.join(project, "elements")
for _file_type in filetypegenerator.generate_file_types(path):
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
if not os.path.isdir(path):
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND
+ )
else:
result.assert_success()
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'local-plugin'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "local-plugin"))
def test_missing_local_plugin_directory(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['workspace', 'list'])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.MISSING_FILE)
+ result = cli.run(project=project, args=["workspace", "list"])
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'local-plugin'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "local-plugin"))
def test_local_plugin_not_directory(cli, datafiles):
project = str(datafiles)
- path = os.path.join(project, 'plugins')
+ path = os.path.join(project, "plugins")
for _file_type in filetypegenerator.generate_file_types(path):
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
if not os.path.isdir(path):
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND
+ )
else:
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_plugin_load_allowed(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'plugin-allowed')
- result = cli.run(project=project, silent=True, args=[
- 'show', 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "plugin-allowed")
+ result = cli.run(project=project, silent=True, args=["show", "element.bst"])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_plugin_load_forbidden(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'plugin-forbidden')
- result = cli.run(project=project, silent=True, args=[
- 'show', 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "plugin-forbidden")
+ result = cli.run(project=project, silent=True, args=["show", "element.bst"])
result.assert_main_error(ErrorDomain.PLUGIN, None)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
def test_plugin_no_load_ref(cli, datafiles, ref_storage):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'plugin-no-load-ref')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "plugin-no-load-ref")
# Generate project with access to the noloadref plugin and project.refs enabled
#
config = {
- 'name': 'test',
- 'ref-storage': ref_storage,
- 'plugins': [
- {
- 'origin': 'local',
- 'path': 'plugins',
- 'sources': {
- 'noloadref': 0
- }
- }
- ]
+ "name": "test",
+ "ref-storage": ref_storage,
+ "plugins": [
+ {"origin": "local", "path": "plugins", "sources": {"noloadref": 0}}
+ ],
}
- _yaml.roundtrip_dump(config, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(config, os.path.join(project, "project.conf"))
- result = cli.run(project=project, silent=True, args=['show', 'noloadref.bst'])
+ result = cli.run(project=project, silent=True, args=["show", "noloadref.bst"])
# There is no error if project.refs is not in use, otherwise we
# assert our graceful failure
- if ref_storage == 'inline':
+ if ref_storage == "inline":
result.assert_success()
else:
- result.assert_main_error(ErrorDomain.SOURCE, 'unsupported-load-ref')
+ result.assert_main_error(ErrorDomain.SOURCE, "unsupported-load-ref")
@pytest.mark.datafiles(DATA_DIR)
def test_plugin_preflight_error(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'plugin-preflight-error')
- result = cli.run(project=project, args=['source', 'fetch', 'error.bst'])
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "plugin-preflight-error"
+ )
+ result = cli.run(project=project, args=["source", "fetch", "error.bst"])
result.assert_main_error(ErrorDomain.SOURCE, "the-preflight-error")
@pytest.mark.datafiles(DATA_DIR)
def test_duplicate_plugins(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'duplicate-plugins')
- result = cli.run(project=project, silent=True, args=[
- 'show', 'element.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "duplicate-plugins")
+ result = cli.run(project=project, silent=True, args=["show", "element.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_YAML)
@@ -232,42 +219,60 @@ def test_duplicate_plugins(cli, datafiles):
#
@pytest.mark.datafiles(DATA_DIR)
def test_project_refs_options(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'refs-options')
-
- result1 = cli.run(project=project, silent=True, args=[
- '--option', 'test', 'True',
- 'show',
- '--deps', 'none',
- '--format', '%{key}',
- 'target.bst'])
+ project = os.path.join(datafiles.dirname, datafiles.basename, "refs-options")
+
+ result1 = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "test",
+ "True",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{key}",
+ "target.bst",
+ ],
+ )
result1.assert_success()
- result2 = cli.run(project=project, silent=True, args=[
- '--option', 'test', 'False',
- 'show',
- '--deps', 'none',
- '--format', '%{key}',
- 'target.bst'])
+ result2 = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--option",
+ "test",
+ "False",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{key}",
+ "target.bst",
+ ],
+ )
result2.assert_success()
# Assert that the cache keys are different
assert result1.output != result2.output
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'element-path'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "element-path"))
def test_element_path_project_path_contains_symlinks(cli, datafiles, tmpdir):
real_project = str(datafiles)
- linked_project = os.path.join(str(tmpdir), 'linked')
+ linked_project = os.path.join(str(tmpdir), "linked")
os.symlink(real_project, linked_project)
- os.makedirs(os.path.join(real_project, 'elements'), exist_ok=True)
- with open(os.path.join(real_project, 'elements', 'element.bst'), 'w') as f:
+ os.makedirs(os.path.join(real_project, "elements"), exist_ok=True)
+ with open(os.path.join(real_project, "elements", "element.bst"), "w") as f:
f.write("kind: manual\n")
- result = cli.run(project=linked_project, args=['show', 'element.bst'])
+ result = cli.run(project=linked_project, args=["show", "element.bst"])
result.assert_success()
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_empty_depends(cli, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename, "empty-depends")
- result = cli.run(project=project, args=['show', 'manual.bst'])
+ result = cli.run(project=project, args=["show", "manual.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/format/project/plugin-no-load-ref/plugins/noloadref.py b/tests/format/project/plugin-no-load-ref/plugins/noloadref.py
index 0cc457f07..5d44cd5c7 100644
--- a/tests/format/project/plugin-no-load-ref/plugins/noloadref.py
+++ b/tests/format/project/plugin-no-load-ref/plugins/noloadref.py
@@ -6,7 +6,6 @@ from buildstream import Source, Consistency
# Use this to test that the core behaves as expected with such plugins.
#
class NoLoadRefSource(Source):
-
def configure(self, node):
pass
diff --git a/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py b/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
index 0eee463ad..f0d66e3c7 100644
--- a/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
+++ b/tests/format/project/plugin-preflight-error/errorplugin/preflighterror.py
@@ -2,15 +2,16 @@ from buildstream import Source, SourceError, Consistency
class PreflightErrorSource(Source):
-
def configure(self, node):
pass
def preflight(self):
# Raise a preflight error unconditionally
- raise SourceError("Unsatisfied requirements in preflight, raising this error",
- reason="the-preflight-error")
+ raise SourceError(
+ "Unsatisfied requirements in preflight, raising this error",
+ reason="the-preflight-error",
+ )
def get_unique_key(self):
return {}
diff --git a/tests/format/projectoverrides.py b/tests/format/projectoverrides.py
index 7932ffb4a..bba630c54 100644
--- a/tests/format/projectoverrides.py
+++ b/tests/format/projectoverrides.py
@@ -8,22 +8,23 @@ from buildstream.testing.runcli import cli # pylint: disable=unused-import
# Project directory
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project-overrides"
+ os.path.dirname(os.path.realpath(__file__)), "project-overrides"
)
@pytest.mark.datafiles(DATA_DIR)
def test_prepend_configure_commands(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'prepend-configure-commands')
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{config}',
- 'element.bst'])
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "prepend-configure-commands"
+ )
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{config}", "element.bst"],
+ )
result.assert_success()
loaded = _yaml.load_data(result.output)
- config_commands = loaded.get_str_list('configure-commands')
+ config_commands = loaded.get_str_list("configure-commands")
assert len(config_commands) == 3
assert config_commands[0] == 'echo "Hello World!"'
diff --git a/tests/format/variables.py b/tests/format/variables.py
index 4610e039f..31f969f4b 100644
--- a/tests/format/variables.py
+++ b/tests/format/variables.py
@@ -12,13 +12,10 @@ from buildstream.testing.runcli import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "variables"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "variables")
# List of BuildStream protected variables
-PROTECTED_VARIABLES = [('project-name'), ('element-name'), ('max-jobs')]
+PROTECTED_VARIABLES = [("project-name"), ("element-name"), ("max-jobs")]
def print_warning(msg):
@@ -29,24 +26,52 @@ def print_warning(msg):
###############################################################
# Test proper loading of some default commands from plugins #
###############################################################
-@pytest.mark.parametrize("target,varname,expected", [
- ('autotools.bst', 'make-install', "make -j1 DESTDIR=\"/buildstream-install\" install"),
- ('cmake.bst', 'cmake',
- "cmake -B_builddir -H\".\" -G\"Unix Makefiles\" " + "-DCMAKE_INSTALL_PREFIX:PATH=\"/usr\" \\\n" +
- "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\""),
- ('distutils.bst', 'python-install',
- "python3 ./setup.py install --prefix \"/usr\" \\\n" +
- "--root \"/buildstream-install\""),
- ('makemaker.bst', 'configure', "perl Makefile.PL PREFIX=/buildstream-install/usr"),
- ('modulebuild.bst', 'configure', "perl Build.PL --prefix \"/buildstream-install/usr\""),
- ('qmake.bst', 'make-install', "make -j1 INSTALL_ROOT=\"/buildstream-install\" install"),
-])
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'defaults'))
+@pytest.mark.parametrize(
+ "target,varname,expected",
+ [
+ (
+ "autotools.bst",
+ "make-install",
+ 'make -j1 DESTDIR="/buildstream-install" install',
+ ),
+ (
+ "cmake.bst",
+ "cmake",
+ 'cmake -B_builddir -H"." -G"Unix Makefiles" '
+ + '-DCMAKE_INSTALL_PREFIX:PATH="/usr" \\\n'
+ + '-DCMAKE_INSTALL_LIBDIR:PATH="lib"',
+ ),
+ (
+ "distutils.bst",
+ "python-install",
+ 'python3 ./setup.py install --prefix "/usr" \\\n'
+ + '--root "/buildstream-install"',
+ ),
+ (
+ "makemaker.bst",
+ "configure",
+ "perl Makefile.PL PREFIX=/buildstream-install/usr",
+ ),
+ (
+ "modulebuild.bst",
+ "configure",
+ 'perl Build.PL --prefix "/buildstream-install/usr"',
+ ),
+ (
+ "qmake.bst",
+ "make-install",
+ 'make -j1 INSTALL_ROOT="/buildstream-install" install',
+ ),
+ ],
+)
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "defaults"))
def test_defaults(cli, datafiles, target, varname, expected):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=[
- 'show', '--deps', 'none', '--format', '%{vars}', target
- ])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", target],
+ )
result.assert_success()
result_vars = _yaml.load_data(result.output)
assert result_vars.get_str(varname) == expected
@@ -55,130 +80,134 @@ def test_defaults(cli, datafiles, target, varname, expected):
################################################################
# Test overriding of variables to produce different commands #
################################################################
-@pytest.mark.parametrize("target,varname,expected", [
- ('autotools.bst', 'make-install', "make -j1 DESTDIR=\"/custom/install/root\" install"),
- ('cmake.bst', 'cmake',
- "cmake -B_builddir -H\".\" -G\"Ninja\" " + "-DCMAKE_INSTALL_PREFIX:PATH=\"/opt\" \\\n" +
- "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\""),
- ('distutils.bst', 'python-install',
- "python3 ./setup.py install --prefix \"/opt\" \\\n" +
- "--root \"/custom/install/root\""),
- ('makemaker.bst', 'configure', "perl Makefile.PL PREFIX=/custom/install/root/opt"),
- ('modulebuild.bst', 'configure', "perl Build.PL --prefix \"/custom/install/root/opt\""),
- ('qmake.bst', 'make-install', "make -j1 INSTALL_ROOT=\"/custom/install/root\" install"),
-])
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'overrides'))
+@pytest.mark.parametrize(
+ "target,varname,expected",
+ [
+ (
+ "autotools.bst",
+ "make-install",
+ 'make -j1 DESTDIR="/custom/install/root" install',
+ ),
+ (
+ "cmake.bst",
+ "cmake",
+ 'cmake -B_builddir -H"." -G"Ninja" '
+ + '-DCMAKE_INSTALL_PREFIX:PATH="/opt" \\\n'
+ + '-DCMAKE_INSTALL_LIBDIR:PATH="lib"',
+ ),
+ (
+ "distutils.bst",
+ "python-install",
+ 'python3 ./setup.py install --prefix "/opt" \\\n'
+ + '--root "/custom/install/root"',
+ ),
+ (
+ "makemaker.bst",
+ "configure",
+ "perl Makefile.PL PREFIX=/custom/install/root/opt",
+ ),
+ (
+ "modulebuild.bst",
+ "configure",
+ 'perl Build.PL --prefix "/custom/install/root/opt"',
+ ),
+ (
+ "qmake.bst",
+ "make-install",
+ 'make -j1 INSTALL_ROOT="/custom/install/root" install',
+ ),
+ ],
+)
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "overrides"))
def test_overrides(cli, datafiles, target, varname, expected):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=[
- 'show', '--deps', 'none', '--format', '%{vars}', target
- ])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{vars}", target],
+ )
result.assert_success()
result_vars = _yaml.load_data(result.output)
assert result_vars.get_str(varname) == expected
@pytest.mark.parametrize("element", ["manual.bst", "manual2.bst"])
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'missing_variables'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "missing_variables"))
def test_missing_variable(cli, datafiles, element):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=[
- 'show', '--deps', 'none', '--format', '%{config}', element
- ])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.UNRESOLVED_VARIABLE)
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{config}", element],
+ )
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.UNRESOLVED_VARIABLE)
@pytest.mark.timeout(3, method="signal")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'cyclic_variables'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "cyclic_variables"))
def test_cyclic_variables(cli, datafiles):
- print_warning("Performing cyclic test, if this test times out it will " +
- "exit the test sequence")
+ print_warning(
+ "Performing cyclic test, if this test times out it will "
+ + "exit the test sequence"
+ )
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=[
- "build", "cyclic.bst"
- ])
+ result = cli.run(project=project, silent=True, args=["build", "cyclic.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.RECURSIVE_VARIABLE)
-@pytest.mark.parametrize('protected_var', PROTECTED_VARIABLES)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'protected-vars'))
+@pytest.mark.parametrize("protected_var", PROTECTED_VARIABLES)
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "protected-vars"))
def test_use_of_protected_var_project_conf(cli, datafiles, protected_var):
project = str(datafiles)
- conf = {
- 'name': 'test',
- 'variables': {
- protected_var: 'some-value'
- }
- }
- _yaml.roundtrip_dump(conf, os.path.join(project, 'project.conf'))
+ conf = {"name": "test", "variables": {protected_var: "some-value"}}
+ _yaml.roundtrip_dump(conf, os.path.join(project, "project.conf"))
element = {
- 'kind': 'import',
- 'sources': [
- {
- 'kind': 'local',
- 'path': 'foo.txt'
- }
- ],
+ "kind": "import",
+ "sources": [{"kind": "local", "path": "foo.txt"}],
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
+ result = cli.run(project=project, args=["build", "target.bst"])
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.PROTECTED_VARIABLE_REDEFINED
+ )
-@pytest.mark.parametrize('protected_var', PROTECTED_VARIABLES)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'protected-vars'))
+@pytest.mark.parametrize("protected_var", PROTECTED_VARIABLES)
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "protected-vars"))
def test_use_of_protected_var_element_overrides(cli, datafiles, protected_var):
project = str(datafiles)
conf = {
- 'name': 'test',
- 'elements': {
- 'manual': {
- 'variables': {
- protected_var: 'some-value'
- }
- }
- }
+ "name": "test",
+ "elements": {"manual": {"variables": {protected_var: "some-value"}}},
}
- _yaml.roundtrip_dump(conf, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(conf, os.path.join(project, "project.conf"))
element = {
- 'kind': 'manual',
- 'sources': [
- {
- 'kind': 'local',
- 'path': 'foo.txt'
- }
- ],
+ "kind": "manual",
+ "sources": [{"kind": "local", "path": "foo.txt"}],
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
+ result = cli.run(project=project, args=["build", "target.bst"])
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.PROTECTED_VARIABLE_REDEFINED
+ )
-@pytest.mark.parametrize('protected_var', PROTECTED_VARIABLES)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'protected-vars'))
+@pytest.mark.parametrize("protected_var", PROTECTED_VARIABLES)
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "protected-vars"))
def test_use_of_protected_var_in_element(cli, datafiles, protected_var):
project = str(datafiles)
element = {
- 'kind': 'import',
- 'sources': [
- {
- 'kind': 'local',
- 'path': 'foo.txt'
- }
- ],
- 'variables': {
- protected_var: 'some-value'
- }
+ "kind": "import",
+ "sources": [{"kind": "local", "path": "foo.txt"}],
+ "variables": {protected_var: "some-value"},
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROTECTED_VARIABLE_REDEFINED)
+ result = cli.run(project=project, args=["build", "target.bst"])
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.PROTECTED_VARIABLE_REDEFINED
+ )
diff --git a/tests/frontend/__init__.py b/tests/frontend/__init__.py
index f1c8c41b8..ad1f9ea92 100644
--- a/tests/frontend/__init__.py
+++ b/tests/frontend/__init__.py
@@ -5,6 +5,6 @@ from buildstream import _yaml
# Shared function to configure the project.conf inline
#
def configure_project(path, config):
- config['name'] = 'test'
- config['element-path'] = 'elements'
- _yaml.roundtrip_dump(config, os.path.join(path, 'project.conf'))
+ config["name"] = "test"
+ config["element-path"] = "elements"
+ _yaml.roundtrip_dump(config, os.path.join(path, "project.conf"))
diff --git a/tests/frontend/artifact_delete.py b/tests/frontend/artifact_delete.py
index a9f5ec6da..9389788b3 100644
--- a/tests/frontend/artifact_delete.py
+++ b/tests/frontend/artifact_delete.py
@@ -28,10 +28,7 @@ from tests.testutils import create_artifact_share
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# Test that we can delete the artifact of the element which corresponds
@@ -39,60 +36,62 @@ DATA_DIR = os.path.join(
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_delete_element(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# Build the element and ensure it's cached
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
- assert cli.get_element_state(project, element) == 'cached'
+ assert cli.get_element_state(project, element) == "cached"
- result = cli.run(project=project, args=['artifact', 'delete', element])
+ result = cli.run(project=project, args=["artifact", "delete", element])
result.assert_success()
- assert cli.get_element_state(project, element) != 'cached'
+ assert cli.get_element_state(project, element) != "cached"
# Test that we can delete an artifact by specifying its ref.
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_delete_artifact(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# Configure a local cache
- local_cache = os.path.join(str(tmpdir), 'cache')
- cli.configure({'cachedir': local_cache})
+ local_cache = os.path.join(str(tmpdir), "cache")
+ cli.configure({"cachedir": local_cache})
# First build an element so that we can find its artifact
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
# Obtain the artifact ref
cache_key = cli.get_element_key(project, element)
- artifact = os.path.join('test', os.path.splitext(element)[0], cache_key)
+ artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)
# Explicitly check that the ARTIFACT exists in the cache
- assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact))
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))
# Delete the artifact
- result = cli.run(project=project, args=['artifact', 'delete', artifact])
+ result = cli.run(project=project, args=["artifact", "delete", artifact])
result.assert_success()
# Check that the ARTIFACT is no longer in the cache
- assert not os.path.exists(os.path.join(local_cache, 'cas', 'refs', 'heads', artifact))
+ assert not os.path.exists(
+ os.path.join(local_cache, "cas", "refs", "heads", artifact)
+ )
# Test the `bst artifact delete` command with multiple, different arguments.
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
- dep = 'compose-all.bst'
+ element = "target.bst"
+ dep = "compose-all.bst"
# Configure a local cache
- local_cache = os.path.join(str(tmpdir), 'cache')
- cli.configure({'cachedir': local_cache})
+ local_cache = os.path.join(str(tmpdir), "cache")
+ cli.configure({"cachedir": local_cache})
# First build an element so that we can find its artifact
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
assert cli.get_element_states(project, [element, dep], deps="none") == {
element: "cached",
@@ -101,20 +100,20 @@ def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
# Obtain the artifact ref
cache_key = cli.get_element_key(project, element)
- artifact = os.path.join('test', os.path.splitext(element)[0], cache_key)
+ artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)
# Explicitly check that the ARTIFACT exists in the cache
- assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact))
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))
# Delete the artifact
- result = cli.run(project=project, args=['artifact', 'delete', artifact, dep])
+ result = cli.run(project=project, args=["artifact", "delete", artifact, dep])
result.assert_success()
# Check that the ARTIFACT is no longer in the cache
- assert not os.path.exists(os.path.join(local_cache, 'artifacts', artifact))
+ assert not os.path.exists(os.path.join(local_cache, "artifacts", artifact))
# Check that the dependency ELEMENT is no longer cached
- assert cli.get_element_state(project, dep) != 'cached'
+ assert cli.get_element_state(project, dep) != "cached"
# Test that we receive the appropriate stderr when we try to delete an artifact
@@ -122,19 +121,19 @@ def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_delete_unbuilt_artifact(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# delete it, just in case it's there
- _ = cli.run(project=project, args=['artifact', 'delete', element])
+ _ = cli.run(project=project, args=["artifact", "delete", element])
# Ensure the element is not cached
- assert cli.get_element_state(project, element) != 'cached'
+ assert cli.get_element_state(project, element) != "cached"
    # Now try and remove it again (now we know it's not there)
- result = cli.run(project=project, args=['artifact', 'delete', element])
+ result = cli.run(project=project, args=["artifact", "delete", element])
cache_key = cli.get_element_key(project, element)
- artifact = os.path.join('test', os.path.splitext(element)[0], cache_key)
+ artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)
expected_err = "WARNING Could not find ref '{}'".format(artifact)
assert expected_err in result.stderr
@@ -144,122 +143,131 @@ def test_artifact_delete_unbuilt_artifact(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_delete_pulled_artifact_without_buildtree(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# Set up remote and local shares
- local_cache = os.path.join(str(tmpdir), 'artifacts')
- with create_artifact_share(os.path.join(str(tmpdir), 'remote')) as remote:
- cli.configure({
- 'artifacts': {'url': remote.repo, 'push': True},
- 'cachedir': local_cache,
- })
+ local_cache = os.path.join(str(tmpdir), "artifacts")
+ with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
+ cli.configure(
+ {"artifacts": {"url": remote.repo, "push": True}, "cachedir": local_cache,}
+ )
# Build the element
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
# Make sure it's in the share
- assert remote.get_artifact(cli.get_artifact_name(project, 'test', element))
+ assert remote.get_artifact(cli.get_artifact_name(project, "test", element))
# Delete and then pull the artifact (without its buildtree)
- result = cli.run(project=project, args=['artifact', 'delete', element])
+ result = cli.run(project=project, args=["artifact", "delete", element])
result.assert_success()
- assert cli.get_element_state(project, element) != 'cached'
- result = cli.run(project=project, args=['artifact', 'pull', element])
+ assert cli.get_element_state(project, element) != "cached"
+ result = cli.run(project=project, args=["artifact", "pull", element])
result.assert_success()
- assert cli.get_element_state(project, element) == 'cached'
+ assert cli.get_element_state(project, element) == "cached"
    # Now delete it again (it should have been pulled without the buildtree, but
    # a digest of the buildtree is pointed to in the artifact's metadata)
- result = cli.run(project=project, args=['artifact', 'delete', element])
+ result = cli.run(project=project, args=["artifact", "delete", element])
result.assert_success()
- assert cli.get_element_state(project, element) != 'cached'
+ assert cli.get_element_state(project, element) != "cached"
# Test that we can delete the build deps of an element
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_delete_elements_build_deps(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# Build the element and ensure it's cached
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
# Assert element and build deps are cached
- assert cli.get_element_state(project, element) == 'cached'
- bdep_states = cli.get_element_states(project, [element], deps='build')
+ assert cli.get_element_state(project, element) == "cached"
+ bdep_states = cli.get_element_states(project, [element], deps="build")
for state in bdep_states.values():
- assert state == 'cached'
+ assert state == "cached"
- result = cli.run(project=project, args=['artifact', 'delete', '--deps', 'build', element])
+ result = cli.run(
+ project=project, args=["artifact", "delete", "--deps", "build", element]
+ )
result.assert_success()
# Assert that the build deps have been deleted and that the artifact remains cached
- assert cli.get_element_state(project, element) == 'cached'
- bdep_states = cli.get_element_states(project, [element], deps='build')
+ assert cli.get_element_state(project, element) == "cached"
+ bdep_states = cli.get_element_states(project, [element], deps="build")
for state in bdep_states.values():
- assert state != 'cached'
+ assert state != "cached"
# Test that we can delete the build deps of an artifact by providing an artifact ref
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_delete_artifacts_build_deps(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# Configure a local cache
- local_cache = os.path.join(str(tmpdir), 'cache')
- cli.configure({'cachedir': local_cache})
+ local_cache = os.path.join(str(tmpdir), "cache")
+ cli.configure({"cachedir": local_cache})
# First build an element so that we can find its artifact
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
# Obtain the artifact ref
cache_key = cli.get_element_key(project, element)
- artifact = os.path.join('test', os.path.splitext(element)[0], cache_key)
+ artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)
# Explicitly check that the ARTIFACT exists in the cache
- assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact))
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))
# get the artifact refs of the build dependencies
bdep_refs = []
- bdep_states = cli.get_element_states(project, [element], deps='build')
+ bdep_states = cli.get_element_states(project, [element], deps="build")
for bdep in bdep_states.keys():
- bdep_refs.append(os.path.join('test', _get_normal_name(bdep), cli.get_element_key(project, bdep)))
+ bdep_refs.append(
+ os.path.join(
+ "test", _get_normal_name(bdep), cli.get_element_key(project, bdep)
+ )
+ )
# Assert build dependencies are cached
for ref in bdep_refs:
- assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', ref))
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", ref))
# Delete the artifact
- result = cli.run(project=project, args=['artifact', 'delete', '--deps', 'build', artifact])
+ result = cli.run(
+ project=project, args=["artifact", "delete", "--deps", "build", artifact]
+ )
result.assert_success()
# Check that the artifact's build deps are no longer in the cache
# Assert build dependencies have been deleted and that the artifact remains
for ref in bdep_refs:
- assert not os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', ref))
- assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact))
+ assert not os.path.exists(os.path.join(local_cache, "artifacts", "refs", ref))
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact))
# Test that `--deps all` option fails if an artifact ref is specified
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_delete_artifact_with_deps_all_fails(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# First build an element so that we can find its artifact
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
# Obtain the artifact ref
cache_key = cli.get_element_key(project, element)
- artifact = os.path.join('test', os.path.splitext(element)[0], cache_key)
+ artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)
# Try to delete the artifact with all of its dependencies
- result = cli.run(project=project, args=['artifact', 'delete', '--deps', 'all', artifact])
+ result = cli.run(
+ project=project, args=["artifact", "delete", "--deps", "all", artifact]
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Error: '--deps all' is not supported for artifact refs" in result.stderr
diff --git a/tests/frontend/artifact_list_contents.py b/tests/frontend/artifact_list_contents.py
index 626eb3fa7..ddd2d50a6 100644
--- a/tests/frontend/artifact_list_contents.py
+++ b/tests/frontend/artifact_list_contents.py
@@ -25,10 +25,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
@pytest.mark.datafiles(DATA_DIR)
@@ -36,16 +33,15 @@ def test_artifact_list_exact_contents_element(cli, datafiles):
project = str(datafiles)
# Ensure we have an artifact to read
- result = cli.run(project=project, args=['build', 'import-bin.bst'])
+ result = cli.run(project=project, args=["build", "import-bin.bst"])
assert result.exit_code == 0
# List the contents via the element name
- result = cli.run(project=project, args=['artifact', 'list-contents', 'import-bin.bst'])
+ result = cli.run(
+ project=project, args=["artifact", "list-contents", "import-bin.bst"]
+ )
assert result.exit_code == 0
- expected_output = ("import-bin.bst:\n"
- "\tusr\n"
- "\tusr/bin\n"
- "\tusr/bin/hello\n\n")
+ expected_output = "import-bin.bst:\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n"
assert expected_output in result.output
@@ -54,20 +50,21 @@ def test_artifact_list_exact_contents_ref(cli, datafiles):
project = str(datafiles)
# Get the cache key of our test element
- key = cli.get_element_key(project, 'import-bin.bst')
+ key = cli.get_element_key(project, "import-bin.bst")
# Ensure we have an artifact to read
- result = cli.run(project=project, args=['build', 'import-bin.bst'])
+ result = cli.run(project=project, args=["build", "import-bin.bst"])
assert result.exit_code == 0
# List the contents via the key
- result = cli.run(project=project, args=['artifact', 'list-contents', 'test/import-bin/' + key])
+ result = cli.run(
+ project=project, args=["artifact", "list-contents", "test/import-bin/" + key]
+ )
assert result.exit_code == 0
- expected_output = ("test/import-bin/" + key + ":\n"
- "\tusr\n"
- "\tusr/bin\n"
- "\tusr/bin/hello\n\n")
+ expected_output = (
+ "test/import-bin/" + key + ":\n" "\tusr\n" "\tusr/bin\n" "\tusr/bin/hello\n\n"
+ )
assert expected_output in result.output
@@ -76,23 +73,25 @@ def test_artifact_list_exact_contents_glob(cli, datafiles):
project = str(datafiles)
# Ensure we have an artifact to read
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
assert result.exit_code == 0
# List the contents via glob
- result = cli.run(project=project, args=['artifact', 'list-contents', 'test/*'])
+ result = cli.run(project=project, args=["artifact", "list-contents", "test/*"])
assert result.exit_code == 0
    # get the cache keys for each element in the glob
- import_bin_key = cli.get_element_key(project, 'import-bin.bst')
- import_dev_key = cli.get_element_key(project, 'import-dev.bst')
- compose_all_key = cli.get_element_key(project, 'compose-all.bst')
- target_key = cli.get_element_key(project, 'target.bst')
-
- expected_artifacts = ["test/import-bin/" + import_bin_key,
- "test/import-dev/" + import_dev_key,
- "test/compose-all/" + compose_all_key,
- "test/target/" + target_key]
+ import_bin_key = cli.get_element_key(project, "import-bin.bst")
+ import_dev_key = cli.get_element_key(project, "import-dev.bst")
+ compose_all_key = cli.get_element_key(project, "compose-all.bst")
+ target_key = cli.get_element_key(project, "target.bst")
+
+ expected_artifacts = [
+ "test/import-bin/" + import_bin_key,
+ "test/import-dev/" + import_dev_key,
+ "test/compose-all/" + compose_all_key,
+ "test/target/" + target_key,
+ ]
for artifact in expected_artifacts:
assert artifact in result.output
@@ -103,16 +102,20 @@ def test_artifact_list_exact_contents_element_long(cli, datafiles):
project = str(datafiles)
# Ensure we have an artifact to read
- result = cli.run(project=project, args=['build', 'import-bin.bst'])
+ result = cli.run(project=project, args=["build", "import-bin.bst"])
assert result.exit_code == 0
# List the contents via the element name
- result = cli.run(project=project, args=['artifact', 'list-contents', '--long', 'import-bin.bst'])
+ result = cli.run(
+ project=project, args=["artifact", "list-contents", "--long", "import-bin.bst"]
+ )
assert result.exit_code == 0
- expected_output = ("import-bin.bst:\n"
- "\tdrwxr-xr-x dir 1 usr\n"
- "\tdrwxr-xr-x dir 1 usr/bin\n"
- "\t-rw-r--r-- reg 107 usr/bin/hello\n\n")
+ expected_output = (
+ "import-bin.bst:\n"
+ "\tdrwxr-xr-x dir 1 usr\n"
+ "\tdrwxr-xr-x dir 1 usr/bin\n"
+ "\t-rw-r--r-- reg 107 usr/bin/hello\n\n"
+ )
assert expected_output in result.output
@@ -122,19 +125,24 @@ def test_artifact_list_exact_contents_ref_long(cli, datafiles):
project = str(datafiles)
# Get the cache key of our test element
- key = cli.get_element_key(project, 'import-bin.bst')
+ key = cli.get_element_key(project, "import-bin.bst")
# Ensure we have an artifact to read
- result = cli.run(project=project, args=['build', 'import-bin.bst'])
+ result = cli.run(project=project, args=["build", "import-bin.bst"])
assert result.exit_code == 0
# List the contents via the key
- result = cli.run(project=project, args=['artifact', 'list-contents', '-l', 'test/import-bin/' + key])
+ result = cli.run(
+ project=project,
+ args=["artifact", "list-contents", "-l", "test/import-bin/" + key],
+ )
assert result.exit_code == 0
- expected_output = (" test/import-bin/" + key + ":\n"
- "\tdrwxr-xr-x dir 1 usr\n"
- "\tdrwxr-xr-x dir 1 usr/bin\n"
- "\t-rw-r--r-- reg 107 usr/bin/hello\n\n")
+ expected_output = (
+ " test/import-bin/" + key + ":\n"
+ "\tdrwxr-xr-x dir 1 usr\n"
+ "\tdrwxr-xr-x dir 1 usr/bin\n"
+ "\t-rw-r--r-- reg 107 usr/bin/hello\n\n"
+ )
assert expected_output in result.output
diff --git a/tests/frontend/artifact_log.py b/tests/frontend/artifact_log.py
index 39c9458ad..44c35aa3d 100644
--- a/tests/frontend/artifact_log.py
+++ b/tests/frontend/artifact_log.py
@@ -25,10 +25,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
@pytest.mark.datafiles(DATA_DIR)
@@ -36,32 +33,40 @@ def test_artifact_log(cli, datafiles):
project = str(datafiles)
# Get the cache key of our test element
- result = cli.run(project=project, silent=True, args=[
- '--no-colors',
- 'show', '--deps', 'none', '--format', '%{full-key}',
- 'target.bst'
- ])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "--no-colors",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{full-key}",
+ "target.bst",
+ ],
+ )
key = result.output.strip()
# Ensure we have an artifact to read
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
assert result.exit_code == 0
# Read the log via the element name
- result = cli.run(project=project, args=['artifact', 'log', 'target.bst'])
+ result = cli.run(project=project, args=["artifact", "log", "target.bst"])
assert result.exit_code == 0
log = result.output
# Assert that there actually was a log file
- assert log != ''
+ assert log != ""
# Read the log via the key
- result = cli.run(project=project, args=['artifact', 'log', 'test/target/' + key])
+ result = cli.run(project=project, args=["artifact", "log", "test/target/" + key])
assert result.exit_code == 0
assert log == result.output
# Read the log via glob
- result = cli.run(project=project, args=['artifact', 'log', 'test/target/*'])
+ result = cli.run(project=project, args=["artifact", "log", "test/target/*"])
assert result.exit_code == 0
# The artifact is cached under both a strong key and a weak key
assert log == result.output
@@ -72,7 +77,7 @@ def test_artifact_log_files(cli, datafiles):
project = str(datafiles)
# Ensure we have an artifact to read
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
assert result.exit_code == 0
logfiles = os.path.join(project, "logfiles")
@@ -84,16 +89,19 @@ def test_artifact_log_files(cli, datafiles):
assert not os.path.exists(import_bin)
# Run the command and ensure the file now exists
- result = cli.run(project=project, args=['artifact', 'log', '--out', logfiles, 'target.bst', 'import-bin.bst'])
+ result = cli.run(
+ project=project,
+ args=["artifact", "log", "--out", logfiles, "target.bst", "import-bin.bst"],
+ )
assert result.exit_code == 0
assert os.path.exists(logfiles)
assert os.path.exists(target)
assert os.path.exists(import_bin)
# Ensure the file contains the logs by checking for the LOG line
- with open(target, 'r') as f:
+ with open(target, "r") as f:
data = f.read()
assert "LOG target.bst" in data
- with open(import_bin, 'r') as f:
+ with open(import_bin, "r") as f:
data = f.read()
assert "LOG import-bin.bst" in data
diff --git a/tests/frontend/artifact_show.py b/tests/frontend/artifact_show.py
index 76ea93d63..c47222e18 100644
--- a/tests/frontend/artifact_show.py
+++ b/tests/frontend/artifact_show.py
@@ -27,110 +27,108 @@ from tests.testutils import create_artifact_share
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# Test artifact show
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_show_element_name(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
- result = cli.run(project=project, args=['artifact', 'show', element])
+ result = cli.run(project=project, args=["artifact", "show", element])
result.assert_success()
- assert 'not cached {}'.format(element) in result.output
+ assert "not cached {}".format(element) in result.output
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'show', element])
+ result = cli.run(project=project, args=["artifact", "show", element])
result.assert_success()
- assert 'cached {}'.format(element) in result.output
+ assert "cached {}".format(element) in result.output
# Test artifact show on a failed element
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_show_failed_element(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'manual.bst'
+ element = "manual.bst"
- result = cli.run(project=project, args=['artifact', 'show', element])
+ result = cli.run(project=project, args=["artifact", "show", element])
result.assert_success()
- assert 'not cached {}'.format(element) in result.output
+ assert "not cached {}".format(element) in result.output
- result = cli.run(project=project, args=['build', element])
- result.assert_task_error(ErrorDomain.SANDBOX, 'missing-command')
+ result = cli.run(project=project, args=["build", element])
+ result.assert_task_error(ErrorDomain.SANDBOX, "missing-command")
- result = cli.run(project=project, args=['artifact', 'show', element])
+ result = cli.run(project=project, args=["artifact", "show", element])
result.assert_success()
- assert 'failed {}'.format(element) in result.output
+ assert "failed {}".format(element) in result.output
# Test artifact show with a deleted dependency
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_show_element_missing_deps(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
- dependency = 'import-bin.bst'
+ element = "target.bst"
+ dependency = "import-bin.bst"
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'delete', dependency])
+ result = cli.run(project=project, args=["artifact", "delete", dependency])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'show', '--deps', 'all', element])
+ result = cli.run(
+ project=project, args=["artifact", "show", "--deps", "all", element]
+ )
result.assert_success()
- assert 'not cached {}'.format(dependency) in result.output
- assert 'cached {}'.format(element) in result.output
+ assert "not cached {}".format(dependency) in result.output
+ assert "cached {}".format(element) in result.output
# Test artifact show with artifact ref
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_show_artifact_ref(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
cache_key = cli.get_element_key(project, element)
- artifact_ref = 'test/target/' + cache_key
+ artifact_ref = "test/target/" + cache_key
- result = cli.run(project=project, args=['artifact', 'show', artifact_ref])
+ result = cli.run(project=project, args=["artifact", "show", artifact_ref])
result.assert_success()
- assert 'cached {}'.format(artifact_ref) in result.output
+ assert "cached {}".format(artifact_ref) in result.output
# Test artifact show artifact in remote
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# Set up remote and local shares
- local_cache = os.path.join(str(tmpdir), 'artifacts')
- with create_artifact_share(os.path.join(str(tmpdir), 'remote')) as remote:
- cli.configure({
- 'artifacts': {'url': remote.repo, 'push': True},
- 'cachedir': local_cache,
- })
+ local_cache = os.path.join(str(tmpdir), "artifacts")
+ with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
+ cli.configure(
+ {"artifacts": {"url": remote.repo, "push": True}, "cachedir": local_cache,}
+ )
# Build the element
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
# Make sure it's in the share
- assert remote.get_artifact(cli.get_artifact_name(project, 'test', element))
+ assert remote.get_artifact(cli.get_artifact_name(project, "test", element))
# Delete the artifact from the local cache
- result = cli.run(project=project, args=['artifact', 'delete', element])
+ result = cli.run(project=project, args=["artifact", "delete", element])
result.assert_success()
- assert cli.get_element_state(project, element) != 'cached'
+ assert cli.get_element_state(project, element) != "cached"
- result = cli.run(project=project, args=['artifact', 'show', element])
+ result = cli.run(project=project, args=["artifact", "show", element])
result.assert_success()
- assert 'available {}'.format(element) in result.output
+ assert "available {}".format(element) in result.output
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index a24446d61..f3080269d 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -21,69 +21,69 @@ from tests.testutils import generate_junction, create_artifact_share
from . import configure_project
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
def strict_args(args, strict):
if strict != "strict":
- return ['--no-strict', *args]
+ return ["--no-strict", *args]
return args
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("strict,hardlinks", [
- ("strict", "copies"),
- ("strict", "hardlinks"),
- ("non-strict", "copies"),
- ("non-strict", "hardlinks"),
-])
+@pytest.mark.parametrize(
+ "strict,hardlinks",
+ [
+ ("strict", "copies"),
+ ("strict", "hardlinks"),
+ ("non-strict", "copies"),
+ ("non-strict", "hardlinks"),
+ ],
+)
def test_build_checkout(datafiles, cli, strict, hardlinks):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
# First build it
- result = cli.run(project=project, args=strict_args(['build', 'target.bst'], strict))
+ result = cli.run(project=project, args=strict_args(["build", "target.bst"], strict))
result.assert_success()
# Assert that after a successful build, the builddir is empty
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
# Prepare checkout args
- checkout_args = strict_args(['artifact', 'checkout'], strict)
+ checkout_args = strict_args(["artifact", "checkout"], strict)
if hardlinks == "hardlinks":
- checkout_args += ['--hardlinks']
- checkout_args += ['target.bst', '--directory', checkout]
+ checkout_args += ["--hardlinks"]
+ checkout_args += ["target.bst", "--directory", checkout]
# Now check it out
result = cli.run(project=project, args=checkout_args)
result.assert_success()
# Check that the executable hello file is found in the checkout
- filename = os.path.join(checkout, 'usr', 'bin', 'hello')
+ filename = os.path.join(checkout, "usr", "bin", "hello")
assert os.path.exists(filename)
- filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
+ filename = os.path.join(checkout, "usr", "include", "pony.h")
assert os.path.exists(filename)
@pytest.mark.datafiles(DATA_DIR + "_world")
def test_build_default_all(datafiles, cli):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=['build'])
+ result = cli.run(project=project, silent=True, args=["build"])
result.assert_success()
target_dir = os.path.join(cli.directory, DATA_DIR + "_world", "elements")
output_dir = os.path.join(cli.directory, "logs", "test")
- expected = subprocess.Popen(('ls', target_dir), stdout=subprocess.PIPE)
+ expected = subprocess.Popen(("ls", target_dir), stdout=subprocess.PIPE)
expected = subprocess.check_output(("wc", "-w"), stdin=expected.stdout)
- results = subprocess.Popen(('ls', output_dir), stdout=subprocess.PIPE)
+ results = subprocess.Popen(("ls", output_dir), stdout=subprocess.PIPE)
results = subprocess.check_output(("wc", "-w"), stdin=results.stdout)
assert results == expected
@@ -92,7 +92,7 @@ def test_build_default_all(datafiles, cli):
@pytest.mark.datafiles(DATA_DIR + "_default")
def test_build_default(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=['build'])
+ result = cli.run(project=project, silent=True, args=["build"])
result.assert_success()
results = cli.get_element_state(project, "target2.bst")
@@ -101,60 +101,63 @@ def test_build_default(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("strict,hardlinks", [
- ("non-strict", "hardlinks"),
-])
+@pytest.mark.parametrize("strict,hardlinks", [("non-strict", "hardlinks"),])
def test_build_invalid_suffix(datafiles, cli, strict, hardlinks):
project = str(datafiles)
- result = cli.run(project=project, args=strict_args(['build', 'target.foo'], strict))
+ result = cli.run(project=project, args=strict_args(["build", "target.foo"], strict))
result.assert_main_error(ErrorDomain.LOAD, "bad-element-suffix")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("strict,hardlinks", [
- ("non-strict", "hardlinks"),
-])
+@pytest.mark.parametrize("strict,hardlinks", [("non-strict", "hardlinks"),])
def test_build_invalid_suffix_dep(datafiles, cli, strict, hardlinks):
project = str(datafiles)
# target2.bst depends on an element called target.foo
- result = cli.run(project=project, args=strict_args(['build', 'target2.bst'], strict))
+ result = cli.run(
+ project=project, args=strict_args(["build", "target2.bst"], strict)
+ )
result.assert_main_error(ErrorDomain.LOAD, "bad-element-suffix")
-@pytest.mark.skipif(IS_WINDOWS, reason='Not available on Windows')
+@pytest.mark.skipif(IS_WINDOWS, reason="Not available on Windows")
@pytest.mark.datafiles(DATA_DIR)
def test_build_invalid_filename_chars(datafiles, cli):
project = str(datafiles)
- element_name = 'invalid-chars|<>-in-name.bst'
+ element_name = "invalid-chars|<>-in-name.bst"
# The name of this file contains characters that are not allowed by
# BuildStream, using it should raise a warning.
element = {
- 'kind': 'stack',
+ "kind": "stack",
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'elements', element_name))
+ _yaml.roundtrip_dump(element, os.path.join(project, "elements", element_name))
- result = cli.run(project=project, args=strict_args(['build', element_name], 'non-strict'))
+ result = cli.run(
+ project=project, args=strict_args(["build", element_name], "non-strict")
+ )
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
-@pytest.mark.skipif(IS_WINDOWS, reason='Not available on Windows')
+@pytest.mark.skipif(IS_WINDOWS, reason="Not available on Windows")
@pytest.mark.datafiles(DATA_DIR)
def test_build_invalid_filename_chars_dep(datafiles, cli):
project = str(datafiles)
- element_name = 'invalid-chars|<>-in-name.bst'
+ element_name = "invalid-chars|<>-in-name.bst"
# The name of this file contains characters that are not allowed by
# BuildStream, and is listed as a dependency of 'invalid-chars-in-dep.bst'.
# This should also raise a warning.
element = {
- 'kind': 'stack',
+ "kind": "stack",
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'elements', element_name))
+ _yaml.roundtrip_dump(element, os.path.join(project, "elements", element_name))
- result = cli.run(project=project, args=strict_args(['build', 'invalid-chars-in-dep.bst'], 'non-strict'))
+ result = cli.run(
+ project=project,
+ args=strict_args(["build", "invalid-chars-in-dep.bst"], "non-strict"),
+ )
result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
@@ -162,39 +165,49 @@ def test_build_invalid_filename_chars_dep(datafiles, cli):
@pytest.mark.parametrize("deps", [("run"), ("none"), ("build"), ("all")])
def test_build_checkout_deps(datafiles, cli, deps):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
element_name = "checkout-deps.bst"
# First build it
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
# Assert that after a successful build, the builddir is empty
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
# Now check it out
- result = cli.run(project=project, args=['artifact', 'checkout', element_name,
- '--deps', deps, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ element_name,
+ "--deps",
+ deps,
+ "--directory",
+ checkout,
+ ],
+ )
result.assert_success()
# Verify output of this element
- filename = os.path.join(checkout, 'etc', 'buildstream', 'config')
+ filename = os.path.join(checkout, "etc", "buildstream", "config")
if deps == "build":
assert not os.path.exists(filename)
else:
assert os.path.exists(filename)
# Verify output of this element's build dependencies
- filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
+ filename = os.path.join(checkout, "usr", "include", "pony.h")
if deps in ["build", "all"]:
assert os.path.exists(filename)
else:
assert not os.path.exists(filename)
# Verify output of this element's runtime dependencies
- filename = os.path.join(checkout, 'usr', 'bin', 'hello')
+ filename = os.path.join(checkout, "usr", "bin", "hello")
if deps in ["run", "all"]:
assert os.path.exists(filename)
else:
@@ -204,162 +217,203 @@ def test_build_checkout_deps(datafiles, cli, deps):
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_unbuilt(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
# Check that checking out an unbuilt element fails nicely
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkout],
+ )
result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_compression_no_tar(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout.tar')
+ checkout = os.path.join(cli.directory, "checkout.tar")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- checkout_args = ['artifact', 'checkout', '--directory', checkout, '--compression', 'gz', 'target.bst']
+ checkout_args = [
+ "artifact",
+ "checkout",
+ "--directory",
+ checkout,
+ "--compression",
+ "gz",
+ "target.bst",
+ ]
result = cli.run(project=project, args=checkout_args)
- assert "ERROR: --compression can only be provided if --tar is provided" in result.stderr
+ assert (
+ "ERROR: --compression can only be provided if --tar is provided"
+ in result.stderr
+ )
assert result.exit_code != 0
+
# If we don't support the extension, we default to an uncompressed tarball
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tar_with_unconventional_name(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout.foo')
+ checkout = os.path.join(cli.directory, "checkout.foo")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- checkout_args = ['artifact', 'checkout', '--tar', checkout, 'target.bst']
+ checkout_args = ["artifact", "checkout", "--tar", checkout, "target.bst"]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
- tar = tarfile.open(name=checkout, mode='r')
- assert os.path.join('.', 'usr', 'bin', 'hello') in tar.getnames()
- assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
+ tar = tarfile.open(name=checkout, mode="r")
+ assert os.path.join(".", "usr", "bin", "hello") in tar.getnames()
+ assert os.path.join(".", "usr", "include", "pony.h") in tar.getnames()
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tar_with_unsupported_ext(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout.tar.foo')
+ checkout = os.path.join(cli.directory, "checkout.tar.foo")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- checkout_args = ['artifact', 'checkout', '--tar', checkout, 'target.bst']
+ checkout_args = ["artifact", "checkout", "--tar", checkout, "target.bst"]
result = cli.run(project=project, args=checkout_args)
- assert "Invalid file extension given with '--tar': Expected compression with unknown file extension ('.foo'), " \
- "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')" in result.stderr
+ assert (
+ "Invalid file extension given with '--tar': Expected compression with unknown file extension ('.foo'), "
+ "supported extensions are ('.tar'), ('.gz'), ('.xz'), ('.bz2')" in result.stderr
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tar_no_compression(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout.tar.gz')
+ checkout = os.path.join(cli.directory, "checkout.tar.gz")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
- checkout_args = ['artifact', 'checkout', '--tar', checkout, 'target.bst']
+ checkout_args = ["artifact", "checkout", "--tar", checkout, "target.bst"]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
- tar = tarfile.open(name=checkout, mode='r:gz')
- assert os.path.join('.', 'usr', 'bin', 'hello') in tar.getnames()
- assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
+ tar = tarfile.open(name=checkout, mode="r:gz")
+ assert os.path.join(".", "usr", "bin", "hello") in tar.getnames()
+ assert os.path.join(".", "usr", "include", "pony.h") in tar.getnames()
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tarball(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout.tar')
+ checkout = os.path.join(cli.directory, "checkout.tar")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
- checkout_args = ['artifact', 'checkout', '--tar', checkout, 'target.bst']
+ checkout_args = ["artifact", "checkout", "--tar", checkout, "target.bst"]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
tar = tarfile.TarFile(checkout)
- assert os.path.join('.', 'usr', 'bin', 'hello') in tar.getnames()
- assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
+ assert os.path.join(".", "usr", "bin", "hello") in tar.getnames()
+ assert os.path.join(".", "usr", "include", "pony.h") in tar.getnames()
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_using_ref(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
- result = cli.run(project=project, args=['build', 'checkout-deps.bst'])
+ result = cli.run(project=project, args=["build", "checkout-deps.bst"])
result.assert_success()
- key = cli.get_element_key(project, 'checkout-deps.bst')
- checkout_args = ['artifact', 'checkout', '--directory', checkout, '--deps', 'none', 'test/checkout-deps/' + key]
+ key = cli.get_element_key(project, "checkout-deps.bst")
+ checkout_args = [
+ "artifact",
+ "checkout",
+ "--directory",
+ checkout,
+ "--deps",
+ "none",
+ "test/checkout-deps/" + key,
+ ]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
- filename = os.path.join(checkout, 'etc', 'buildstream', 'config')
+ filename = os.path.join(checkout, "etc", "buildstream", "config")
assert os.path.exists(filename)
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tarball_using_ref(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout.tar')
+ checkout = os.path.join(cli.directory, "checkout.tar")
- result = cli.run(project=project, args=['build', 'checkout-deps.bst'])
+ result = cli.run(project=project, args=["build", "checkout-deps.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
- key = cli.get_element_key(project, 'checkout-deps.bst')
- checkout_args = ['artifact', 'checkout', '--deps', 'none', '--tar', checkout, 'test/checkout-deps/' + key]
+ key = cli.get_element_key(project, "checkout-deps.bst")
+ checkout_args = [
+ "artifact",
+ "checkout",
+ "--deps",
+ "none",
+ "--tar",
+ checkout,
+ "test/checkout-deps/" + key,
+ ]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
tar = tarfile.TarFile(checkout)
- assert os.path.join('.', 'etc', 'buildstream', 'config') in tar.getnames()
+ assert os.path.join(".", "etc", "buildstream", "config") in tar.getnames()
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_build_deps_using_ref(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
- result = cli.run(project=project, args=['build', 'checkout-deps.bst'])
+ result = cli.run(project=project, args=["build", "checkout-deps.bst"])
result.assert_success()
- key = cli.get_element_key(project, 'checkout-deps.bst')
- checkout_args = ['artifact', 'checkout', '--directory', checkout, '--deps', 'build', 'test/checkout-deps/' + key]
+ key = cli.get_element_key(project, "checkout-deps.bst")
+ checkout_args = [
+ "artifact",
+ "checkout",
+ "--directory",
+ checkout,
+ "--deps",
+ "build",
+ "test/checkout-deps/" + key,
+ ]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
- build_dep_files = os.path.join(checkout, 'usr', 'include', 'pony.h')
- runtime_dep_files = os.path.join(checkout, 'usr', 'bin', 'hello')
- target_files = os.path.join(checkout, 'etc', 'buildstream', 'config')
+ build_dep_files = os.path.join(checkout, "usr", "include", "pony.h")
+ runtime_dep_files = os.path.join(checkout, "usr", "bin", "hello")
+ target_files = os.path.join(checkout, "etc", "buildstream", "config")
assert os.path.exists(build_dep_files)
assert not os.path.exists(runtime_dep_files)
assert not os.path.exists(target_files)
@@ -368,13 +422,21 @@ def test_build_checkout_build_deps_using_ref(datafiles, cli):
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_runtime_deps_using_ref_fails(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
- result = cli.run(project=project, args=['build', 'checkout-deps.bst'])
+ result = cli.run(project=project, args=["build", "checkout-deps.bst"])
result.assert_success()
- key = cli.get_element_key(project, 'checkout-deps.bst')
- checkout_args = ['artifact', 'checkout', '--directory', checkout, '--deps', 'run', 'test/checkout-deps/' + key]
+ key = cli.get_element_key(project, "checkout-deps.bst")
+ checkout_args = [
+ "artifact",
+ "checkout",
+ "--directory",
+ checkout,
+ "--deps",
+ "run",
+ "test/checkout-deps/" + key,
+ ]
result = cli.run(project=project, args=checkout_args)
result.assert_main_error(ErrorDomain.STREAM, None)
@@ -383,20 +445,31 @@ def test_build_checkout_runtime_deps_using_ref_fails(datafiles, cli):
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_invalid_ref(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout.tar')
+ checkout = os.path.join(cli.directory, "checkout.tar")
- result = cli.run(project=project, args=['build', 'checkout-deps.bst'])
+ result = cli.run(project=project, args=["build", "checkout-deps.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
- non_existent_artifact = 'test/checkout-deps/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
- checkout_args = ['artifact', 'checkout', '--deps', 'none', '--tar', checkout, non_existent_artifact]
+ non_existent_artifact = "test/checkout-deps/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+ checkout_args = [
+ "artifact",
+ "checkout",
+ "--deps",
+ "none",
+ "--tar",
+ checkout,
+ non_existent_artifact,
+ ]
result = cli.run(project=project, args=checkout_args)
- assert "Error while staging dependencies into a sandbox: 'No artifacts to stage'" in result.stderr
+ assert (
+ "Error while staging dependencies into a sandbox: 'No artifacts to stage'"
+ in result.stderr
+ )
@pytest.mark.datafiles(DATA_DIR)
@@ -404,16 +477,16 @@ def test_build_checkout_no_tar_no_directory(datafiles, cli, tmpdir):
project = str(datafiles)
runtestdir = str(tmpdir)
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- checkout_args = ['artifact', 'checkout', 'target.bst']
+ checkout_args = ["artifact", "checkout", "target.bst"]
result = cli.run(cwd=runtestdir, project=project, args=checkout_args)
result.assert_success()
- filename = os.path.join(runtestdir, 'target', 'usr', 'bin', 'hello')
+ filename = os.path.join(runtestdir, "target", "usr", "bin", "hello")
assert os.path.exists(filename)
- filename = os.path.join(runtestdir, 'target', 'usr', 'include', 'pony.h')
+ filename = os.path.join(runtestdir, "target", "usr", "include", "pony.h")
assert os.path.exists(filename)
@@ -421,61 +494,69 @@ def test_build_checkout_no_tar_no_directory(datafiles, cli, tmpdir):
@pytest.mark.parametrize("compression", [("gz"), ("xz"), ("bz2")])
def test_build_checkout_tarball_compression(datafiles, cli, compression):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout.tar')
+ checkout = os.path.join(cli.directory, "checkout.tar")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
- checkout_args = ['artifact', 'checkout', '--tar', checkout, '--compression', compression, 'target.bst']
+ checkout_args = [
+ "artifact",
+ "checkout",
+ "--tar",
+ checkout,
+ "--compression",
+ compression,
+ "target.bst",
+ ]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
# Docs say not to use TarFile class directly, using .open seems to work.
# https://docs.python.org/3/library/tarfile.html#tarfile.TarFile
- tar = tarfile.open(name=checkout, mode='r:' + compression)
- assert os.path.join('.', 'usr', 'bin', 'hello') in tar.getnames()
- assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
+ tar = tarfile.open(name=checkout, mode="r:" + compression)
+ assert os.path.join(".", "usr", "bin", "hello") in tar.getnames()
+ assert os.path.join(".", "usr", "include", "pony.h") in tar.getnames()
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tarball_stdout(datafiles, cli):
project = str(datafiles)
- tarball = os.path.join(cli.directory, 'tarball.tar')
+ tarball = os.path.join(cli.directory, "tarball.tar")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
- checkout_args = ['artifact', 'checkout', '--tar', '-', 'target.bst']
+ checkout_args = ["artifact", "checkout", "--tar", "-", "target.bst"]
result = cli.run(project=project, args=checkout_args, binary_capture=True)
result.assert_success()
- with open(tarball, 'wb') as f:
+ with open(tarball, "wb") as f:
f.write(result.output)
tar = tarfile.TarFile(tarball)
- assert os.path.join('.', 'usr', 'bin', 'hello') in tar.getnames()
- assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
+ assert os.path.join(".", "usr", "bin", "hello") in tar.getnames()
+ assert os.path.join(".", "usr", "include", "pony.h") in tar.getnames()
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tarball_mtime_nonzero(datafiles, cli):
project = str(datafiles)
- tarpath = os.path.join(cli.directory, 'mtime_tar.tar')
+ tarpath = os.path.join(cli.directory, "mtime_tar.tar")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- checkout_args = ['artifact', 'checkout', '--tar', tarpath, 'target.bst']
+ checkout_args = ["artifact", "checkout", "--tar", tarpath, "target.bst"]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
@@ -490,31 +571,31 @@ def test_build_checkout_tarball_mtime_nonzero(datafiles, cli):
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tarball_is_deterministic(datafiles, cli):
project = str(datafiles)
- tarball1 = os.path.join(cli.directory, 'tarball1.tar')
- tarball2 = os.path.join(cli.directory, 'tarball2.tar')
+ tarball1 = os.path.join(cli.directory, "tarball1.tar")
+ tarball2 = os.path.join(cli.directory, "tarball2.tar")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
- checkout_args = ['artifact', 'checkout', '--force', 'target.bst']
+ checkout_args = ["artifact", "checkout", "--force", "target.bst"]
- checkout_args1 = checkout_args + ['--tar', tarball1]
+ checkout_args1 = checkout_args + ["--tar", tarball1]
result = cli.run(project=project, args=checkout_args1)
result.assert_success()
- checkout_args2 = checkout_args + ['--tar', tarball2]
+ checkout_args2 = checkout_args + ["--tar", tarball2]
result = cli.run(project=project, args=checkout_args2)
result.assert_success()
- with open(tarball1, 'rb') as f:
+ with open(tarball1, "rb") as f:
contents = f.read()
hash1 = hashlib.sha1(contents).hexdigest()
- with open(tarball2, 'rb') as f:
+ with open(tarball2, "rb") as f:
contents = f.read()
hash2 = hashlib.sha1(contents).hexdigest()
@@ -524,75 +605,91 @@ def test_build_checkout_tarball_is_deterministic(datafiles, cli):
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tarball_links(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout.tar')
- extract = os.path.join(cli.directory, 'extract')
+ checkout = os.path.join(cli.directory, "checkout.tar")
+ extract = os.path.join(cli.directory, "extract")
# Create the link before running the tests.
# This is needed for users working on Windows: git checks out symlinks as regular files whose
# content is the name of the symlink, so the test would otherwise not see the correct content
os.symlink(
os.path.join("..", "basicfile"),
- os.path.join(project, "files", "files-and-links", "basicfolder", "basicsymlink")
+ os.path.join(
+ project, "files", "files-and-links", "basicfolder", "basicsymlink"
+ ),
)
- result = cli.run(project=project, args=['build', 'import-links.bst'])
+ result = cli.run(project=project, args=["build", "import-links.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
- checkout_args = ['artifact', 'checkout', '--tar', checkout, 'import-links.bst']
+ checkout_args = ["artifact", "checkout", "--tar", checkout, "import-links.bst"]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
tar = tarfile.open(name=checkout, mode="r:")
tar.extractall(extract)
- assert open(os.path.join(extract, 'basicfolder', 'basicsymlink')).read() == "file contents\n"
+ assert (
+ open(os.path.join(extract, "basicfolder", "basicsymlink")).read()
+ == "file contents\n"
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_links(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
# Create the link before running the tests.
# This is needed for users working on Windows: git checks out symlinks as regular files whose
# content is the name of the symlink, so the test would otherwise not see the correct content
os.symlink(
os.path.join("..", "basicfile"),
- os.path.join(project, "files", "files-and-links", "basicfolder", "basicsymlink")
+ os.path.join(
+ project, "files", "files-and-links", "basicfolder", "basicsymlink"
+ ),
)
- result = cli.run(project=project, args=['build', 'import-links.bst'])
+ result = cli.run(project=project, args=["build", "import-links.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
- checkout_args = ['artifact', 'checkout', '--directory', checkout, 'import-links.bst']
+ checkout_args = [
+ "artifact",
+ "checkout",
+ "--directory",
+ checkout,
+ "import-links.bst",
+ ]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
- assert open(os.path.join(checkout, 'basicfolder', 'basicsymlink')).read() == "file contents\n"
+ assert (
+ open(os.path.join(checkout, "basicfolder", "basicsymlink")).read()
+ == "file contents\n"
+ )
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("hardlinks", [("copies"), ("hardlinks")])
def test_build_checkout_nonempty(datafiles, cli, hardlinks):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
filename = os.path.join(checkout, "file.txt")
# First build it
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
# Assert that after a successful build, the builddir is empty
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
@@ -602,10 +699,10 @@ def test_build_checkout_nonempty(datafiles, cli, hardlinks):
f.write("Hello")
# Prepare checkout args
- checkout_args = ['artifact', 'checkout']
+ checkout_args = ["artifact", "checkout"]
if hardlinks == "hardlinks":
- checkout_args += ['--hardlinks']
- checkout_args += ['target.bst', '--directory', checkout]
+ checkout_args += ["--hardlinks"]
+ checkout_args += ["target.bst", "--directory", checkout]
# Now check it out
result = cli.run(project=project, args=checkout_args)
@@ -616,15 +713,15 @@ def test_build_checkout_nonempty(datafiles, cli, hardlinks):
@pytest.mark.parametrize("hardlinks", [("copies"), ("hardlinks")])
def test_build_checkout_force(datafiles, cli, hardlinks):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
filename = os.path.join(checkout, "file.txt")
# First build it
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
# Assert that after a successful build, the builddir is empty
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
@@ -634,78 +731,76 @@ def test_build_checkout_force(datafiles, cli, hardlinks):
f.write("Hello")
# Prepare checkout args
- checkout_args = ['artifact', 'checkout', '--force']
+ checkout_args = ["artifact", "checkout", "--force"]
if hardlinks == "hardlinks":
- checkout_args += ['--hardlinks']
- checkout_args += ['target.bst', '--directory', checkout]
+ checkout_args += ["--hardlinks"]
+ checkout_args += ["target.bst", "--directory", checkout]
# Now check it out
result = cli.run(project=project, args=checkout_args)
result.assert_success()
# Check that the file we added is still there
- filename = os.path.join(checkout, 'file.txt')
+ filename = os.path.join(checkout, "file.txt")
assert os.path.exists(filename)
# Check that the executable hello file is found in the checkout
- filename = os.path.join(checkout, 'usr', 'bin', 'hello')
+ filename = os.path.join(checkout, "usr", "bin", "hello")
assert os.path.exists(filename)
# Check that the executable hello file is found in the checkout
- filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
+ filename = os.path.join(checkout, "usr", "include", "pony.h")
assert os.path.exists(filename)
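The copies/hardlinks parametrization above relies on `bst artifact checkout --hardlinks` linking files straight out of the local cache rather than copying them. As an illustrative aside (not taken from the BuildStream sources), the two modes can be told apart by the inode link count:

import os

def is_hardlinked(path):
    # A file hardlinked out of the cache shares its inode, so the link count
    # is at least 2; a plain copy normally reports st_nlink == 1.
    return os.stat(path).st_nlink > 1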
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_force_tarball(datafiles, cli):
project = str(datafiles)
- tarball = os.path.join(cli.directory, 'tarball.tar')
+ tarball = os.path.join(cli.directory, "tarball.tar")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- builddir = os.path.join(cli.directory, 'build')
+ builddir = os.path.join(cli.directory, "build")
assert os.path.isdir(builddir)
assert not os.listdir(builddir)
with open(tarball, "w") as f:
f.write("Hello")
- checkout_args = ['artifact', 'checkout', '--force', '--tar', tarball, 'target.bst']
+ checkout_args = ["artifact", "checkout", "--force", "--tar", tarball, "target.bst"]
result = cli.run(project=project, args=checkout_args)
result.assert_success()
tar = tarfile.TarFile(tarball)
- assert os.path.join('.', 'usr', 'bin', 'hello') in tar.getnames()
- assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
+ assert os.path.join(".", "usr", "bin", "hello") in tar.getnames()
+ assert os.path.join(".", "usr", "include", "pony.h") in tar.getnames()
@pytest.mark.datafiles(DATA_DIR)
def test_install_to_build(cli, datafiles):
project = str(datafiles)
- element = 'installed-to-build.bst'
+ element = "installed-to-build.bst"
# Attempt building the element
# We expect this to throw an ElementError, since the element will
# attempt to stage into /buildstream/build, which is not allowed.
- result = cli.run(project=project, args=strict_args(['build', element], True))
+ result = cli.run(project=project, args=strict_args(["build", element], True))
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.ELEMENT, None)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
- configure_project(project, {
- 'ref-storage': ref_storage
- })
+ configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)
@@ -713,88 +808,68 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Now try to track it, this will bail with the appropriate error
# informing the user to track the junction first
- result = cli.run(project=project, args=['build', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["build", "junction-dep.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
# Assert that we have the expected provenance encoded into the error
- element_node = _yaml.load(element_path, shortname='junction-dep.bst')
- ref_node = element_node.get_sequence('depends').mapping_at(0)
+ element_node = _yaml.load(element_path, shortname="junction-dep.bst")
+ ref_node = element_node.get_sequence("depends").mapping_at(0)
provenance = ref_node.get_provenance()
assert str(provenance) in result.stderr
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
- configure_project(project, {
- 'ref-storage': ref_storage
- })
+ configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
- ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
+ ref = generate_junction(
+ tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
+ )
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Dump a project.refs if we're using project.refs storage
#
- if ref_storage == 'project.refs':
- project_refs = {
- 'projects': {
- 'test': {
- 'junction.bst': [
- {
- 'ref': ref
- }
- ]
- }
- }
- }
- _yaml.roundtrip_dump(project_refs, os.path.join(project, 'junction.refs'))
+ if ref_storage == "project.refs":
+ project_refs = {"projects": {"test": {"junction.bst": [{"ref": ref}]}}}
+ _yaml.roundtrip_dump(project_refs, os.path.join(project, "junction.refs"))
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
- result = cli.run(project=project, args=['build', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["build", "junction-dep.bst"])
result.assert_success()
# Assert that it's cached now
- assert cli.get_element_state(project, 'junction-dep.bst') == 'cached'
+ assert cli.get_element_state(project, "junction-dep.bst") == "cached"
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_junction(cli, tmpdir, datafiles):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
- checkout = os.path.join(cli.directory, 'checkout')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
+ checkout = os.path.join(cli.directory, "checkout")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path)
@@ -802,46 +877,42 @@ def test_build_checkout_junction(cli, tmpdir, datafiles):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
- result = cli.run(project=project, args=['build', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["build", "junction-dep.bst"])
result.assert_success()
# Assert that it's cached now
- assert cli.get_element_state(project, 'junction-dep.bst') == 'cached'
+ assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'junction-dep.bst', '--directory', checkout
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
+ )
result.assert_success()
# Assert the content of /etc/animal.conf
- filename = os.path.join(checkout, 'etc', 'animal.conf')
+ filename = os.path.join(checkout, "etc", "animal.conf")
assert os.path.exists(filename)
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
contents = f.read()
- assert contents == 'animal=Pony\n'
+ assert contents == "animal=Pony\n"
# Test that default targets work with projects with junctions
@pytest.mark.datafiles(DATA_DIR + "_world")
def test_build_checkout_junction_default_targets(cli, tmpdir, datafiles):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
- checkout = os.path.join(cli.directory, 'checkout')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
+ checkout = os.path.join(cli.directory, "checkout")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path)
@@ -849,46 +920,42 @@ def test_build_checkout_junction_default_targets(cli, tmpdir, datafiles):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
- result = cli.run(project=project, args=['build'])
+ result = cli.run(project=project, args=["build"])
result.assert_success()
# Assert that it's cached now
- assert cli.get_element_state(project, 'junction-dep.bst') == 'cached'
+ assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'junction-dep.bst', '--directory', checkout
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
+ )
result.assert_success()
# Assert the content of /etc/animal.conf
- filename = os.path.join(checkout, 'etc', 'animal.conf')
+ filename = os.path.join(checkout, "etc", "animal.conf")
assert os.path.exists(filename)
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
contents = f.read()
- assert contents == 'animal=Pony\n'
+ assert contents == "animal=Pony\n"
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
- workspace = os.path.join(cli.directory, 'workspace')
- checkout = os.path.join(cli.directory, 'checkout')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
+ workspace = os.path.join(cli.directory, "workspace")
+ checkout = os.path.join(cli.directory, "checkout")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path)
@@ -896,122 +963,127 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Now open a workspace on the junction
#
- result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, 'junction.bst'])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, "junction.bst"],
+ )
result.assert_success()
- filename = os.path.join(workspace, 'files', 'etc-files', 'etc', 'animal.conf')
+ filename = os.path.join(workspace, "files", "etc-files", "etc", "animal.conf")
# Assert the content of /etc/animal.conf in the workspace
assert os.path.exists(filename)
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
contents = f.read()
- assert contents == 'animal=Pony\n'
+ assert contents == "animal=Pony\n"
# Modify the content of the animal.conf in the workspace
- with open(filename, 'w') as f:
- f.write('animal=Horsy\n')
+ with open(filename, "w") as f:
+ f.write("animal=Horsy\n")
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
- result = cli.run(project=project, args=['build', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["build", "junction-dep.bst"])
result.assert_success()
# Assert that it's cached now
- assert cli.get_element_state(project, 'junction-dep.bst') == 'cached'
+ assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'junction-dep.bst', '--directory', checkout
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
+ )
result.assert_success()
# Assert the workspace modified content of /etc/animal.conf
- filename = os.path.join(checkout, 'etc', 'animal.conf')
+ filename = os.path.join(checkout, "etc", "animal.conf")
assert os.path.exists(filename)
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
contents = f.read()
- assert contents == 'animal=Horsy\n'
+ assert contents == "animal=Horsy\n"
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_cross_junction(datafiles, cli, tmpdir):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- checkout = os.path.join(cli.directory, 'checkout')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ checkout = os.path.join(cli.directory, "checkout")
generate_junction(tmpdir, subproject_path, junction_path)
- result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
+ result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'junction.bst:import-etc.bst',
- '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "junction.bst:import-etc.bst",
+ "--directory",
+ checkout,
+ ],
+ )
result.assert_success()
- filename = os.path.join(checkout, 'etc', 'animal.conf')
+ filename = os.path.join(checkout, "etc", "animal.conf")
assert os.path.exists(filename)
@pytest.mark.datafiles(DATA_DIR)
def test_build_junction_short_notation(cli, tmpdir, datafiles):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
- checkout = os.path.join(cli.directory, 'checkout')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
+ checkout = os.path.join(cli.directory, "checkout")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path)
# Create a stack element to depend on a cross junction element, using
# colon (:) as the separator
- element = {
- 'kind': 'stack',
- 'depends': ['junction.bst:import-etc.bst']
- }
+ element = {"kind": "stack", "depends": ["junction.bst:import-etc.bst"]}
_yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
- result = cli.run(project=project, args=['build', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["build", "junction-dep.bst"])
result.assert_success()
# Assert that it's cached now
- assert cli.get_element_state(project, 'junction-dep.bst') == 'cached'
+ assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'junction-dep.bst', '--directory', checkout
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
+ )
result.assert_success()
# Assert the content of /etc/animal.conf
- filename = os.path.join(checkout, 'etc', 'animal.conf')
+ filename = os.path.join(checkout, "etc", "animal.conf")
assert os.path.exists(filename)
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
contents = f.read()
- assert contents == 'animal=Pony\n'
+ assert contents == "animal=Pony\n"
@pytest.mark.datafiles(DATA_DIR)
def test_build_junction_short_notation_filename(cli, tmpdir, datafiles):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
- checkout = os.path.join(cli.directory, 'checkout')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
+ checkout = os.path.join(cli.directory, "checkout")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path)
@@ -1019,39 +1091,40 @@ def test_build_junction_short_notation_filename(cli, tmpdir, datafiles):
# Create a stack element to depend on a cross junction element, using
# colon (:) as the separator
element = {
- 'kind': 'stack',
- 'depends': [{'filename': 'junction.bst:import-etc.bst'}]
+ "kind": "stack",
+ "depends": [{"filename": "junction.bst:import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should automatically result in fetching
# the junction itself at load time.
- result = cli.run(project=project, args=['build', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["build", "junction-dep.bst"])
result.assert_success()
# Assert that it's cached now
- assert cli.get_element_state(project, 'junction-dep.bst') == 'cached'
+ assert cli.get_element_state(project, "junction-dep.bst") == "cached"
# Now check it out
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'junction-dep.bst', '--directory', checkout
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "junction-dep.bst", "--directory", checkout],
+ )
result.assert_success()
# Assert the content of /etc/animal.conf
- filename = os.path.join(checkout, 'etc', 'animal.conf')
+ filename = os.path.join(checkout, "etc", "animal.conf")
assert os.path.exists(filename)
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
contents = f.read()
- assert contents == 'animal=Pony\n'
+ assert contents == "animal=Pony\n"
@pytest.mark.datafiles(DATA_DIR)
def test_build_junction_short_notation_with_junction(cli, tmpdir, datafiles):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path)
@@ -1059,41 +1132,37 @@ def test_build_junction_short_notation_with_junction(cli, tmpdir, datafiles):
# Create a stack element to depend on a cross junction element, using
# colon (:) as the separator
element = {
- 'kind': 'stack',
- 'depends': [{
- 'filename': 'junction.bst:import-etc.bst',
- 'junction': 'junction.bst',
- }]
+ "kind": "stack",
+ "depends": [
+ {"filename": "junction.bst:import-etc.bst", "junction": "junction.bst",}
+ ],
}
_yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should fail as filenames should not contain
# `:` when the junction is explicitly specified
- result = cli.run(project=project, args=['build', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["build", "junction-dep.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@pytest.mark.datafiles(DATA_DIR)
def test_build_junction_transitive_short_notation_with_junction(cli, tmpdir, datafiles):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path)
# Create a stack element to depend on a cross junction element, using
# colon (:) as the separator
- element = {
- 'kind': 'stack',
- 'depends': ['junction.bst:import-etc.bst:foo.bst']
- }
+ element = {"kind": "stack", "depends": ["junction.bst:import-etc.bst:foo.bst"]}
_yaml.roundtrip_dump(element, element_path)
# Now try to build it, this should fail as recursive lookups for
# cross-junction elements are not allowed.
- result = cli.run(project=project, args=['build', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["build", "junction-dep.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
@@ -1102,11 +1171,11 @@ def test_build_junction_transitive_short_notation_with_junction(cli, tmpdir, dat
@pytest.mark.datafiles(DATA_DIR)
def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
project = str(datafiles)
- checkout_dir = os.path.join(str(tmpdir), 'checkout')
+ checkout_dir = os.path.join(str(tmpdir), "checkout")
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(os.path.join(str(datafiles), "files"))
- element_dir = os.path.join(str(tmpdir), 'elements')
+ element_dir = os.path.join(str(tmpdir), "elements")
project = str(tmpdir)
project_config = {
"name": "partial-artifact-checkout-fetch",
@@ -1118,38 +1187,46 @@ def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
"kind": "import",
"sources": [repo.source_config()],
}
- input_name = 'input.bst'
+ input_name = "input.bst"
input_file = os.path.join(element_dir, input_name)
_yaml.roundtrip_dump(input_config, input_file)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
- cli.configure({'artifacts': {
- 'url': share.repo,
- 'push': True
- }})
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
- result = cli.run(project=project, args=['source', 'track', input_name])
+ result = cli.run(project=project, args=["source", "track", input_name])
result.assert_success()
- result = cli.run(project=project, args=['build', input_name])
+ result = cli.run(project=project, args=["build", input_name])
result.assert_success()
# A push artifact cache means we have to pull in order to push to it, so
# delete some blobs from the local CAS such that we have to fetch
- digest = utils.sha256sum(os.path.join(project, 'files', 'bin-files', 'usr', 'bin', 'hello'))
- objpath = os.path.join(cli.directory, 'cas', 'objects', digest[:2], digest[2:])
+ digest = utils.sha256sum(
+ os.path.join(project, "files", "bin-files", "usr", "bin", "hello")
+ )
+ objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
os.unlink(objpath)
# Verify that the build-only dependency is not (completely) present in the local cache
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', input_name,
- '--directory', checkout_dir])
- result.assert_main_error(ErrorDomain.STREAM, 'uncached-checkout-attempt')
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", input_name, "--directory", checkout_dir],
+ )
+ result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
# Verify that the pull method fetches relevant artifacts in order to stage
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', '--pull', input_name,
- '--directory', checkout_dir])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "--pull",
+ input_name,
+ "--directory",
+ checkout_dir,
+ ],
+ )
result.assert_success()
# should have pulled whatever was deleted previously
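For readers unfamiliar with the local cache layout, the unlink above works because the test derives the blob path itself: objects live under cas/objects/<first two hex digits>/<rest> of their SHA-256 digest. A self-contained sketch of the same computation (hashlib stands in for utils.sha256sum):

import hashlib
import os

def cas_object_path(cas_root, data):
    # data is the raw file content; the object path is keyed by its digest
    digest = hashlib.sha256(data).hexdigest()
    return os.path.join(cas_root, "objects", digest[:2], digest[2:])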
@@ -1159,18 +1236,25 @@ def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
@pytest.mark.datafiles(DATA_DIR)
def test_partial_checkout_fail(tmpdir, datafiles, cli):
project = str(datafiles)
- build_elt = 'import-bin.bst'
- checkout_dir = os.path.join(str(tmpdir), 'checkout')
-
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
-
- cli.configure({'artifacts': {
- 'url': share.repo,
- 'push': True
- }})
-
- res = cli.run(project=project, args=[
- 'artifact', 'checkout', '--pull', build_elt, '--directory',
- checkout_dir])
- res.assert_main_error(ErrorDomain.STREAM, 'uncached-checkout-attempt')
- assert re.findall(r'Remote \((\S+)\) does not have artifact (\S+) cached', res.stderr)
+ build_elt = "import-bin.bst"
+ checkout_dir = os.path.join(str(tmpdir), "checkout")
+
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
+
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+
+ res = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "--pull",
+ build_elt,
+ "--directory",
+ checkout_dir,
+ ],
+ )
+ res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
+ assert re.findall(
+ r"Remote \((\S+)\) does not have artifact (\S+) cached", res.stderr
+ )
diff --git a/tests/frontend/completions.py b/tests/frontend/completions.py
index 5df873666..075fd70f1 100644
--- a/tests/frontend/completions.py
+++ b/tests/frontend/completions.py
@@ -6,20 +6,17 @@ import pytest
from buildstream.testing import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'completions'
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "completions")
MAIN_COMMANDS = [
- 'artifact ',
- 'build ',
- 'help ',
- 'init ',
- 'shell ',
- 'show ',
- 'source ',
- 'workspace '
+ "artifact ",
+ "build ",
+ "help ",
+ "init ",
+ "shell ",
+ "show ",
+ "source ",
+ "workspace ",
]
MAIN_OPTIONS = [
@@ -54,27 +51,22 @@ MAIN_OPTIONS = [
]
SOURCE_COMMANDS = [
- 'checkout ',
- 'fetch ',
- 'track ',
+ "checkout ",
+ "fetch ",
+ "track ",
]
ARTIFACT_COMMANDS = [
- 'checkout ',
- 'delete ',
- 'push ',
- 'pull ',
- 'log ',
- 'list-contents ',
- 'show ',
+ "checkout ",
+ "delete ",
+ "push ",
+ "pull ",
+ "log ",
+ "list-contents ",
+ "show ",
]
-WORKSPACE_COMMANDS = [
- 'close ',
- 'list ',
- 'open ',
- 'reset '
-]
+WORKSPACE_COMMANDS = ["close ", "list ", "open ", "reset "]
PROJECT_ELEMENTS = [
"compose-all.bst",
@@ -82,23 +74,24 @@ PROJECT_ELEMENTS = [
"compose-include-bin.bst",
"import-bin.bst",
"import-dev.bst",
- "target.bst"
+ "target.bst",
]
-INVALID_ELEMENTS = [
- "target.foo"
- "target.bst.bar"
-]
+INVALID_ELEMENTS = ["target.foo" "target.bst.bar"]
MIXED_ELEMENTS = PROJECT_ELEMENTS + INVALID_ELEMENTS
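Note that the INVALID_ELEMENTS literal above has no comma between the two strings, so Python's implicit string concatenation leaves a single element; the reformat merely made that pre-existing quirk visible on one line. A quick demonstration:

assert len(["target.foo" "target.bst.bar"]) == 1   # one concatenated string
assert len(["target.foo", "target.bst.bar"]) == 2  # two separate entries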
def assert_completion(cli, cmd, word_idx, expected, cwd=None):
- result = cli.run(project='.', cwd=cwd, env={
- '_BST_COMPLETION': 'complete',
- 'COMP_WORDS': cmd,
- 'COMP_CWORD': str(word_idx)
- })
+ result = cli.run(
+ project=".",
+ cwd=cwd,
+ env={
+ "_BST_COMPLETION": "complete",
+ "COMP_WORDS": cmd,
+ "COMP_CWORD": str(word_idx),
+ },
+ )
words = []
if result.output:
words = result.output.splitlines()
@@ -112,11 +105,14 @@ def assert_completion(cli, cmd, word_idx, expected, cwd=None):
def assert_completion_failed(cli, cmd, word_idx, expected, cwd=None):
- result = cli.run(cwd=cwd, env={
- '_BST_COMPLETION': 'complete',
- 'COMP_WORDS': cmd,
- 'COMP_CWORD': str(word_idx)
- })
+ result = cli.run(
+ cwd=cwd,
+ env={
+ "_BST_COMPLETION": "complete",
+ "COMP_WORDS": cmd,
+ "COMP_CWORD": str(word_idx),
+ },
+ )
words = []
if result.output:
words = result.output.splitlines()
@@ -129,67 +125,96 @@ def assert_completion_failed(cli, cmd, word_idx, expected, cwd=None):
assert words != expected
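Both helpers above drive bst's shell-completion mode through environment variables: COMP_WORDS carries the partial command line, COMP_CWORD the index of the word being completed, and _BST_COMPLETION=complete makes the CLI print completion candidates instead of executing a command. A small sketch of building that environment (the helper name is hypothetical):

def completion_env(cmd, word_idx):
    # cmd is the partial command line, e.g. "bst artifact " with word_idx 2
    return {
        "_BST_COMPLETION": "complete",
        "COMP_WORDS": cmd,
        "COMP_CWORD": str(word_idx),
    }

# e.g. cli.run(project=".", env=completion_env("bst ", 1)) should list MAIN_COMMANDS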
-@pytest.mark.parametrize("cmd,word_idx,expected", [
- ('bst', 0, []),
- ('bst ', 1, MAIN_COMMANDS),
- ('bst artifact ', 2, ARTIFACT_COMMANDS),
- ('bst source ', 2, SOURCE_COMMANDS),
- ('bst w ', 1, ['workspace ']),
- ('bst workspace ', 2, WORKSPACE_COMMANDS),
-])
+@pytest.mark.parametrize(
+ "cmd,word_idx,expected",
+ [
+ ("bst", 0, []),
+ ("bst ", 1, MAIN_COMMANDS),
+ ("bst artifact ", 2, ARTIFACT_COMMANDS),
+ ("bst source ", 2, SOURCE_COMMANDS),
+ ("bst w ", 1, ["workspace "]),
+ ("bst workspace ", 2, WORKSPACE_COMMANDS),
+ ],
+)
def test_commands(cli, cmd, word_idx, expected):
assert_completion(cli, cmd, word_idx, expected)
-@pytest.mark.parametrize("cmd,word_idx,expected", [
- ('bst -', 1, MAIN_OPTIONS),
- ('bst --l', 1, ['--log-file ']),
-
- # Test that options of subcommands also complete
- ('bst --no-colors build -', 3, ['--deps ', '-d ',
- '--remote ', '-r ']),
-
- # Test the behavior of completing after an option that has a
- # parameter that cannot be completed, vs an option that has
- # no parameter
- ('bst --fetchers ', 2, []),
- ('bst --no-colors ', 2, MAIN_COMMANDS),
-])
+@pytest.mark.parametrize(
+ "cmd,word_idx,expected",
+ [
+ ("bst -", 1, MAIN_OPTIONS),
+ ("bst --l", 1, ["--log-file "]),
+ # Test that options of subcommands also complete
+ ("bst --no-colors build -", 3, ["--deps ", "-d ", "--remote ", "-r "]),
+ # Test the behavior of completing after an option that has a
+ # parameter that cannot be completed, vs an option that has
+ # no parameter
+ ("bst --fetchers ", 2, []),
+ ("bst --no-colors ", 2, MAIN_COMMANDS),
+ ],
+)
def test_options(cli, cmd, word_idx, expected):
assert_completion(cli, cmd, word_idx, expected)
-@pytest.mark.parametrize("cmd,word_idx,expected", [
- ('bst --on-error ', 2, ['continue ', 'quit ', 'terminate ']),
- ('bst --cache-buildtrees ', 2, ['always ', 'auto ', 'never ']),
- ('bst show --deps ', 3, ['all ', 'build ', 'none ', 'plan ', 'run ']),
- ('bst show --deps=', 2, ['all ', 'build ', 'none ', 'plan ', 'run ']),
- ('bst show --deps b', 3, ['build ']),
- ('bst show --deps=b', 2, ['build ']),
- ('bst show --deps r', 3, ['run ']),
- ('bst source track --deps ', 4, ['all ', 'none ']),
-])
+@pytest.mark.parametrize(
+ "cmd,word_idx,expected",
+ [
+ ("bst --on-error ", 2, ["continue ", "quit ", "terminate "]),
+ ("bst --cache-buildtrees ", 2, ["always ", "auto ", "never "]),
+ ("bst show --deps ", 3, ["all ", "build ", "none ", "plan ", "run "]),
+ ("bst show --deps=", 2, ["all ", "build ", "none ", "plan ", "run "]),
+ ("bst show --deps b", 3, ["build "]),
+ ("bst show --deps=b", 2, ["build "]),
+ ("bst show --deps r", 3, ["run "]),
+ ("bst source track --deps ", 4, ["all ", "none "]),
+ ],
+)
def test_option_choice(cli, cmd, word_idx, expected):
assert_completion(cli, cmd, word_idx, expected)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
-@pytest.mark.parametrize("cmd,word_idx,expected,subdir", [
- # Note that elements/ and files/ are partial completions and
- # as such do not come with trailing whitespace
- ('bst --config ', 2, ['cache/', 'elements/', 'files/', 'project.conf '], None),
- ('bst --log-file ', 2, ['cache/', 'elements/', 'files/', 'project.conf '], None),
- ('bst --config f', 2, ['files/'], None),
- ('bst --log-file f', 2, ['files/'], None),
- ('bst --config files', 2, ['files/bin-files/', 'files/dev-files/'], None),
- ('bst --log-file files', 2, ['files/bin-files/', 'files/dev-files/'], None),
- ('bst --config files/', 2, ['files/bin-files/', 'files/dev-files/'], None),
- ('bst --log-file elements/', 2, [os.path.join('elements', e) + ' ' for e in PROJECT_ELEMENTS], None),
- ('bst --config ../', 2, ['../cache/', '../elements/', '../files/', '../project.conf '], 'files'),
- ('bst --config ../elements/', 2, [os.path.join('..', 'elements', e) + ' ' for e in PROJECT_ELEMENTS], 'files'),
- ('bst --config ../nofile', 2, [], 'files'),
- ('bst --config /pony/rainbow/nobodyhas/this/file', 2, [], 'files'),
-])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
+@pytest.mark.parametrize(
+ "cmd,word_idx,expected,subdir",
+ [
+ # Note that elements/ and files/ are partial completions and
+ # as such do not come with trailing whitespace
+ ("bst --config ", 2, ["cache/", "elements/", "files/", "project.conf "], None),
+ (
+ "bst --log-file ",
+ 2,
+ ["cache/", "elements/", "files/", "project.conf "],
+ None,
+ ),
+ ("bst --config f", 2, ["files/"], None),
+ ("bst --log-file f", 2, ["files/"], None),
+ ("bst --config files", 2, ["files/bin-files/", "files/dev-files/"], None),
+ ("bst --log-file files", 2, ["files/bin-files/", "files/dev-files/"], None),
+ ("bst --config files/", 2, ["files/bin-files/", "files/dev-files/"], None),
+ (
+ "bst --log-file elements/",
+ 2,
+ [os.path.join("elements", e) + " " for e in PROJECT_ELEMENTS],
+ None,
+ ),
+ (
+ "bst --config ../",
+ 2,
+ ["../cache/", "../elements/", "../files/", "../project.conf "],
+ "files",
+ ),
+ (
+ "bst --config ../elements/",
+ 2,
+ [os.path.join("..", "elements", e) + " " for e in PROJECT_ELEMENTS],
+ "files",
+ ),
+ ("bst --config ../nofile", 2, [], "files"),
+ ("bst --config /pony/rainbow/nobodyhas/this/file", 2, [], "files"),
+ ],
+)
def test_option_file(datafiles, cli, cmd, word_idx, expected, subdir):
cwd = str(datafiles)
if subdir:
@@ -197,15 +222,18 @@ def test_option_file(datafiles, cli, cmd, word_idx, expected, subdir):
assert_completion(cli, cmd, word_idx, expected, cwd=cwd)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
-@pytest.mark.parametrize("cmd,word_idx,expected,subdir", [
- # Note that regular files like project.conf are not returned when
- # completing for a directory
- ('bst --directory ', 2, ['cache/', 'elements/', 'files/'], None),
- ('bst --directory elements/', 2, [], None),
- ('bst --directory ', 2, ['dev-files/', 'bin-files/'], 'files'),
- ('bst --directory ../', 2, ['../cache/', '../elements/', '../files/'], 'files'),
-])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
+@pytest.mark.parametrize(
+ "cmd,word_idx,expected,subdir",
+ [
+ # Note that regular files like project.conf are not returned when
+ # completing for a directory
+ ("bst --directory ", 2, ["cache/", "elements/", "files/"], None),
+ ("bst --directory elements/", 2, [], None),
+ ("bst --directory ", 2, ["dev-files/", "bin-files/"], "files"),
+ ("bst --directory ../", 2, ["../cache/", "../elements/", "../files/"], "files"),
+ ],
+)
def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir):
cwd = str(datafiles)
if subdir:
@@ -214,56 +242,154 @@ def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("project,cmd,word_idx,expected,subdir", [
- # When running in the project directory
- ('project', 'bst show ', 2, [e + ' ' for e in PROJECT_ELEMENTS], None),
- ('project', 'bst build com', 2,
- ['compose-all.bst ', 'compose-include-bin.bst ', 'compose-exclude-dev.bst '], None),
-
- # When running from the files subdir
- ('project', 'bst show ', 2, [e + ' ' for e in PROJECT_ELEMENTS], 'files'),
- ('project', 'bst build com', 2,
- ['compose-all.bst ', 'compose-include-bin.bst ', 'compose-exclude-dev.bst '], 'files'),
-
- # When passing the project directory
- ('project', 'bst --directory ../ show ', 4, [e + ' ' for e in PROJECT_ELEMENTS], 'files'),
- ('project', 'bst --directory ../ build com', 4,
- ['compose-all.bst ', 'compose-include-bin.bst ', 'compose-exclude-dev.bst '], 'files'),
-
- # Also try multi arguments together
- ('project', 'bst --directory ../ artifact checkout t ', 5, ['target.bst '], 'files'),
- ('project', 'bst --directory ../ artifact checkout --directory ', 6,
- ['bin-files/', 'dev-files/'], 'files'),
-
- # When running in the project directory
- ('no-element-path', 'bst show ', 2,
- [e + ' ' for e in PROJECT_ELEMENTS] + ['files/'], None),
- ('no-element-path', 'bst build com', 2,
- ['compose-all.bst ', 'compose-include-bin.bst ', 'compose-exclude-dev.bst '], None),
-
- # When running from the files subdir
- ('no-element-path', 'bst show ', 2,
- [e + ' ' for e in PROJECT_ELEMENTS] + ['files/'], 'files'),
- ('no-element-path', 'bst build com', 2,
- ['compose-all.bst ', 'compose-include-bin.bst ', 'compose-exclude-dev.bst '], 'files'),
-
- # When passing the project directory
- ('no-element-path', 'bst --directory ../ show ', 4,
- [e + ' ' for e in PROJECT_ELEMENTS] + ['files/'], 'files'),
- ('no-element-path', 'bst --directory ../ show f', 4, ['files/'], 'files'),
- ('no-element-path', 'bst --directory ../ show files/', 4, ['files/bin-files/', 'files/dev-files/'], 'files'),
- ('no-element-path', 'bst --directory ../ build com', 4,
- ['compose-all.bst ', 'compose-include-bin.bst ', 'compose-exclude-dev.bst '], 'files'),
-
- # Also try multi arguments together
- ('no-element-path', 'bst --directory ../ artifact checkout t ', 5, ['target.bst '], 'files'),
- ('no-element-path', 'bst --directory ../ artifact checkout --directory ', 6,
- ['bin-files/', 'dev-files/'], 'files'),
-
- # When element-path have sub-folders
- ('sub-folders', 'bst show base', 2, ['base/wanted.bst '], None),
- ('sub-folders', 'bst show base/', 2, ['base/wanted.bst '], None),
-])
+@pytest.mark.parametrize(
+ "project,cmd,word_idx,expected,subdir",
+ [
+ # When running in the project directory
+ ("project", "bst show ", 2, [e + " " for e in PROJECT_ELEMENTS], None),
+ (
+ "project",
+ "bst build com",
+ 2,
+ [
+ "compose-all.bst ",
+ "compose-include-bin.bst ",
+ "compose-exclude-dev.bst ",
+ ],
+ None,
+ ),
+ # When running from the files subdir
+ ("project", "bst show ", 2, [e + " " for e in PROJECT_ELEMENTS], "files"),
+ (
+ "project",
+ "bst build com",
+ 2,
+ [
+ "compose-all.bst ",
+ "compose-include-bin.bst ",
+ "compose-exclude-dev.bst ",
+ ],
+ "files",
+ ),
+ # When passing the project directory
+ (
+ "project",
+ "bst --directory ../ show ",
+ 4,
+ [e + " " for e in PROJECT_ELEMENTS],
+ "files",
+ ),
+ (
+ "project",
+ "bst --directory ../ build com",
+ 4,
+ [
+ "compose-all.bst ",
+ "compose-include-bin.bst ",
+ "compose-exclude-dev.bst ",
+ ],
+ "files",
+ ),
+ # Also try multi arguments together
+ (
+ "project",
+ "bst --directory ../ artifact checkout t ",
+ 5,
+ ["target.bst "],
+ "files",
+ ),
+ (
+ "project",
+ "bst --directory ../ artifact checkout --directory ",
+ 6,
+ ["bin-files/", "dev-files/"],
+ "files",
+ ),
+ # When running in the project directory
+ (
+ "no-element-path",
+ "bst show ",
+ 2,
+ [e + " " for e in PROJECT_ELEMENTS] + ["files/"],
+ None,
+ ),
+ (
+ "no-element-path",
+ "bst build com",
+ 2,
+ [
+ "compose-all.bst ",
+ "compose-include-bin.bst ",
+ "compose-exclude-dev.bst ",
+ ],
+ None,
+ ),
+ # When running from the files subdir
+ (
+ "no-element-path",
+ "bst show ",
+ 2,
+ [e + " " for e in PROJECT_ELEMENTS] + ["files/"],
+ "files",
+ ),
+ (
+ "no-element-path",
+ "bst build com",
+ 2,
+ [
+ "compose-all.bst ",
+ "compose-include-bin.bst ",
+ "compose-exclude-dev.bst ",
+ ],
+ "files",
+ ),
+ # When passing the project directory
+ (
+ "no-element-path",
+ "bst --directory ../ show ",
+ 4,
+ [e + " " for e in PROJECT_ELEMENTS] + ["files/"],
+ "files",
+ ),
+ ("no-element-path", "bst --directory ../ show f", 4, ["files/"], "files"),
+ (
+ "no-element-path",
+ "bst --directory ../ show files/",
+ 4,
+ ["files/bin-files/", "files/dev-files/"],
+ "files",
+ ),
+ (
+ "no-element-path",
+ "bst --directory ../ build com",
+ 4,
+ [
+ "compose-all.bst ",
+ "compose-include-bin.bst ",
+ "compose-exclude-dev.bst ",
+ ],
+ "files",
+ ),
+ # Also try multi arguments together
+ (
+ "no-element-path",
+ "bst --directory ../ artifact checkout t ",
+ 5,
+ ["target.bst "],
+ "files",
+ ),
+ (
+ "no-element-path",
+ "bst --directory ../ artifact checkout --directory ",
+ 6,
+ ["bin-files/", "dev-files/"],
+ "files",
+ ),
+ # When element-path have sub-folders
+ ("sub-folders", "bst show base", 2, ["base/wanted.bst "], None),
+ ("sub-folders", "bst show base/", 2, ["base/wanted.bst "], None),
+ ],
+)
def test_argument_element(datafiles, cli, project, cmd, word_idx, expected, subdir):
cwd = os.path.join(str(datafiles), project)
if subdir:
@@ -272,67 +398,86 @@ def test_argument_element(datafiles, cli, project, cmd, word_idx, expected, subd
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("project,cmd,word_idx,expected,subdir", [
-
- # When element has invalid suffix
- ('project', 'bst --directory ../ show ', 4, [e + ' ' for e in MIXED_ELEMENTS], 'files')
-])
-def test_argument_element_invalid(datafiles, cli, project, cmd, word_idx, expected, subdir):
+@pytest.mark.parametrize(
+ "project,cmd,word_idx,expected,subdir",
+ [
+ # When element has invalid suffix
+ (
+ "project",
+ "bst --directory ../ show ",
+ 4,
+ [e + " " for e in MIXED_ELEMENTS],
+ "files",
+ )
+ ],
+)
+def test_argument_element_invalid(
+ datafiles, cli, project, cmd, word_idx, expected, subdir
+):
cwd = os.path.join(str(datafiles), project)
if subdir:
cwd = os.path.join(cwd, subdir)
assert_completion_failed(cli, cmd, word_idx, expected, cwd=cwd)
-@pytest.mark.parametrize("cmd,word_idx,expected", [
- ('bst he', 1, ['help ']),
- ('bst help ', 2, MAIN_COMMANDS),
- ('bst help artifact ', 3, ARTIFACT_COMMANDS),
- ('bst help in', 2, ['init ']),
- ('bst help source ', 3, SOURCE_COMMANDS),
- ('bst help artifact ', 3, ARTIFACT_COMMANDS),
- ('bst help w', 2, ['workspace ']),
- ('bst help workspace ', 3, WORKSPACE_COMMANDS),
-])
+@pytest.mark.parametrize(
+ "cmd,word_idx,expected",
+ [
+ ("bst he", 1, ["help "]),
+ ("bst help ", 2, MAIN_COMMANDS),
+ ("bst help artifact ", 3, ARTIFACT_COMMANDS),
+ ("bst help in", 2, ["init "]),
+ ("bst help source ", 3, SOURCE_COMMANDS),
+ ("bst help artifact ", 3, ARTIFACT_COMMANDS),
+ ("bst help w", 2, ["workspace "]),
+ ("bst help workspace ", 3, WORKSPACE_COMMANDS),
+ ],
+)
def test_help_commands(cli, cmd, word_idx, expected):
assert_completion(cli, cmd, word_idx, expected)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
def test_argument_artifact(cli, datafiles):
project = str(datafiles)
# Build an import element with no dependencies (as there will only be ONE cache key)
- result = cli.run(project=project, args=['build', 'import-bin.bst']) # Has no dependencies
+ result = cli.run(
+ project=project, args=["build", "import-bin.bst"]
+ ) # Has no dependencies
result.assert_success()
# Get the key and the artifact ref ($project/$element_name/$key)
- key = cli.get_element_key(project, 'import-bin.bst')
- artifact = os.path.join('test', 'import-bin', key)
+ key = cli.get_element_key(project, "import-bin.bst")
+ artifact = os.path.join("test", "import-bin", key)
# Test autocompletion of the artifact
- cmds = [
- 'bst artifact log ',
- 'bst artifact log t',
- 'bst artifact log test/'
- ]
+ cmds = ["bst artifact log ", "bst artifact log t", "bst artifact log test/"]
for i, cmd in enumerate(cmds):
word_idx = 3
- result = cli.run(project=project, cwd=project, env={
- '_BST_COMPLETION': 'complete',
- 'COMP_WORDS': cmd,
- 'COMP_CWORD': str(word_idx)
- })
+ result = cli.run(
+ project=project,
+ cwd=project,
+ env={
+ "_BST_COMPLETION": "complete",
+ "COMP_WORDS": cmd,
+ "COMP_CWORD": str(word_idx),
+ },
+ )
if result.output:
- words = result.output.splitlines() # This leaves an extra space on each e.g. ['foo.bst ']
+ words = (
+ result.output.splitlines()
+ ) # This leaves an extra space on each e.g. ['foo.bst ']
words = [word.strip() for word in words]
if i == 0:
- expected = PROJECT_ELEMENTS + [artifact] # We should now be able to see the artifact
+ expected = PROJECT_ELEMENTS + [
+ artifact
+ ] # We should now be able to see the artifact
elif i == 1:
- expected = ['target.bst', artifact]
+ expected = ["target.bst", artifact]
elif i == 2:
expected = [artifact]
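
For readers following the completion tests above, here is a minimal standalone sketch (not part of the patch) of the calling convention they exercise: the runcli `cli` fixture is invoked with the bash completion environment variables that BuildStream's completion hook consumes, and the suggestions come back one per line on stdout. The helper name `complete_words` is hypothetical; the environment variable names, the COMP_CWORD word index and the trailing-space behaviour are taken directly from the test body above.

def complete_words(cli, project, cmd, word_idx):
    # Drive the frontend the same way the tests above do: pretend to be the
    # bash completion hook by setting the completion environment variables.
    result = cli.run(
        project=project,
        cwd=project,
        env={
            "_BST_COMPLETION": "complete",
            "COMP_WORDS": cmd,
            "COMP_CWORD": str(word_idx),
        },
    )
    # One suggestion per line; complete words carry a trailing space
    # (e.g. "foo.bst "), so strip before comparing against expectations.
    return [word.strip() for word in result.output.splitlines()]

For example, complete_words(cli, project, "bst artifact log t", 3) would be expected to return both "target.bst" and the matching artifact ref, as asserted above.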
diff --git a/tests/frontend/compose_splits.py b/tests/frontend/compose_splits.py
index f1f9b73be..3a308a9f5 100644
--- a/tests/frontend/compose_splits.py
+++ b/tests/frontend/compose_splits.py
@@ -6,35 +6,31 @@ import pytest
from buildstream.testing.runcli import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
-@pytest.mark.parametrize("target", [
- ('compose-include-bin.bst'),
- ('compose-exclude-dev.bst')
-])
+@pytest.mark.parametrize(
+ "target", [("compose-include-bin.bst"), ("compose-exclude-dev.bst")]
+)
@pytest.mark.datafiles(DATA_DIR)
def test_compose_splits(datafiles, cli, target):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
+ checkout = os.path.join(cli.directory, "checkout")
# First build it
- result = cli.run(project=project, args=['build', target])
+ result = cli.run(project=project, args=["build", target])
result.assert_success()
# Now check it out
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', target, '--directory', checkout
- ])
+ result = cli.run(
+ project=project, args=["artifact", "checkout", target, "--directory", checkout]
+ )
result.assert_success()
# Check that the executable hello file is found in the checkout
- filename = os.path.join(checkout, 'usr', 'bin', 'hello')
+ filename = os.path.join(checkout, "usr", "bin", "hello")
assert os.path.exists(filename)
# Check that the executable hello file is found in the checkout
- filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
+ filename = os.path.join(checkout, "usr", "include", "pony.h")
assert not os.path.exists(filename)
diff --git a/tests/frontend/configurable_warnings.py b/tests/frontend/configurable_warnings.py
index caa91bb61..f756aae2b 100644
--- a/tests/frontend/configurable_warnings.py
+++ b/tests/frontend/configurable_warnings.py
@@ -11,10 +11,7 @@ from buildstream import _yaml
from buildstream.testing.runcli import cli # pylint: disable=unused-import
from buildstream.testing._utils.site import HAVE_SANDBOX
-TOP_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "configuredwarning"
-)
+TOP_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "configuredwarning")
def get_project(fatal_warnings):
@@ -25,14 +22,10 @@ def get_project(fatal_warnings):
{
"origin": "local",
"path": "plugins",
- "elements": {
- "warninga": 0,
- "warningb": 0,
- "corewarn": 0,
- }
+ "elements": {"warninga": 0, "warningb": 0, "corewarn": 0,},
}
],
- "fatal-warnings": fatal_warnings
+ "fatal-warnings": fatal_warnings,
}
@@ -47,19 +40,23 @@ def build_project(datafiles, fatal_warnings):
@pytest.mark.datafiles(TOP_DIR)
-@pytest.mark.parametrize("element_name, fatal_warnings, expect_fatal, error_domain", [
- ("corewarn.bst", [CoreWarnings.OVERLAPS], True, ErrorDomain.STREAM),
- ("warninga.bst", ["warninga:warning-a"], True, ErrorDomain.STREAM),
- ("warningb.bst", ["warningb:warning-b"], True, ErrorDomain.STREAM),
- ("corewarn.bst", [], False, None),
- ("warninga.bst", [], False, None),
- ("warningb.bst", [], False, None),
- ("warninga.bst", [CoreWarnings.OVERLAPS], False, None),
- ("warningb.bst", [CoreWarnings.OVERLAPS], False, None),
-])
-def test_fatal_warnings(cli, datafiles, element_name,
- fatal_warnings, expect_fatal, error_domain):
- if HAVE_SANDBOX == 'buildbox' and error_domain != ErrorDomain.STREAM:
+@pytest.mark.parametrize(
+ "element_name, fatal_warnings, expect_fatal, error_domain",
+ [
+ ("corewarn.bst", [CoreWarnings.OVERLAPS], True, ErrorDomain.STREAM),
+ ("warninga.bst", ["warninga:warning-a"], True, ErrorDomain.STREAM),
+ ("warningb.bst", ["warningb:warning-b"], True, ErrorDomain.STREAM),
+ ("corewarn.bst", [], False, None),
+ ("warninga.bst", [], False, None),
+ ("warningb.bst", [], False, None),
+ ("warninga.bst", [CoreWarnings.OVERLAPS], False, None),
+ ("warningb.bst", [CoreWarnings.OVERLAPS], False, None),
+ ],
+)
+def test_fatal_warnings(
+ cli, datafiles, element_name, fatal_warnings, expect_fatal, error_domain
+):
+ if HAVE_SANDBOX == "buildbox" and error_domain != ErrorDomain.STREAM:
pytest.xfail()
project_path = build_project(datafiles, fatal_warnings)
diff --git a/tests/frontend/configuredwarning/plugins/corewarn.py b/tests/frontend/configuredwarning/plugins/corewarn.py
index 1f263a0ce..5e43115f7 100644
--- a/tests/frontend/configuredwarning/plugins/corewarn.py
+++ b/tests/frontend/configuredwarning/plugins/corewarn.py
@@ -19,8 +19,10 @@ class CoreWarn(Element):
pass
def assemble(self, sandbox):
- self.warn("Testing: CoreWarning produced during assemble",
- warning_token=CoreWarnings.OVERLAPS)
+ self.warn(
+ "Testing: CoreWarning produced during assemble",
+ warning_token=CoreWarnings.OVERLAPS,
+ )
def setup():
diff --git a/tests/frontend/configuredwarning/plugins/warninga.py b/tests/frontend/configuredwarning/plugins/warninga.py
index 9fd8dc61b..dde90bb42 100644
--- a/tests/frontend/configuredwarning/plugins/warninga.py
+++ b/tests/frontend/configuredwarning/plugins/warninga.py
@@ -20,7 +20,9 @@ class WarningA(Element):
pass
def assemble(self, sandbox):
- self.warn("Testing: warning-a produced during assemble", warning_token=WARNING_A)
+ self.warn(
+ "Testing: warning-a produced during assemble", warning_token=WARNING_A
+ )
def setup():
diff --git a/tests/frontend/configuredwarning/plugins/warningb.py b/tests/frontend/configuredwarning/plugins/warningb.py
index 64d25ef39..d9229f0d0 100644
--- a/tests/frontend/configuredwarning/plugins/warningb.py
+++ b/tests/frontend/configuredwarning/plugins/warningb.py
@@ -20,7 +20,9 @@ class WarningB(Element):
pass
def assemble(self, sandbox):
- self.warn("Testing: warning-b produced during assemble", warning_token=WARNING_B)
+ self.warn(
+ "Testing: warning-b produced during assemble", warning_token=WARNING_B
+ )
def setup():
diff --git a/tests/frontend/consistencyerror/plugins/consistencybug.py b/tests/frontend/consistencyerror/plugins/consistencybug.py
index c60d81ea0..c442d883a 100644
--- a/tests/frontend/consistencyerror/plugins/consistencybug.py
+++ b/tests/frontend/consistencyerror/plugins/consistencybug.py
@@ -2,7 +2,6 @@ from buildstream import Source
class ConsistencyBugSource(Source):
-
def configure(self, node):
pass
diff --git a/tests/frontend/consistencyerror/plugins/consistencyerror.py b/tests/frontend/consistencyerror/plugins/consistencyerror.py
index bcbd1b5e9..656bd981c 100644
--- a/tests/frontend/consistencyerror/plugins/consistencyerror.py
+++ b/tests/frontend/consistencyerror/plugins/consistencyerror.py
@@ -2,7 +2,6 @@ from buildstream import Source, SourceError
class ConsistencyErrorSource(Source):
-
def configure(self, node):
pass
@@ -15,8 +14,9 @@ class ConsistencyErrorSource(Source):
def get_consistency(self):
# Raise an error unconditionally
- raise SourceError("Something went terribly wrong",
- reason="the-consistency-error")
+ raise SourceError(
+ "Something went terribly wrong", reason="the-consistency-error"
+ )
def get_ref(self):
return None
diff --git a/tests/frontend/cross_junction_workspace.py b/tests/frontend/cross_junction_workspace.py
index ca21e7548..90e68d8ac 100644
--- a/tests/frontend/cross_junction_workspace.py
+++ b/tests/frontend/cross_junction_workspace.py
@@ -13,8 +13,8 @@ def prepare_junction_project(cli, tmpdir):
os.makedirs(str(main_project))
os.makedirs(str(sub_project))
- _yaml.roundtrip_dump({'name': 'main'}, str(main_project.join("project.conf")))
- _yaml.roundtrip_dump({'name': 'sub'}, str(sub_project.join("project.conf")))
+ _yaml.roundtrip_dump({"name": "main"}, str(main_project.join("project.conf")))
+ _yaml.roundtrip_dump({"name": "sub"}, str(sub_project.join("project.conf")))
import_dir = tmpdir.join("import")
os.makedirs(str(import_dir))
@@ -26,20 +26,22 @@ def prepare_junction_project(cli, tmpdir):
import_repo = create_repo("git", str(import_repo_dir))
import_ref = import_repo.create(str(import_dir))
- _yaml.roundtrip_dump({'kind': 'import',
- 'sources': [import_repo.source_config(ref=import_ref)]},
- str(sub_project.join("data.bst")))
+ _yaml.roundtrip_dump(
+ {"kind": "import", "sources": [import_repo.source_config(ref=import_ref)]},
+ str(sub_project.join("data.bst")),
+ )
sub_repo_dir = tmpdir.join("sub_repo")
os.makedirs(str(sub_repo_dir))
sub_repo = create_repo("git", str(sub_repo_dir))
sub_ref = sub_repo.create(str(sub_project))
- _yaml.roundtrip_dump({'kind': 'junction',
- 'sources': [sub_repo.source_config(ref=sub_ref)]},
- str(main_project.join("sub.bst")))
+ _yaml.roundtrip_dump(
+ {"kind": "junction", "sources": [sub_repo.source_config(ref=sub_ref)]},
+ str(main_project.join("sub.bst")),
+ )
- args = ['source', 'fetch', 'sub.bst']
+ args = ["source", "fetch", "sub.bst"]
result = cli.run(project=str(main_project), args=args)
result.assert_success()
@@ -50,13 +52,13 @@ def open_cross_junction(cli, tmpdir):
project = prepare_junction_project(cli, tmpdir)
workspace = tmpdir.join("workspace")
- element = 'sub.bst:data.bst'
- args = ['workspace', 'open', '--directory', str(workspace), element]
+ element = "sub.bst:data.bst"
+ args = ["workspace", "open", "--directory", str(workspace), element]
result = cli.run(project=project, args=args)
result.assert_success()
- assert cli.get_element_state(project, element) == 'buildable'
- assert os.path.exists(str(workspace.join('hello.txt')))
+ assert cli.get_element_state(project, element) == "buildable"
+ assert os.path.exists(str(workspace.join("hello.txt")))
return project, workspace
@@ -68,55 +70,55 @@ def test_open_cross_junction(cli, tmpdir):
def test_list_cross_junction(cli, tmpdir):
project, _ = open_cross_junction(cli, tmpdir)
- element = 'sub.bst:data.bst'
+ element = "sub.bst:data.bst"
- args = ['workspace', 'list']
+ args = ["workspace", "list"]
result = cli.run(project=project, args=args)
result.assert_success()
loaded = _yaml.load_data(result.output)
- workspaces = loaded.get_sequence('workspaces')
+ workspaces = loaded.get_sequence("workspaces")
assert len(workspaces) == 1
first_workspace = workspaces.mapping_at(0)
- assert 'element' in first_workspace
- assert first_workspace.get_str('element') == element
+ assert "element" in first_workspace
+ assert first_workspace.get_str("element") == element
def test_close_cross_junction(cli, tmpdir):
project, workspace = open_cross_junction(cli, tmpdir)
- element = 'sub.bst:data.bst'
- args = ['workspace', 'close', '--remove-dir', element]
+ element = "sub.bst:data.bst"
+ args = ["workspace", "close", "--remove-dir", element]
result = cli.run(project=project, args=args)
result.assert_success()
assert not os.path.exists(str(workspace))
- args = ['workspace', 'list']
+ args = ["workspace", "list"]
result = cli.run(project=project, args=args)
result.assert_success()
loaded = _yaml.load_data(result.output)
- workspaces = loaded.get_sequence('workspaces')
+ workspaces = loaded.get_sequence("workspaces")
assert not workspaces
def test_close_all_cross_junction(cli, tmpdir):
project, workspace = open_cross_junction(cli, tmpdir)
- args = ['workspace', 'close', '--remove-dir', '--all']
+ args = ["workspace", "close", "--remove-dir", "--all"]
result = cli.run(project=project, args=args)
result.assert_success()
assert not os.path.exists(str(workspace))
- args = ['workspace', 'list']
+ args = ["workspace", "list"]
result = cli.run(project=project, args=args)
result.assert_success()
loaded = _yaml.load_data(result.output)
- workspaces = loaded.get_sequence('workspaces')
+ workspaces = loaded.get_sequence("workspaces")
assert not workspaces
@@ -124,17 +126,17 @@ def test_subdir_command_cross_junction(cli, tmpdir):
# i.e. commands can be run successfully from a subdirectory of the
# junction's workspace, in case project loading logic has gone wrong
project = prepare_junction_project(cli, tmpdir)
- workspace = os.path.join(str(tmpdir), 'workspace')
- junction_element = 'sub.bst'
+ workspace = os.path.join(str(tmpdir), "workspace")
+ junction_element = "sub.bst"
# Open the junction as a workspace
- args = ['workspace', 'open', '--directory', workspace, junction_element]
+ args = ["workspace", "open", "--directory", workspace, junction_element]
result = cli.run(project=project, args=args)
result.assert_success()
# Run commands from a subdirectory of the workspace
newdir = os.path.join(str(workspace), "newdir")
- element_name = 'data.bst'
+ element_name = "data.bst"
os.makedirs(newdir)
- result = cli.run(project=str(workspace), args=['-C', newdir, 'show', element_name])
+ result = cli.run(project=str(workspace), args=["-C", newdir, "show", element_name])
result.assert_success()
diff --git a/tests/frontend/fetch.py b/tests/frontend/fetch.py
index 7ea357ac2..d34764d13 100644
--- a/tests/frontend/fetch.py
+++ b/tests/frontend/fetch.py
@@ -15,54 +15,48 @@ from . import configure_project
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
-@pytest.mark.datafiles(os.path.join(TOP_DIR, 'project_world'))
+@pytest.mark.datafiles(os.path.join(TOP_DIR, "project_world"))
def test_fetch_default_targets(cli, tmpdir, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements')
- element_name = 'fetch-test.bst'
+ element_path = os.path.join(project, "elements")
+ element_name = "fetch-test.bst"
# Create our repo object of the given source type with
# the bin files, and then collect the initial ref.
#
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(project)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# Assert that a fetch is needed
- assert cli.get_element_state(project, element_name) == 'fetch needed'
+ assert cli.get_element_state(project, element_name) == "fetch needed"
# Now try to fetch it, using the default target feature
- result = cli.run(project=project, args=['source', 'fetch'])
+ result = cli.run(project=project, args=["source", "fetch"])
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
-@pytest.mark.datafiles(os.path.join(TOP_DIR, 'consistencyerror'))
+@pytest.mark.datafiles(os.path.join(TOP_DIR, "consistencyerror"))
def test_fetch_consistency_error(cli, datafiles):
project = str(datafiles)
# When the error occurs outside of the scheduler at load time,
# then the SourceError is reported directly as the main error.
- result = cli.run(project=project, args=['source', 'fetch', 'error.bst'])
- result.assert_main_error(ErrorDomain.SOURCE, 'the-consistency-error')
+ result = cli.run(project=project, args=["source", "fetch", "error.bst"])
+ result.assert_main_error(ErrorDomain.SOURCE, "the-consistency-error")
-@pytest.mark.datafiles(os.path.join(TOP_DIR, 'consistencyerror'))
+@pytest.mark.datafiles(os.path.join(TOP_DIR, "consistencyerror"))
def test_fetch_consistency_bug(cli, datafiles):
project = str(datafiles)
@@ -73,80 +67,57 @@ def test_fetch_consistency_bug(cli, datafiles):
# for a fetch command, we could report this to the user
# more gracefully as a BUG message.
#
- result = cli.run(project=project, args=['source', 'fetch', 'bug.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "bug.bst"])
assert result.exc is not None
assert str(result.exc) == "Something went terribly wrong"
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("strict", [True, False], ids=["strict", "no-strict"])
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
def test_unfetched_junction(cli, tmpdir, datafiles, strict, ref_storage):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
-
- configure_project(project, {
- 'ref-storage': ref_storage
- })
- cli.configure({
- 'projects': {
- 'test': {
- 'strict': strict
- }
- }
- })
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
+
+ configure_project(project, {"ref-storage": ref_storage})
+ cli.configure({"projects": {"test": {"strict": strict}}})
# Create a repo to hold the subproject and generate a junction element for it
- ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
+ ref = generate_junction(
+ tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
+ )
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Dump a project.refs if we're using project.refs storage
#
- if ref_storage == 'project.refs':
- project_refs = {
- 'projects': {
- 'test': {
- 'junction.bst': [
- {
- 'ref': ref
- }
- ]
- }
- }
- }
- _yaml.roundtrip_dump(project_refs, os.path.join(project, 'junction.refs'))
+ if ref_storage == "project.refs":
+ project_refs = {"projects": {"test": {"junction.bst": [{"ref": ref}]}}}
+ _yaml.roundtrip_dump(project_refs, os.path.join(project, "junction.refs"))
# Now try to fetch it, this should automatically result in fetching
# the junction itself.
- result = cli.run(project=project, args=['source', 'fetch', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "junction-dep.bst"])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
- configure_project(project, {
- 'ref-storage': ref_storage
- })
+ configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)
@@ -154,23 +125,18 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Now try to fetch it, this will bail with the appropriate error
# informing the user to track the junction first
- result = cli.run(project=project, args=['source', 'fetch', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "junction-dep.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
# Assert that we have the expected provenance encoded into the error
- element_node = _yaml.load(element_path, shortname='junction-dep.bst')
- ref_node = element_node.get_sequence('depends').mapping_at(0)
+ element_node = _yaml.load(element_path, shortname="junction-dep.bst")
+ ref_node = element_node.get_sequence("depends").mapping_at(0)
provenance = ref_node.get_provenance()
assert str(provenance) in result.stderr
diff --git a/tests/frontend/help.py b/tests/frontend/help.py
index 3bc18499b..3bbae44f5 100644
--- a/tests/frontend/help.py
+++ b/tests/frontend/help.py
@@ -8,28 +8,31 @@ from buildstream.testing.runcli import cli # pylint: disable=unused-import
def assert_help(cli_output):
expected_start = "Usage: "
if not cli_output.startswith(expected_start):
- raise AssertionError("Help output expected to begin with '{}',"
- .format(expected_start) +
- " output was: {}"
- .format(cli_output))
+ raise AssertionError(
+ "Help output expected to begin with '{}',".format(expected_start)
+ + " output was: {}".format(cli_output)
+ )
def test_help_main(cli):
- result = cli.run(args=['--help'])
+ result = cli.run(args=["--help"])
result.assert_success()
assert_help(result.output)
-@pytest.mark.parametrize("command", [
- ('artifact'),
- ('build'),
- ('checkout'),
- ('shell'),
- ('show'),
- ('source'),
- ('workspace')
-])
+@pytest.mark.parametrize(
+ "command",
+ [
+ ("artifact"),
+ ("build"),
+ ("checkout"),
+ ("shell"),
+ ("show"),
+ ("source"),
+ ("workspace"),
+ ],
+)
def test_help(cli, command):
- result = cli.run(args=[command, '--help'])
+ result = cli.run(args=[command, "--help"])
result.assert_success()
assert_help(result.output)
diff --git a/tests/frontend/init.py b/tests/frontend/init.py
index 0fdc0eda5..01686b7c6 100644
--- a/tests/frontend/init.py
+++ b/tests/frontend/init.py
@@ -13,114 +13,137 @@ from buildstream._versions import BST_FORMAT_VERSION
def test_defaults(cli, tmpdir):
project = str(tmpdir)
- project_path = os.path.join(project, 'project.conf')
+ project_path = os.path.join(project, "project.conf")
- result = cli.run(args=['init', '--project-name', 'foo', project])
+ result = cli.run(args=["init", "--project-name", "foo", project])
result.assert_success()
project_conf = _yaml.load(project_path)
- assert project_conf.get_str('name') == 'foo'
- assert project_conf.get_str('format-version') == str(BST_FORMAT_VERSION)
- assert project_conf.get_str('element-path') == 'elements'
+ assert project_conf.get_str("name") == "foo"
+ assert project_conf.get_str("format-version") == str(BST_FORMAT_VERSION)
+ assert project_conf.get_str("element-path") == "elements"
def test_all_options(cli, tmpdir):
project = str(tmpdir)
- project_path = os.path.join(project, 'project.conf')
-
- result = cli.run(args=[
- 'init',
- '--project-name', 'foo',
- '--format-version', '2',
- '--element-path', 'ponies',
- project
- ])
+ project_path = os.path.join(project, "project.conf")
+
+ result = cli.run(
+ args=[
+ "init",
+ "--project-name",
+ "foo",
+ "--format-version",
+ "2",
+ "--element-path",
+ "ponies",
+ project,
+ ]
+ )
result.assert_success()
project_conf = _yaml.load(project_path)
- assert project_conf.get_str('name') == 'foo'
- assert project_conf.get_str('format-version') == str(2)
- assert project_conf.get_str('element-path') == 'ponies'
+ assert project_conf.get_str("name") == "foo"
+ assert project_conf.get_str("format-version") == str(2)
+ assert project_conf.get_str("element-path") == "ponies"
- elements_dir = os.path.join(project, 'ponies')
+ elements_dir = os.path.join(project, "ponies")
assert os.path.isdir(elements_dir)
def test_no_project_name(cli, tmpdir):
- result = cli.run(args=['init', str(tmpdir)])
- result.assert_main_error(ErrorDomain.APP, 'unspecified-project-name')
+ result = cli.run(args=["init", str(tmpdir)])
+ result.assert_main_error(ErrorDomain.APP, "unspecified-project-name")
def test_project_exists(cli, tmpdir):
project = str(tmpdir)
- project_path = os.path.join(project, 'project.conf')
- with open(project_path, 'w') as f:
- f.write('name: pony\n')
+ project_path = os.path.join(project, "project.conf")
+ with open(project_path, "w") as f:
+ f.write("name: pony\n")
- result = cli.run(args=['init', '--project-name', 'foo', project])
- result.assert_main_error(ErrorDomain.APP, 'project-exists')
+ result = cli.run(args=["init", "--project-name", "foo", project])
+ result.assert_main_error(ErrorDomain.APP, "project-exists")
def test_force_overwrite_project(cli, tmpdir):
project = str(tmpdir)
- project_path = os.path.join(project, 'project.conf')
- with open(project_path, 'w') as f:
- f.write('name: pony\n')
+ project_path = os.path.join(project, "project.conf")
+ with open(project_path, "w") as f:
+ f.write("name: pony\n")
- result = cli.run(args=['init', '--project-name', 'foo', '--force', project])
+ result = cli.run(args=["init", "--project-name", "foo", "--force", project])
result.assert_success()
project_conf = _yaml.load(project_path)
- assert project_conf.get_str('name') == 'foo'
- assert project_conf.get_str('format-version') == str(BST_FORMAT_VERSION)
+ assert project_conf.get_str("name") == "foo"
+ assert project_conf.get_str("format-version") == str(BST_FORMAT_VERSION)
def test_relative_path_directory_as_argument(cli, tmpdir):
- project = os.path.join(str(tmpdir), 'child-directory')
+ project = os.path.join(str(tmpdir), "child-directory")
os.makedirs(project, exist_ok=True)
- project_path = os.path.join(project, 'project.conf')
+ project_path = os.path.join(project, "project.conf")
rel_path = os.path.relpath(project)
- result = cli.run(args=['init', '--project-name', 'foo', rel_path])
+ result = cli.run(args=["init", "--project-name", "foo", rel_path])
result.assert_success()
project_conf = _yaml.load(project_path)
- assert project_conf.get_str('name') == 'foo'
- assert project_conf.get_int('format-version') == BST_FORMAT_VERSION
- assert project_conf.get_str('element-path') == 'elements'
+ assert project_conf.get_str("name") == "foo"
+ assert project_conf.get_int("format-version") == BST_FORMAT_VERSION
+ assert project_conf.get_str("element-path") == "elements"
def test_set_directory_and_directory_as_argument(cli, tmpdir):
- result = cli.run(args=['-C', '/foo/bar', 'init', '--project-name', 'foo', '/boo/far'])
- result.assert_main_error(ErrorDomain.APP, 'init-with-set-directory')
+ result = cli.run(
+ args=["-C", "/foo/bar", "init", "--project-name", "foo", "/boo/far"]
+ )
+ result.assert_main_error(ErrorDomain.APP, "init-with-set-directory")
-@pytest.mark.parametrize("project_name", [('Micheal Jackson'), ('one+one')])
+@pytest.mark.parametrize("project_name", [("Micheal Jackson"), ("one+one")])
def test_bad_project_name(cli, tmpdir, project_name):
- result = cli.run(args=['init', '--project-name', str(tmpdir)])
+ result = cli.run(args=["init", "--project-name", str(tmpdir)])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_SYMBOL_NAME)
@pytest.mark.parametrize("format_version", [(str(-1)), (str(BST_FORMAT_VERSION + 1))])
def test_bad_format_version(cli, tmpdir, format_version):
- result = cli.run(args=[
- 'init', '--project-name', 'foo', '--format-version', format_version, str(tmpdir)
- ])
- result.assert_main_error(ErrorDomain.APP, 'invalid-format-version')
-
-
-@pytest.mark.parametrize("element_path", [('/absolute/path'), ('../outside/of/project')])
+ result = cli.run(
+ args=[
+ "init",
+ "--project-name",
+ "foo",
+ "--format-version",
+ format_version,
+ str(tmpdir),
+ ]
+ )
+ result.assert_main_error(ErrorDomain.APP, "invalid-format-version")
+
+
+@pytest.mark.parametrize(
+ "element_path", [("/absolute/path"), ("../outside/of/project")]
+)
def test_bad_element_path(cli, tmpdir, element_path):
- result = cli.run(args=[
- 'init', '--project-name', 'foo', '--element-path', element_path, str(tmpdir)
- ])
- result.assert_main_error(ErrorDomain.APP, 'invalid-element-path')
-
-
-@pytest.mark.parametrize("element_path", [('foo'), ('foo/bar')])
+ result = cli.run(
+ args=[
+ "init",
+ "--project-name",
+ "foo",
+ "--element-path",
+ element_path,
+ str(tmpdir),
+ ]
+ )
+ result.assert_main_error(ErrorDomain.APP, "invalid-element-path")
+
+
+@pytest.mark.parametrize("element_path", [("foo"), ("foo/bar")])
def test_element_path_interactive(cli, tmp_path, monkeypatch, element_path):
project = tmp_path
- project_conf_path = project.joinpath('project.conf')
+ project_conf_path = project.joinpath("project.conf")
class DummyInteractiveApp(App):
def __init__(self, *args, **kwargs):
@@ -131,18 +154,20 @@ def test_element_path_interactive(cli, tmp_path, monkeypatch, element_path):
def create(cls, *args, **kwargs):
return DummyInteractiveApp(*args, **kwargs)
- def _init_project_interactive(self, *args, **kwargs): # pylint: disable=arguments-differ
- return ('project_name', '0', element_path)
+ def _init_project_interactive(
+ self, *args, **kwargs
+ ): # pylint: disable=arguments-differ
+ return ("project_name", "0", element_path)
- monkeypatch.setattr(App, 'create', DummyInteractiveApp.create)
+ monkeypatch.setattr(App, "create", DummyInteractiveApp.create)
- result = cli.run(args=['init', str(project)])
+ result = cli.run(args=["init", str(project)])
result.assert_success()
full_element_path = project.joinpath(element_path)
assert full_element_path.exists()
project_conf = _yaml.load(str(project_conf_path))
- assert project_conf.get_str('name') == 'project_name'
- assert project_conf.get_str('format-version') == '0'
- assert project_conf.get_str('element-path') == element_path
+ assert project_conf.get_str("name") == "project_name"
+ assert project_conf.get_str("format-version") == "0"
+ assert project_conf.get_str("element-path") == element_path
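
As a complement to the init tests above, a small sketch (again not part of the patch) of the round trip they rely on: `bst init` writes project.conf, and the tests read it back with the internal `_yaml` loader and its typed getters. Every call here mirrors one already visible in the diff; the helper name `init_project` is hypothetical.

import os

from buildstream import _yaml


def init_project(cli, tmpdir):
    # Run `bst init` into a temporary directory, then load the generated
    # project.conf and return the parsed node for assertions.
    project = str(tmpdir)
    result = cli.run(args=["init", "--project-name", "foo", project])
    result.assert_success()
    return _yaml.load(os.path.join(project, "project.conf"))

A caller would then assert, for instance, that init_project(cli, tmpdir).get_str("name") == "foo", as the tests above do.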
diff --git a/tests/frontend/large_directory.py b/tests/frontend/large_directory.py
index 921e2ddbe..e01d5f3c6 100644
--- a/tests/frontend/large_directory.py
+++ b/tests/frontend/large_directory.py
@@ -29,10 +29,7 @@ from tests.testutils import create_artifact_share, assert_shared
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
@contextmanager
@@ -40,7 +37,9 @@ def limit_grpc_message_length(limit):
orig_insecure_channel = grpc.insecure_channel
def new_insecure_channel(target):
- return orig_insecure_channel(target, options=(('grpc.max_send_message_length', limit),))
+ return orig_insecure_channel(
+ target, options=(("grpc.max_send_message_length", limit),)
+ )
grpc.insecure_channel = new_insecure_channel
try:
@@ -58,29 +57,27 @@ def test_large_directory(cli, tmpdir, datafiles):
MAX_MESSAGE_LENGTH = 1024 * 1024
NUM_FILES = MAX_MESSAGE_LENGTH // 64 + 1
- large_directory_dir = os.path.join(project, 'files', 'large-directory')
+ large_directory_dir = os.path.join(project, "files", "large-directory")
os.mkdir(large_directory_dir)
for i in range(NUM_FILES):
- with open(os.path.join(large_directory_dir, str(i)), 'w') as f:
+ with open(os.path.join(large_directory_dir, str(i)), "w") as f:
# The files need to have different content as we want different digests.
f.write(str(i))
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Configure bst to push to the artifact share
- cli.configure({
- 'artifacts': [
- {'url': share.repo, 'push': True},
- ]
- })
+ cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
# Enforce 1 MB gRPC message limit
with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
# Build and push
- result = cli.run(project=project, args=['build', 'import-large-directory.bst'])
+ result = cli.run(
+ project=project, args=["build", "import-large-directory.bst"]
+ )
result.assert_success()
# Assert that we are now cached locally
- assert cli.get_element_state(project, 'import-large-directory.bst') == 'cached'
+ assert cli.get_element_state(project, "import-large-directory.bst") == "cached"
# Assert that the push was successful
- assert_shared(cli, share, project, 'import-large-directory.bst')
+ assert_shared(cli, share, project, "import-large-directory.bst")
diff --git a/tests/frontend/logging.py b/tests/frontend/logging.py
index 462af821f..d4f8d0d23 100644
--- a/tests/frontend/logging.py
+++ b/tests/frontend/logging.py
@@ -13,77 +13,69 @@ from buildstream._exceptions import ErrorDomain
from buildstream.testing import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
@pytest.mark.datafiles(DATA_DIR)
def test_default_logging(cli, tmpdir, datafiles):
project = str(datafiles)
- bin_files_path = os.path.join(project, 'files', 'bin-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'fetch-test-git.bst'
+ bin_files_path = os.path.join(project, "files", "bin-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "fetch-test-git.bst"
# Create our repo object of the given source type with
# the bin files, and then collect the initial ref.
#
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(bin_files_path)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# Now try to fetch it
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
- m = re.search(r"\[\d\d:\d\d:\d\d\]\[\s*\]\[.*\] SUCCESS Checking sources", result.stderr)
+ m = re.search(
+ r"\[\d\d:\d\d:\d\d\]\[\s*\]\[.*\] SUCCESS Checking sources", result.stderr
+ )
assert m is not None
@pytest.mark.datafiles(DATA_DIR)
def test_custom_logging(cli, tmpdir, datafiles):
project = str(datafiles)
- bin_files_path = os.path.join(project, 'files', 'bin-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'fetch-test-git.bst'
-
- custom_log_format = ('%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us},'
- '%{key},%{element},%{action},%{message}')
- user_config = {'logging': {'message-format': custom_log_format}}
+ bin_files_path = os.path.join(project, "files", "bin-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "fetch-test-git.bst"
+
+ custom_log_format = (
+ "%{elapsed},%{elapsed-us},%{wallclock},%{wallclock-us},"
+ "%{key},%{element},%{action},%{message}"
+ )
+ user_config = {"logging": {"message-format": custom_log_format}}
cli.configure(user_config)
# Create our repo object of the given source type with
# the bin files, and then collect the initial ref.
#
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(bin_files_path)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# Now try to fetch it
- result = cli.run(project=project, args=['source', 'fetch', element_name])
+ result = cli.run(project=project, args=["source", "fetch", element_name])
result.assert_success()
- m = re.search(r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*"
- r",SUCCESS,Checking sources", result.stderr)
+ m = re.search(
+ r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6}\s*,.*"
+ r",SUCCESS,Checking sources",
+ result.stderr,
+ )
assert m is not None
@@ -92,19 +84,14 @@ def test_failed_build_listing(cli, datafiles):
project = str(datafiles)
element_names = []
for i in range(3):
- element_name = 'testfail-{}.bst'.format(i)
- element_path = os.path.join('elements', element_name)
- element = {
- 'kind': 'script',
- 'config': {
- 'commands': [
- 'false'
- ]
- }
- }
+ element_name = "testfail-{}.bst".format(i)
+ element_path = os.path.join("elements", element_name)
+ element = {"kind": "script", "config": {"commands": ["false"]}}
_yaml.roundtrip_dump(element, os.path.join(project, element_path))
element_names.append(element_name)
- result = cli.run(project=project, args=['--on-error=continue', 'build', *element_names])
+ result = cli.run(
+ project=project, args=["--on-error=continue", "build", *element_names]
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
# Check that we re-print the failure summaries only in the "Failure Summary"
@@ -115,10 +102,14 @@ def test_failed_build_listing(cli, datafiles):
# testfail-0.bst:
# [00:00:00][44f1b8c3][ build:testfail-0.bst ] FAILURE Running 'commands'
#
- failure_heading_pos = re.search(r'^Failure Summary$', result.stderr, re.MULTILINE).start()
- pipeline_heading_pos = re.search(r'^Pipeline Summary$', result.stderr, re.MULTILINE).start()
+ failure_heading_pos = re.search(
+ r"^Failure Summary$", result.stderr, re.MULTILINE
+ ).start()
+ pipeline_heading_pos = re.search(
+ r"^Pipeline Summary$", result.stderr, re.MULTILINE
+ ).start()
failure_summary_range = range(failure_heading_pos, pipeline_heading_pos)
- matches = tuple(re.finditer(r'^\s+testfail-.\.bst:$', result.stderr, re.MULTILINE))
+ matches = tuple(re.finditer(r"^\s+testfail-.\.bst:$", result.stderr, re.MULTILINE))
for m in matches:
assert m.start() in failure_summary_range
assert m.end() in failure_summary_range
@@ -128,4 +119,6 @@ def test_failed_build_listing(cli, datafiles):
# with the name of the relevant element, e.g. 'testfail-1.bst'. Check that
# they have the name as expected.
pattern = r"\[..:..:..\] FAILURE testfail-.\.bst: Staged artifacts do not provide command 'sh'"
- assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 6 # each element should be matched twice.
+ assert (
+ len(re.findall(pattern, result.stderr, re.MULTILINE)) == 6
+ ) # each element should be matched twice.
diff --git a/tests/frontend/main.py b/tests/frontend/main.py
index 0df52e2c9..d864a0b1f 100644
--- a/tests/frontend/main.py
+++ b/tests/frontend/main.py
@@ -5,7 +5,7 @@ from buildstream._frontend.app import _prefix_choice_value_proc
def test_prefix_choice_value_proc_full_match():
- value_proc = _prefix_choice_value_proc(['foo', 'bar', 'baz'])
+ value_proc = _prefix_choice_value_proc(["foo", "bar", "baz"])
assert value_proc("foo") == "foo"
assert value_proc("bar") == "bar"
@@ -13,13 +13,13 @@ def test_prefix_choice_value_proc_full_match():
def test_prefix_choice_value_proc_prefix_match():
- value_proc = _prefix_choice_value_proc(['foo'])
+ value_proc = _prefix_choice_value_proc(["foo"])
assert value_proc("f") == "foo"
def test_prefix_choice_value_proc_ambigous_match():
- value_proc = _prefix_choice_value_proc(['bar', 'baz'])
+ value_proc = _prefix_choice_value_proc(["bar", "baz"])
assert value_proc("bar") == "bar"
assert value_proc("baz") == "baz"
@@ -28,7 +28,7 @@ def test_prefix_choice_value_proc_ambigous_match():
def test_prefix_choice_value_proc_value_not_in_choices():
- value_proc = _prefix_choice_value_proc(['bar', 'baz'])
+ value_proc = _prefix_choice_value_proc(["bar", "baz"])
with pytest.raises(click.UsageError):
value_proc("foo")
diff --git a/tests/frontend/mirror.py b/tests/frontend/mirror.py
index 855155785..dbd21e1e9 100644
--- a/tests/frontend/mirror.py
+++ b/tests/frontend/mirror.py
@@ -11,15 +11,15 @@ from buildstream.testing import cli # pylint: disable=unused-import
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def generate_element(output_file):
element = {
- 'kind': 'import',
- 'sources': [
+ "kind": "import",
+ "sources": [
{
- 'kind': 'fetch_source',
+ "kind": "fetch_source",
"output-text": output_file,
"urls": ["foo:repo1", "bar:repo2"],
"fetch-succeeds": {
@@ -31,53 +31,26 @@ def generate_element(output_file):
"RBA/repo2": False,
"ooF/repo1": False,
"raB/repo2": False,
- }
+ },
}
- ]
+ ],
}
return element
def generate_project():
project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- 'foo': 'FOO/',
- 'bar': 'BAR/',
- },
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- 'foo': ['OOF/'],
- 'bar': ['RAB/'],
- },
- },
- {
- 'name': 'arrakis',
- 'aliases': {
- 'foo': ['OFO/'],
- 'bar': ['RBA/'],
- },
- },
- {
- 'name': 'oz',
- 'aliases': {
- 'foo': ['ooF/'],
- 'bar': ['raB/'],
- }
- },
+ "name": "test",
+ "element-path": "elements",
+ "aliases": {"foo": "FOO/", "bar": "BAR/",},
+ "mirrors": [
+ {"name": "middle-earth", "aliases": {"foo": ["OOF/"], "bar": ["RAB/"],},},
+ {"name": "arrakis", "aliases": {"foo": ["OFO/"], "bar": ["RBA/"],},},
+ {"name": "oz", "aliases": {"foo": ["ooF/"], "bar": ["raB/"],}},
+ ],
+ "plugins": [
+ {"origin": "local", "path": "sources", "sources": {"fetch_source": 0}}
],
- 'plugins': [
- {
- 'origin': 'local',
- 'path': 'sources',
- 'sources': {
- 'fetch_source': 0
- }
- }
- ]
}
return project
@@ -86,74 +59,68 @@ def generate_project():
@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
@pytest.mark.parametrize("mirror", [("no-mirror"), ("mirror"), ("unrelated-mirror")])
def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
- bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
- upstream_repodir = os.path.join(str(tmpdir), 'upstream')
- mirror_repodir = os.path.join(str(tmpdir), 'mirror')
- project_dir = os.path.join(str(tmpdir), 'project')
+ bin_files_path = os.path.join(str(datafiles), "files", "bin-files", "usr")
+ dev_files_path = os.path.join(str(datafiles), "files", "dev-files", "usr")
+ upstream_repodir = os.path.join(str(tmpdir), "upstream")
+ mirror_repodir = os.path.join(str(tmpdir), "mirror")
+ project_dir = os.path.join(str(tmpdir), "project")
os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
+ element_dir = os.path.join(project_dir, "elements")
# Create repo objects of the upstream and mirror
- upstream_repo = create_repo('tar', upstream_repodir)
+ upstream_repo = create_repo("tar", upstream_repodir)
upstream_repo.create(bin_files_path)
mirror_repo = upstream_repo.copy(mirror_repodir)
upstream_ref = upstream_repo.create(dev_files_path)
element = {
- 'kind': 'import',
- 'sources': [
- upstream_repo.source_config(ref=upstream_ref if ref_storage == 'inline' else None)
- ]
+ "kind": "import",
+ "sources": [
+ upstream_repo.source_config(
+ ref=upstream_ref if ref_storage == "inline" else None
+ )
+ ],
}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
upstream_map, repo_name = os.path.split(full_repo)
- alias = 'foo'
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
- full_mirror = mirror_repo.source_config()['url']
+ alias = "foo"
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
+ full_mirror = mirror_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
os.makedirs(element_dir)
_yaml.roundtrip_dump(element, element_path)
- if ref_storage == 'project.refs':
+ if ref_storage == "project.refs":
# Manually set project.refs to avoid caching the repo prematurely
- project_refs = {'projects': {
- 'test': {
- element_name: [
- {'ref': upstream_ref}
- ]
- }
- }}
- project_refs_path = os.path.join(project_dir, 'project.refs')
+ project_refs = {"projects": {"test": {element_name: [{"ref": upstream_ref}]}}}
+ project_refs_path = os.path.join(project_dir, "project.refs")
_yaml.roundtrip_dump(project_refs, project_refs_path)
project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: upstream_map + "/"
- },
- 'ref-storage': ref_storage
+ "name": "test",
+ "element-path": "elements",
+ "aliases": {alias: upstream_map + "/"},
+ "ref-storage": ref_storage,
}
- if mirror != 'no-mirror':
- mirror_data = [{
- 'name': 'middle-earth',
- 'aliases': {alias: [mirror_map + '/']}
- }]
- if mirror == 'unrelated-mirror':
- mirror_data.insert(0, {
- 'name': 'narnia',
- 'aliases': {'frob': ['http://www.example.com/repo']}
- })
- project['mirrors'] = mirror_data
-
- project_file = os.path.join(project_dir, 'project.conf')
+ if mirror != "no-mirror":
+ mirror_data = [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"]}}]
+ if mirror == "unrelated-mirror":
+ mirror_data.insert(
+ 0,
+ {
+ "name": "narnia",
+ "aliases": {"frob": ["http://www.example.com/repo"]},
+ },
+ )
+ project["mirrors"] = mirror_data
+
+ project_file = os.path.join(project_dir, "project.conf")
_yaml.roundtrip_dump(project, project_file)
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@@ -162,18 +129,18 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
def test_mirror_fetch_multi(cli, tmpdir):
output_file = os.path.join(str(tmpdir), "output.txt")
project_dir = str(tmpdir)
- element_dir = os.path.join(project_dir, 'elements')
+ element_dir = os.path.join(project_dir, "elements")
os.makedirs(element_dir, exist_ok=True)
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
element = generate_element(output_file)
_yaml.roundtrip_dump(element, element_path)
- project_file = os.path.join(project_dir, 'project.conf')
+ project_file = os.path.join(project_dir, "project.conf")
project = generate_project()
_yaml.roundtrip_dump(project, project_file)
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
with open(output_file) as f:
contents = f.read()
@@ -186,18 +153,21 @@ def test_mirror_fetch_multi(cli, tmpdir):
def test_mirror_fetch_default_cmdline(cli, tmpdir):
output_file = os.path.join(str(tmpdir), "output.txt")
project_dir = str(tmpdir)
- element_dir = os.path.join(project_dir, 'elements')
+ element_dir = os.path.join(project_dir, "elements")
os.makedirs(element_dir, exist_ok=True)
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
element = generate_element(output_file)
_yaml.roundtrip_dump(element, element_path)
- project_file = os.path.join(project_dir, 'project.conf')
+ project_file = os.path.join(project_dir, "project.conf")
project = generate_project()
_yaml.roundtrip_dump(project, project_file)
- result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'source', 'fetch', element_name])
+ result = cli.run(
+ project=project_dir,
+ args=["--default-mirror", "arrakis", "source", "fetch", element_name],
+ )
result.assert_success()
with open(output_file) as f:
contents = f.read()
@@ -209,7 +179,9 @@ def test_mirror_fetch_default_cmdline(cli, tmpdir):
me_str = "OOF/repo1"
me_pos = contents.find(me_str)
assert me_pos != -1, "'{}' wasn't found".format(me_str)
- assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(
+ arrakis_str, me_str
+ )
@pytest.mark.datafiles(DATA_DIR)
@@ -217,27 +189,21 @@ def test_mirror_fetch_default_cmdline(cli, tmpdir):
def test_mirror_fetch_default_userconfig(cli, tmpdir):
output_file = os.path.join(str(tmpdir), "output.txt")
project_dir = str(tmpdir)
- element_dir = os.path.join(project_dir, 'elements')
+ element_dir = os.path.join(project_dir, "elements")
os.makedirs(element_dir, exist_ok=True)
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
element = generate_element(output_file)
_yaml.roundtrip_dump(element, element_path)
- project_file = os.path.join(project_dir, 'project.conf')
+ project_file = os.path.join(project_dir, "project.conf")
project = generate_project()
_yaml.roundtrip_dump(project, project_file)
- userconfig = {
- 'projects': {
- 'test': {
- 'default-mirror': 'oz'
- }
- }
- }
+ userconfig = {"projects": {"test": {"default-mirror": "oz"}}}
cli.configure(userconfig)
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
with open(output_file) as f:
contents = f.read()
@@ -257,27 +223,24 @@ def test_mirror_fetch_default_userconfig(cli, tmpdir):
def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir):
output_file = os.path.join(str(tmpdir), "output.txt")
project_dir = str(tmpdir)
- element_dir = os.path.join(project_dir, 'elements')
+ element_dir = os.path.join(project_dir, "elements")
os.makedirs(element_dir, exist_ok=True)
element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
element = generate_element(output_file)
_yaml.roundtrip_dump(element, element_path)
- project_file = os.path.join(project_dir, 'project.conf')
+ project_file = os.path.join(project_dir, "project.conf")
project = generate_project()
_yaml.roundtrip_dump(project, project_file)
- userconfig = {
- 'projects': {
- 'test': {
- 'default-mirror': 'oz'
- }
- }
- }
+ userconfig = {"projects": {"test": {"default-mirror": "oz"}}}
cli.configure(userconfig)
- result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'source', 'fetch', element_name])
+ result = cli.run(
+ project=project_dir,
+ args=["--default-mirror", "arrakis", "source", "fetch", element_name],
+ )
result.assert_success()
with open(output_file) as f:
contents = f.read()
@@ -289,86 +252,79 @@ def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir):
me_str = "OOF/repo1"
me_pos = contents.find(me_str)
assert me_pos != -1, "'{}' wasn't found".format(me_str)
- assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(
+ arrakis_str, me_str
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_mirror_git_submodule_fetch(cli, tmpdir, datafiles):
# Test that it behaves as expected with submodules, both defined in config
# and discovered when fetching.
- foo_file = os.path.join(str(datafiles), 'files', 'foo')
- bar_file = os.path.join(str(datafiles), 'files', 'bar')
- bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
- mirror_dir = os.path.join(str(datafiles), 'mirror')
+ foo_file = os.path.join(str(datafiles), "files", "foo")
+ bar_file = os.path.join(str(datafiles), "files", "bar")
+ bin_files_path = os.path.join(str(datafiles), "files", "bin-files", "usr")
+ dev_files_path = os.path.join(str(datafiles), "files", "dev-files", "usr")
+ mirror_dir = os.path.join(str(datafiles), "mirror")
- defined_subrepo = create_repo('git', str(tmpdir), 'defined_subrepo')
+ defined_subrepo = create_repo("git", str(tmpdir), "defined_subrepo")
defined_subrepo.create(bin_files_path)
defined_subrepo.copy(mirror_dir)
defined_subrepo.add_file(foo_file)
- found_subrepo = create_repo('git', str(tmpdir), 'found_subrepo')
+ found_subrepo = create_repo("git", str(tmpdir), "found_subrepo")
found_subrepo.create(dev_files_path)
- main_repo = create_repo('git', str(tmpdir))
+ main_repo = create_repo("git", str(tmpdir))
main_mirror_ref = main_repo.create(bin_files_path)
- main_repo.add_submodule('defined', 'file://' + defined_subrepo.repo)
- main_repo.add_submodule('found', 'file://' + found_subrepo.repo)
+ main_repo.add_submodule("defined", "file://" + defined_subrepo.repo)
+ main_repo.add_submodule("found", "file://" + found_subrepo.repo)
main_mirror = main_repo.copy(mirror_dir)
main_repo.add_file(bar_file)
- project_dir = os.path.join(str(tmpdir), 'project')
+ project_dir = os.path.join(str(tmpdir), "project")
os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
+ element_dir = os.path.join(project_dir, "elements")
os.makedirs(element_dir)
element = {
- 'kind': 'import',
- 'sources': [
- main_repo.source_config(ref=main_mirror_ref)
- ]
+ "kind": "import",
+ "sources": [main_repo.source_config(ref=main_mirror_ref)],
}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
# Alias the main repo
- full_repo = element['sources'][0]['url']
+ full_repo = element["sources"][0]["url"]
_, repo_name = os.path.split(full_repo)
- alias = 'foo'
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['url'] = aliased_repo
+ alias = "foo"
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["url"] = aliased_repo
# Hide the found subrepo
- del element['sources'][0]['submodules']['found']
+ del element["sources"][0]["submodules"]["found"]
# Alias the defined subrepo
- subrepo = element['sources'][0]['submodules']['defined']['url']
+ subrepo = element["sources"][0]["submodules"]["defined"]["url"]
_, repo_name = os.path.split(subrepo)
- aliased_repo = alias + ':' + repo_name
- element['sources'][0]['submodules']['defined']['url'] = aliased_repo
+ aliased_repo = alias + ":" + repo_name
+ element["sources"][0]["submodules"]["defined"]["url"] = aliased_repo
_yaml.roundtrip_dump(element, element_path)
- full_mirror = main_mirror.source_config()['url']
+ full_mirror = main_mirror.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: 'http://www.example.com/'
- },
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- },
- },
- ]
+ "name": "test",
+ "element-path": "elements",
+ "aliases": {alias: "http://www.example.com/"},
+ "mirrors": [
+ {"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},
+ ],
}
- project_file = os.path.join(project_dir, 'project.conf')
+ project_file = os.path.join(project_dir, "project.conf")
_yaml.roundtrip_dump(project, project_file)
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
@@ -383,90 +339,84 @@ def test_mirror_fallback_git_only_submodules(cli, tmpdir, datafiles):
# - overriden submodule is fetched from mirror.
# - other submodule is fetched.
- bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ bin_files_path = os.path.join(str(datafiles), "files", "bin-files", "usr")
+ dev_files_path = os.path.join(str(datafiles), "files", "dev-files", "usr")
- upstream_bin_repodir = os.path.join(str(tmpdir), 'bin-upstream')
- mirror_bin_repodir = os.path.join(str(tmpdir), 'bin-mirror')
- upstream_bin_repo = create_repo('git', upstream_bin_repodir)
+ upstream_bin_repodir = os.path.join(str(tmpdir), "bin-upstream")
+ mirror_bin_repodir = os.path.join(str(tmpdir), "bin-mirror")
+ upstream_bin_repo = create_repo("git", upstream_bin_repodir)
upstream_bin_repo.create(bin_files_path)
mirror_bin_repo = upstream_bin_repo.copy(mirror_bin_repodir)
- dev_repodir = os.path.join(str(tmpdir), 'dev-upstream')
- dev_repo = create_repo('git', dev_repodir)
+ dev_repodir = os.path.join(str(tmpdir), "dev-upstream")
+ dev_repo = create_repo("git", dev_repodir)
dev_repo.create(dev_files_path)
- main_files = os.path.join(str(tmpdir), 'main-files')
+ main_files = os.path.join(str(tmpdir), "main-files")
os.makedirs(main_files)
- with open(os.path.join(main_files, 'README'), 'w') as f:
+ with open(os.path.join(main_files, "README"), "w") as f:
f.write("TEST\n")
- main_repodir = os.path.join(str(tmpdir), 'main-upstream')
- main_repo = create_repo('git', main_repodir)
+ main_repodir = os.path.join(str(tmpdir), "main-upstream")
+ main_repo = create_repo("git", main_repodir)
main_repo.create(main_files)
- upstream_url = 'file://{}'.format(upstream_bin_repo.repo)
- main_repo.add_submodule('bin', url=upstream_url)
- main_repo.add_submodule('dev', url='file://{}'.format(dev_repo.repo))
+ upstream_url = "file://{}".format(upstream_bin_repo.repo)
+ main_repo.add_submodule("bin", url=upstream_url)
+ main_repo.add_submodule("dev", url="file://{}".format(dev_repo.repo))
# Unlist 'dev'.
- del main_repo.submodules['dev']
+ del main_repo.submodules["dev"]
main_ref = main_repo.latest_commit()
upstream_map, repo_name = os.path.split(upstream_url)
- alias = 'foo'
- aliased_repo = '{}:{}'.format(alias, repo_name)
- main_repo.submodules['bin']['url'] = aliased_repo
+ alias = "foo"
+ aliased_repo = "{}:{}".format(alias, repo_name)
+ main_repo.submodules["bin"]["url"] = aliased_repo
- full_mirror = mirror_bin_repo.source_config()['url']
+ full_mirror = mirror_bin_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
- project_dir = os.path.join(str(tmpdir), 'project')
+ project_dir = os.path.join(str(tmpdir), "project")
os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
+ element_dir = os.path.join(project_dir, "elements")
element = {
- 'kind': 'import',
- 'sources': [
+ "kind": "import",
+ "sources": [
main_repo.source_config_extra(ref=main_ref, checkout_submodules=True)
- ]
+ ],
}
- element_name = 'test.bst'
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
os.makedirs(element_dir)
_yaml.roundtrip_dump(element, element_path)
project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: upstream_map + "/"
- },
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- }
- }
- ]
+ "name": "test",
+ "element-path": "elements",
+ "aliases": {alias: upstream_map + "/"},
+ "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}],
}
- project_file = os.path.join(project_dir, 'project.conf')
+ project_file = os.path.join(project_dir, "project.conf")
_yaml.roundtrip_dump(project, project_file)
# Now make the upstream unavailable.
- os.rename(upstream_bin_repo.repo, '{}.bak'.format(upstream_bin_repo.repo))
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ os.rename(upstream_bin_repo.repo, "{}.bak".format(upstream_bin_repo.repo))
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
- result = cli.run(project=project_dir, args=['build', element_name])
+ result = cli.run(project=project_dir, args=["build", element_name])
result.assert_success()
- checkout = os.path.join(str(tmpdir), 'checkout')
- result = cli.run(project=project_dir, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ checkout = os.path.join(str(tmpdir), "checkout")
+ result = cli.run(
+ project=project_dir,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
result.assert_success()
- assert os.path.exists(os.path.join(checkout, 'bin', 'bin', 'hello'))
- assert os.path.exists(os.path.join(checkout, 'dev', 'include', 'pony.h'))
+ assert os.path.exists(os.path.join(checkout, "bin", "bin", "hello"))
+ assert os.path.exists(os.path.join(checkout, "dev", "include", "pony.h"))
@pytest.mark.datafiles(DATA_DIR)
@@ -476,90 +426,86 @@ def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles):
# We expect:
# - we will fetch submodules anyway
- bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
- dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+ bin_files_path = os.path.join(str(datafiles), "files", "bin-files", "usr")
+ dev_files_path = os.path.join(str(datafiles), "files", "dev-files", "usr")
- bin_repodir = os.path.join(str(tmpdir), 'bin-repo')
- bin_repo = create_repo('git', bin_repodir)
+ bin_repodir = os.path.join(str(tmpdir), "bin-repo")
+ bin_repo = create_repo("git", bin_repodir)
bin_repo.create(bin_files_path)
- dev_repodir = os.path.join(str(tmpdir), 'dev-repo')
- dev_repo = create_repo('git', dev_repodir)
+ dev_repodir = os.path.join(str(tmpdir), "dev-repo")
+ dev_repo = create_repo("git", dev_repodir)
dev_repo.create(dev_files_path)
- main_files = os.path.join(str(tmpdir), 'main-files')
+ main_files = os.path.join(str(tmpdir), "main-files")
os.makedirs(main_files)
- with open(os.path.join(main_files, 'README'), 'w') as f:
+ with open(os.path.join(main_files, "README"), "w") as f:
f.write("TEST\n")
- upstream_main_repodir = os.path.join(str(tmpdir), 'main-upstream')
- upstream_main_repo = create_repo('git', upstream_main_repodir)
+ upstream_main_repodir = os.path.join(str(tmpdir), "main-upstream")
+ upstream_main_repo = create_repo("git", upstream_main_repodir)
upstream_main_repo.create(main_files)
- upstream_main_repo.add_submodule('bin', url='file://{}'.format(bin_repo.repo))
- upstream_main_repo.add_submodule('dev', url='file://{}'.format(dev_repo.repo))
+ upstream_main_repo.add_submodule("bin", url="file://{}".format(bin_repo.repo))
+ upstream_main_repo.add_submodule("dev", url="file://{}".format(dev_repo.repo))
# Unlist submodules.
- del upstream_main_repo.submodules['bin']
- del upstream_main_repo.submodules['dev']
+ del upstream_main_repo.submodules["bin"]
+ del upstream_main_repo.submodules["dev"]
upstream_main_ref = upstream_main_repo.latest_commit()
- mirror_main_repodir = os.path.join(str(tmpdir), 'main-mirror')
+ mirror_main_repodir = os.path.join(str(tmpdir), "main-mirror")
mirror_main_repo = upstream_main_repo.copy(mirror_main_repodir)
- upstream_url = mirror_main_repo.source_config()['url']
+ upstream_url = mirror_main_repo.source_config()["url"]
upstream_map, repo_name = os.path.split(upstream_url)
- alias = 'foo'
- aliased_repo = '{}:{}'.format(alias, repo_name)
+ alias = "foo"
+ aliased_repo = "{}:{}".format(alias, repo_name)
- full_mirror = mirror_main_repo.source_config()['url']
+ full_mirror = mirror_main_repo.source_config()["url"]
mirror_map, _ = os.path.split(full_mirror)
- project_dir = os.path.join(str(tmpdir), 'project')
+ project_dir = os.path.join(str(tmpdir), "project")
os.makedirs(project_dir)
- element_dir = os.path.join(project_dir, 'elements')
+ element_dir = os.path.join(project_dir, "elements")
element = {
- 'kind': 'import',
- 'sources': [
- upstream_main_repo.source_config_extra(ref=upstream_main_ref, checkout_submodules=True)
- ]
+ "kind": "import",
+ "sources": [
+ upstream_main_repo.source_config_extra(
+ ref=upstream_main_ref, checkout_submodules=True
+ )
+ ],
}
- element['sources'][0]['url'] = aliased_repo
- element_name = 'test.bst'
+ element["sources"][0]["url"] = aliased_repo
+ element_name = "test.bst"
element_path = os.path.join(element_dir, element_name)
os.makedirs(element_dir)
_yaml.roundtrip_dump(element, element_path)
project = {
- 'name': 'test',
- 'element-path': 'elements',
- 'aliases': {
- alias: upstream_map + "/"
- },
- 'mirrors': [
- {
- 'name': 'middle-earth',
- 'aliases': {
- alias: [mirror_map + "/"],
- }
- }
- ]
+ "name": "test",
+ "element-path": "elements",
+ "aliases": {alias: upstream_map + "/"},
+ "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}],
}
- project_file = os.path.join(project_dir, 'project.conf')
+ project_file = os.path.join(project_dir, "project.conf")
_yaml.roundtrip_dump(project, project_file)
# Now make the upstream unavailable.
- os.rename(upstream_main_repo.repo, '{}.bak'.format(upstream_main_repo.repo))
- result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ os.rename(upstream_main_repo.repo, "{}.bak".format(upstream_main_repo.repo))
+ result = cli.run(project=project_dir, args=["source", "fetch", element_name])
result.assert_success()
- result = cli.run(project=project_dir, args=['build', element_name])
+ result = cli.run(project=project_dir, args=["build", element_name])
result.assert_success()
- checkout = os.path.join(str(tmpdir), 'checkout')
- result = cli.run(project=project_dir, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ checkout = os.path.join(str(tmpdir), "checkout")
+ result = cli.run(
+ project=project_dir,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
result.assert_success()
- assert os.path.exists(os.path.join(checkout, 'bin', 'bin', 'hello'))
- assert os.path.exists(os.path.join(checkout, 'dev', 'include', 'pony.h'))
+ assert os.path.exists(os.path.join(checkout, "bin", "bin", "hello"))
+ assert os.path.exists(os.path.join(checkout, "dev", "include", "pony.h"))
diff --git a/tests/frontend/order.py b/tests/frontend/order.py
index c62377419..a66064694 100644
--- a/tests/frontend/order.py
+++ b/tests/frontend/order.py
@@ -9,10 +9,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
from buildstream import _yaml
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# create_element()
@@ -25,17 +22,15 @@ DATA_DIR = os.path.join(
# Returns:
# (Repo): The corresponding git repository created for the element
def create_element(project, name, dependencies):
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- repo = create_repo('git', project, "{}-repo".format(name))
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ repo = create_repo("git", project, "{}-repo".format(name))
ref = repo.create(dev_files_path)
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ],
- 'depends': dependencies
+ "kind": "import",
+ "sources": [repo.source_config(ref=ref)],
+ "depends": dependencies,
}
_yaml.roundtrip_dump(element, os.path.join(element_path, name))
@@ -56,37 +51,55 @@ def create_element(project, name, dependencies):
# expected (list): A list of element names in the expected order
#
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.parametrize("target,template,expected", [
- # First simple test
- ('3.bst', {
- '0.bst': ['1.bst'],
- '1.bst': [],
- '2.bst': ['0.bst'],
- '3.bst': ['0.bst', '1.bst', '2.bst']
- }, ['1.bst', '0.bst', '2.bst', '3.bst']),
-
- # A more complicated test with build of build dependencies
- ('target.bst', {
- 'a.bst': [],
- 'base.bst': [],
- 'timezones.bst': [],
- 'middleware.bst': [{'filename': 'base.bst', 'type': 'build'}],
- 'app.bst': [{'filename': 'middleware.bst', 'type': 'build'}],
- 'target.bst': ['a.bst', 'base.bst', 'middleware.bst', 'app.bst', 'timezones.bst']
- }, ['base.bst', 'middleware.bst', 'a.bst', 'app.bst', 'timezones.bst', 'target.bst']),
-])
-@pytest.mark.parametrize("operation", [('show'), ('fetch'), ('build')])
+@pytest.mark.parametrize(
+ "target,template,expected",
+ [
+ # First simple test
+ (
+ "3.bst",
+ {
+ "0.bst": ["1.bst"],
+ "1.bst": [],
+ "2.bst": ["0.bst"],
+ "3.bst": ["0.bst", "1.bst", "2.bst"],
+ },
+ ["1.bst", "0.bst", "2.bst", "3.bst"],
+ ),
+ # A more complicated test with build of build dependencies
+ (
+ "target.bst",
+ {
+ "a.bst": [],
+ "base.bst": [],
+ "timezones.bst": [],
+ "middleware.bst": [{"filename": "base.bst", "type": "build"}],
+ "app.bst": [{"filename": "middleware.bst", "type": "build"}],
+ "target.bst": [
+ "a.bst",
+ "base.bst",
+ "middleware.bst",
+ "app.bst",
+ "timezones.bst",
+ ],
+ },
+ [
+ "base.bst",
+ "middleware.bst",
+ "a.bst",
+ "app.bst",
+ "timezones.bst",
+ "target.bst",
+ ],
+ ),
+ ],
+)
+@pytest.mark.parametrize("operation", [("show"), ("fetch"), ("build")])
def test_order(cli, datafiles, operation, target, template, expected):
project = str(datafiles)
# Configure to only allow one fetcher at a time, make it easy to
# determine what is being planned in what order.
- cli.configure({
- 'scheduler': {
- 'fetchers': 1,
- 'builders': 1
- }
- })
+ cli.configure({"scheduler": {"fetchers": 1, "builders": 1}})
# Build the project from the template, make import elements
# all with the same repo
@@ -95,13 +108,19 @@ def test_order(cli, datafiles, operation, target, template, expected):
create_element(project, element, dependencies)
# Run test and collect results
- if operation == 'show':
- result = cli.run(args=['show', '--deps', 'plan', '--format', '%{name}', target], project=project, silent=True)
+ if operation == "show":
+ result = cli.run(
+ args=["show", "--deps", "plan", "--format", "%{name}", target],
+ project=project,
+ silent=True,
+ )
result.assert_success()
results = result.output.splitlines()
else:
- if operation == 'fetch':
- result = cli.run(args=['source', 'fetch', target], project=project, silent=True)
+ if operation == "fetch":
+ result = cli.run(
+ args=["source", "fetch", target], project=project, silent=True
+ )
else:
result = cli.run(args=[operation, target], project=project, silent=True)
result.assert_success()
diff --git a/tests/frontend/overlaps.py b/tests/frontend/overlaps.py
index eb2cd4a86..4f6f72af5 100644
--- a/tests/frontend/overlaps.py
+++ b/tests/frontend/overlaps.py
@@ -10,16 +10,13 @@ from buildstream.plugin import CoreWarnings
from tests.testutils import generate_junction
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "overlaps"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "overlaps")
-def gen_project(project_dir, fail_on_overlap, use_fatal_warnings=True, project_name="test"):
- template = {
- "name": project_name
- }
+def gen_project(
+ project_dir, fail_on_overlap, use_fatal_warnings=True, project_name="test"
+):
+ template = {"name": project_name}
if use_fatal_warnings:
template["fatal-warnings"] = [CoreWarnings.OVERLAPS] if fail_on_overlap else []
else:
@@ -33,8 +30,7 @@ def gen_project(project_dir, fail_on_overlap, use_fatal_warnings=True, project_n
def test_overlaps(cli, datafiles, use_fatal_warnings):
project_dir = str(datafiles)
gen_project(project_dir, False, use_fatal_warnings)
- result = cli.run(project=project_dir, silent=True, args=[
- 'build', 'collect.bst'])
+ result = cli.run(project=project_dir, silent=True, args=["build", "collect.bst"])
result.assert_success()
@@ -43,8 +39,7 @@ def test_overlaps(cli, datafiles, use_fatal_warnings):
def test_overlaps_error(cli, datafiles, use_fatal_warnings):
project_dir = str(datafiles)
gen_project(project_dir, True, use_fatal_warnings)
- result = cli.run(project=project_dir, silent=True, args=[
- 'build', 'collect.bst'])
+ result = cli.run(project=project_dir, silent=True, args=["build", "collect.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.OVERLAPS)
@@ -53,8 +48,9 @@ def test_overlaps_error(cli, datafiles, use_fatal_warnings):
def test_overlaps_whitelist(cli, datafiles):
project_dir = str(datafiles)
gen_project(project_dir, True)
- result = cli.run(project=project_dir, silent=True, args=[
- 'build', 'collect-whitelisted.bst'])
+ result = cli.run(
+ project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"]
+ )
result.assert_success()
@@ -62,8 +58,9 @@ def test_overlaps_whitelist(cli, datafiles):
def test_overlaps_whitelist_ignored(cli, datafiles):
project_dir = str(datafiles)
gen_project(project_dir, False)
- result = cli.run(project=project_dir, silent=True, args=[
- 'build', 'collect-whitelisted.bst'])
+ result = cli.run(
+ project=project_dir, silent=True, args=["build", "collect-whitelisted.bst"]
+ )
result.assert_success()
@@ -74,8 +71,11 @@ def test_overlaps_whitelist_on_overlapper(cli, datafiles):
# it'll still fail because A doesn't permit overlaps.
project_dir = str(datafiles)
gen_project(project_dir, True)
- result = cli.run(project=project_dir, silent=True, args=[
- 'build', 'collect-partially-whitelisted.bst'])
+ result = cli.run(
+ project=project_dir,
+ silent=True,
+ args=["build", "collect-partially-whitelisted.bst"],
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.OVERLAPS)
@@ -87,21 +87,22 @@ def test_overlaps_script(cli, datafiles, use_fatal_warnings):
# Element.stage_dependency_artifacts() with Scope.RUN
project_dir = str(datafiles)
gen_project(project_dir, False, use_fatal_warnings)
- result = cli.run(project=project_dir, silent=True, args=[
- 'build', 'script.bst'])
+ result = cli.run(project=project_dir, silent=True, args=["build", "script.bst"])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("project_policy", [('fail'), ('warn')])
-@pytest.mark.parametrize("subproject_policy", [('fail'), ('warn')])
+@pytest.mark.parametrize("project_policy", [("fail"), ("warn")])
+@pytest.mark.parametrize("subproject_policy", [("fail"), ("warn")])
def test_overlap_subproject(cli, tmpdir, datafiles, project_policy, subproject_policy):
project_dir = str(datafiles)
- subproject_dir = os.path.join(project_dir, 'sub-project')
- junction_path = os.path.join(project_dir, 'sub-project.bst')
+ subproject_dir = os.path.join(project_dir, "sub-project")
+ junction_path = os.path.join(project_dir, "sub-project.bst")
- gen_project(project_dir, bool(project_policy == 'fail'), project_name='test')
- gen_project(subproject_dir, bool(subproject_policy == 'fail'), project_name='subtest')
+ gen_project(project_dir, bool(project_policy == "fail"), project_name="test")
+ gen_project(
+ subproject_dir, bool(subproject_policy == "fail"), project_name="subtest"
+ )
generate_junction(tmpdir, subproject_dir, junction_path)
# Here we have a dependency chain where the project element
@@ -110,8 +111,10 @@ def test_overlap_subproject(cli, tmpdir, datafiles, project_policy, subproject_p
# Test that overlap error vs warning policy for this overlap
# is always controlled by the project and not the subproject.
#
- result = cli.run(project=project_dir, silent=True, args=['build', 'sub-collect.bst'])
- if project_policy == 'fail':
+ result = cli.run(
+ project=project_dir, silent=True, args=["build", "sub-collect.bst"]
+ )
+ if project_policy == "fail":
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.OVERLAPS)
else:
diff --git a/tests/frontend/progress.py b/tests/frontend/progress.py
index e3b127f3b..3ca81f543 100644
--- a/tests/frontend/progress.py
+++ b/tests/frontend/progress.py
@@ -11,26 +11,26 @@ from buildstream._exceptions import ErrorDomain, LoadErrorReason
from tests.testutils import generate_junction
# Project directory
-DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), )
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)),)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
def test_show_progress_tally(cli, datafiles):
# Check that the progress reporting messages give correct tallies
project = str(datafiles)
- result = cli.run(project=project, args=['show', 'compose-all.bst'])
+ result = cli.run(project=project, args=["show", "compose-all.bst"])
result.assert_success()
assert " 3 subtasks processed" in result.stderr
assert "3 of 3 subtasks processed" in result.stderr
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
def test_junction_tally(cli, tmpdir, datafiles):
# Check that the progress reporting messages count elements in junctions
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
@@ -38,37 +38,36 @@ def test_junction_tally(cli, tmpdir, datafiles):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [{
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(project=project,
- silent=True,
- args=['source', 'fetch', 'junction.bst'])
+ result = cli.run(
+ project=project, silent=True, args=["source", "fetch", "junction.bst"]
+ )
result.assert_success()
# Assert the correct progress tallies are in the logging
- result = cli.run(project=project, args=['show', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["show", "junction-dep.bst"])
assert " 2 subtasks processed" in result.stderr
assert "2 of 2 subtasks processed" in result.stderr
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
def test_nested_junction_tally(cli, tmpdir, datafiles):
# Check that the progress reporting messages count elements in
# junctions of junctions
project = str(datafiles)
- sub1_path = os.path.join(project, 'files', 'sub-project')
- sub2_path = os.path.join(project, 'files', 'sub2-project')
+ sub1_path = os.path.join(project, "files", "sub-project")
+ sub2_path = os.path.join(project, "files", "sub2-project")
# A junction element which pulls sub1 into sub2
- sub1_element = os.path.join(project, 'files', 'sub2-project', 'elements', 'sub-junction.bst')
+ sub1_element = os.path.join(
+ project, "files", "sub2-project", "elements", "sub-junction.bst"
+ )
# A junction element which pulls sub2 into the main project
- sub2_element = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ sub2_element = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
generate_junction(tmpdir / "sub-project", sub1_path, sub1_element, store_ref=True)
generate_junction(tmpdir / "sub2-project", sub2_path, sub2_element, store_ref=True)
@@ -76,60 +75,50 @@ def test_nested_junction_tally(cli, tmpdir, datafiles):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [{
- 'junction': 'junction.bst',
- 'filename': 'import-sub.bst'
- }]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-sub.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(project=project,
- silent=True,
- args=['source', 'fetch', 'junction.bst'])
+ result = cli.run(
+ project=project, silent=True, args=["source", "fetch", "junction.bst"]
+ )
result.assert_success()
# Assert the correct progress tallies are in the logging
- result = cli.run(project=project, args=['show', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["show", "junction-dep.bst"])
assert " 3 subtasks processed" in result.stderr
assert "3 of 3 subtasks processed" in result.stderr
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
def test_junction_dep_tally(cli, tmpdir, datafiles):
# Check that the progress reporting messages count elements in junctions
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
# Add dependencies to the junction (not allowed, but let's do it
# anyway)
- with open(junction_path, 'a') as f:
- deps = {
- 'depends': [
- 'manual.bst'
- ]
- }
+ with open(junction_path, "a") as f:
+ deps = {"depends": ["manual.bst"]}
_yaml.roundtrip_dump(deps, f)
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [{
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(project=project,
- silent=True,
- args=['source', 'fetch', 'junction-dep.bst'])
+ result = cli.run(
+ project=project, silent=True, args=["source", "fetch", "junction-dep.bst"]
+ )
# Since we aren't allowed to specify any dependencies on a
# junction, we should fail
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
index ac3020ec2..cb3ab024e 100644
--- a/tests/frontend/project/sources/fetch_source.py
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -22,14 +22,16 @@ class FetchFetcher(SourceFetcher):
self.mark_download_url(url)
def fetch(self, alias_override=None):
- url = self.source.translate_url(self.original_url,
- alias_override=alias_override,
- primary=self.primary)
+ url = self.source.translate_url(
+ self.original_url, alias_override=alias_override, primary=self.primary
+ )
with open(self.source.output_file, "a") as f:
- success = url in self.source.fetch_succeeds and self.source.fetch_succeeds[url]
- message = "Fetch {} {} from {}\n".format(self.original_url,
- "succeeded" if success else "failed",
- url)
+ success = (
+ url in self.source.fetch_succeeds and self.source.fetch_succeeds[url]
+ )
+ message = "Fetch {} {} from {}\n".format(
+ self.original_url, "succeeded" if success else "failed", url
+ )
f.write(message)
if not success:
raise SourceError("Failed to fetch {}".format(url))
@@ -38,11 +40,11 @@ class FetchFetcher(SourceFetcher):
class FetchSource(Source):
# Read config to know which URLs to fetch
def configure(self, node):
- self.original_urls = node.get_str_list('urls')
- self.output_file = node.get_str('output-text')
+ self.original_urls = node.get_str_list("urls")
+ self.output_file = node.get_str("output-text")
self.fetch_succeeds = {
key: value.as_bool()
- for key, value in node.get_mapping('fetch-succeeds', {}).items()
+ for key, value in node.get_mapping("fetch-succeeds", {}).items()
}
# First URL is the primary one for this test
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 234f1133d..970987d36 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -8,14 +8,16 @@ import pytest
from buildstream import utils, _yaml
from buildstream.testing import cli # pylint: disable=unused-import
from buildstream.testing import create_repo
-from tests.testutils import create_artifact_share, generate_junction, assert_shared, assert_not_shared
+from tests.testutils import (
+ create_artifact_share,
+ generate_junction,
+ assert_shared,
+ assert_not_shared,
+)
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# Tests that:
@@ -27,40 +29,45 @@ DATA_DIR = os.path.join(
def test_push_pull_all(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Assert that everything is now cached in the remote.
- all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+ all_elements = [
+ "target.bst",
+ "import-bin.bst",
+ "import-dev.bst",
+ "compose-all.bst",
+ ]
for element_name in all_elements:
assert_shared(cli, share, project, element_name)
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- casdir = os.path.join(cli.directory, 'cas')
+ casdir = os.path.join(cli.directory, "cas")
shutil.rmtree(casdir)
- artifactdir = os.path.join(cli.directory, 'artifacts')
+ artifactdir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifactdir)
# Assert that nothing is cached locally anymore
states = cli.get_element_states(project, all_elements)
- assert not any(states[e] == 'cached' for e in all_elements)
+ assert not any(states[e] == "cached" for e in all_elements)
# Now try bst artifact pull
- result = cli.run(project=project, args=['artifact', 'pull', '--deps', 'all', 'target.bst'])
+ result = cli.run(
+ project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
+ )
result.assert_success()
# And assert that it's again in the local cache, without having built
states = cli.get_element_states(project, all_elements)
- assert not any(states[e] != 'cached' for e in all_elements)
+ assert not any(states[e] != "cached" for e in all_elements)
# Tests that:
@@ -68,51 +75,52 @@ def test_push_pull_all(cli, tmpdir, datafiles):
# * `bst artifact push` (default targets) pushes all built elements to configured 'push' cache
# * `bst artifact pull` (default targets) downloads everything from cache after local deletion
#
-@pytest.mark.datafiles(DATA_DIR + '_world')
+@pytest.mark.datafiles(DATA_DIR + "_world")
def test_push_pull_default_targets(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build the target elements
- cli.configure({
- 'artifacts': {'url': share.repo}
- })
- result = cli.run(project=project, args=['build'])
+ cli.configure({"artifacts": {"url": share.repo}})
+ result = cli.run(project=project, args=["build"])
result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Push all elements
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['artifact', 'push'])
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(project=project, args=["artifact", "push"])
result.assert_success()
# Assert that everything is now cached in the remote.
- all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+ all_elements = [
+ "target.bst",
+ "import-bin.bst",
+ "import-dev.bst",
+ "compose-all.bst",
+ ]
for element_name in all_elements:
assert_shared(cli, share, project, element_name)
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- casdir = os.path.join(cli.directory, 'cas')
+ casdir = os.path.join(cli.directory, "cas")
shutil.rmtree(casdir)
- artifactdir = os.path.join(cli.directory, 'artifacts')
+ artifactdir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifactdir)
# Assert that nothing is cached locally anymore
states = cli.get_element_states(project, all_elements)
- assert not any(states[e] == 'cached' for e in all_elements)
+ assert not any(states[e] == "cached" for e in all_elements)
# Now try bst artifact pull
- result = cli.run(project=project, args=['artifact', 'pull'])
+ result = cli.run(project=project, args=["artifact", "pull"])
result.assert_success()
# And assert that it's again in the local cache, without having built
states = cli.get_element_states(project, all_elements)
- assert not any(states[e] != 'cached' for e in all_elements)
+ assert not any(states[e] != "cached" for e in all_elements)
# Tests that:
@@ -124,38 +132,43 @@ def test_push_pull_default_targets(cli, tmpdir, datafiles):
def test_pull_secondary_cache(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1,\
- create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare1")
+ ) as share1, create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare2")
+ ) as share2:
# Build the target and push it to share2 only.
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': False},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
+ cli.configure(
+ {
+ "artifacts": [
+ {"url": share1.repo, "push": False},
+ {"url": share2.repo, "push": True},
+ ]
+ }
+ )
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- assert_not_shared(cli, share1, project, 'target.bst')
- assert_shared(cli, share2, project, 'target.bst')
+ assert_not_shared(cli, share1, project, "target.bst")
+ assert_shared(cli, share2, project, "target.bst")
# Delete the user's local artifact cache.
- casdir = os.path.join(cli.directory, 'cas')
+ casdir = os.path.join(cli.directory, "cas")
shutil.rmtree(casdir)
- artifactdir = os.path.join(cli.directory, 'artifacts')
+ artifactdir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifactdir)
# Assert that the element is not cached anymore.
- assert cli.get_element_state(project, 'target.bst') != 'cached'
+ assert cli.get_element_state(project, "target.bst") != "cached"
# Now try bst artifact pull
- result = cli.run(project=project, args=['artifact', 'pull', 'target.bst'])
+ result = cli.run(project=project, args=["artifact", "pull", "target.bst"])
result.assert_success()
# And assert that it's again in the local cache, without having built,
# i.e. we found it in share2.
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Tests that:
@@ -167,47 +180,53 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare')) as good_share,\
- create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare')) as bad_share:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "goodartifactshare")
+ ) as good_share, create_artifact_share(
+ os.path.join(str(tmpdir), "badartifactshare")
+ ) as bad_share:
# Build the target so we have it cached locally only.
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- state = cli.get_element_state(project, 'target.bst')
- assert state == 'cached'
+ state = cli.get_element_state(project, "target.bst")
+ assert state == "cached"
# Configure the default push location to be bad_share; we will assert that
# nothing actually gets pushed there.
- cli.configure({
- 'artifacts': {'url': bad_share.repo, 'push': True},
- })
+ cli.configure(
+ {"artifacts": {"url": bad_share.repo, "push": True},}
+ )
# Now try `bst artifact push` to the good_share.
- result = cli.run(project=project, args=[
- 'artifact', 'push', 'target.bst', '--remote', good_share.repo
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "push", "target.bst", "--remote", good_share.repo],
+ )
result.assert_success()
# Assert that all the artifacts are in the share we pushed
# to, and not the other.
- assert_shared(cli, good_share, project, 'target.bst')
- assert_not_shared(cli, bad_share, project, 'target.bst')
+ assert_shared(cli, good_share, project, "target.bst")
+ assert_not_shared(cli, bad_share, project, "target.bst")
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the good_share.
#
- casdir = os.path.join(cli.directory, 'cas')
+ casdir = os.path.join(cli.directory, "cas")
shutil.rmtree(casdir)
- artifactdir = os.path.join(cli.directory, 'artifacts')
+ artifactdir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifactdir)
- result = cli.run(project=project, args=['artifact', 'pull', 'target.bst', '--remote',
- good_share.repo])
+ result = cli.run(
+ project=project,
+ args=["artifact", "pull", "target.bst", "--remote", good_share.repo],
+ )
result.assert_success()
# And assert that it's again in the local cache, without having built
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Tests that:
@@ -218,123 +237,137 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
def test_push_pull_non_strict(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- 'projects': {
- 'test': {'strict': False}
+ cli.configure(
+ {
+ "artifacts": {"url": share.repo, "push": True},
+ "projects": {"test": {"strict": False}},
}
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
+ )
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Assert that everything is now cached in the remote.
- all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+ all_elements = [
+ "target.bst",
+ "import-bin.bst",
+ "import-dev.bst",
+ "compose-all.bst",
+ ]
for element_name in all_elements:
assert_shared(cli, share, project, element_name)
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- casdir = os.path.join(cli.directory, 'cas')
+ casdir = os.path.join(cli.directory, "cas")
shutil.rmtree(casdir)
- artifactdir = os.path.join(cli.directory, 'artifacts')
+ artifactdir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifactdir)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
- assert cli.get_element_state(project, element_name) != 'cached'
+ assert cli.get_element_state(project, element_name) != "cached"
# Add a file to force change in strict cache key of import-bin.bst
- with open(os.path.join(str(project), 'files', 'bin-files', 'usr', 'bin', 'world'), 'w') as f:
- f.write('world')
+ with open(
+ os.path.join(str(project), "files", "bin-files", "usr", "bin", "world"), "w"
+ ) as f:
+ f.write("world")
# Assert that the workspaced element requires a rebuild
- assert cli.get_element_state(project, 'import-bin.bst') == 'buildable'
+ assert cli.get_element_state(project, "import-bin.bst") == "buildable"
# Assert that the target is still waiting due to --no-strict
- assert cli.get_element_state(project, 'target.bst') == 'waiting'
+ assert cli.get_element_state(project, "target.bst") == "waiting"
# Now try bst artifact pull
- result = cli.run(project=project, args=['artifact', 'pull', '--deps', 'all', 'target.bst'])
+ result = cli.run(
+ project=project, args=["artifact", "pull", "--deps", "all", "target.bst"]
+ )
result.assert_success()
# And assert that the target is again in the local cache, without having built
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_cross_junction(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
# First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
result.assert_success()
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
+ assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"
- cache_dir = os.path.join(project, 'cache', 'cas')
+ cache_dir = os.path.join(project, "cache", "cas")
shutil.rmtree(cache_dir)
- artifact_dir = os.path.join(project, 'cache', 'artifacts')
+ artifact_dir = os.path.join(project, "cache", "artifacts")
shutil.rmtree(artifact_dir)
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
+ assert (
+ cli.get_element_state(project, "junction.bst:import-etc.bst") == "buildable"
+ )
# Now try bst artifact pull
- result = cli.run(project=project, args=['artifact', 'pull', 'junction.bst:import-etc.bst'])
+ result = cli.run(
+ project=project, args=["artifact", "pull", "junction.bst:import-etc.bst"]
+ )
result.assert_success()
# And assert that it's again in the local cache, without having built
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
+ assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"
@pytest.mark.datafiles(DATA_DIR)
def test_pull_missing_blob(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Assert that everything is now cached in the remote.
- all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+ all_elements = [
+ "target.bst",
+ "import-bin.bst",
+ "import-dev.bst",
+ "compose-all.bst",
+ ]
for element_name in all_elements:
assert_shared(cli, share, project, element_name)
# Now we've pushed, delete the user's local artifact cache
# directory and try to redownload it from the share
#
- casdir = os.path.join(cli.directory, 'cas')
+ casdir = os.path.join(cli.directory, "cas")
shutil.rmtree(casdir)
- artifactdir = os.path.join(cli.directory, 'artifacts')
+ artifactdir = os.path.join(cli.directory, "artifacts")
shutil.rmtree(artifactdir)
# Assert that nothing is cached locally anymore
for element_name in all_elements:
- assert cli.get_element_state(project, element_name) != 'cached'
+ assert cli.get_element_state(project, element_name) != "cached"
# Now delete blobs in the remote without deleting the artifact ref.
# This simulates scenarios with concurrent artifact expiry.
- remote_objdir = os.path.join(share.repodir, 'cas', 'objects')
+ remote_objdir = os.path.join(share.repodir, "cas", "objects")
shutil.rmtree(remote_objdir)
# Now try bst build
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
# Assert that no artifacts were pulled
@@ -344,9 +377,9 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_pull_missing_local_blob(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(os.path.join(str(datafiles), "files"))
- element_dir = os.path.join(str(tmpdir), 'elements')
+ element_dir = os.path.join(str(tmpdir), "elements")
project = str(tmpdir)
project_config = {
"name": "pull-missing-local-blob",
@@ -358,43 +391,41 @@ def test_pull_missing_local_blob(cli, tmpdir, datafiles):
"kind": "import",
"sources": [repo.source_config()],
}
- input_name = 'input.bst'
+ input_name = "input.bst"
input_file = os.path.join(element_dir, input_name)
_yaml.roundtrip_dump(input_config, input_file)
- depends_name = 'depends.bst'
+ depends_name = "depends.bst"
depends_config = {
"kind": "stack",
- "depends": [
- {"filename": input_name, "type": "build"}
- ]
+ "depends": [{"filename": input_name, "type": "build"}],
}
depends_file = os.path.join(element_dir, depends_name)
_yaml.roundtrip_dump(depends_config, depends_file)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build the import-bin element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
- result = cli.run(project=project, args=['source', 'track', input_name])
+ result = cli.run(project=project, args=["source", "track", input_name])
result.assert_success()
- result = cli.run(project=project, args=['build', input_name])
+ result = cli.run(project=project, args=["build", input_name])
result.assert_success()
- assert cli.get_element_state(project, input_name) == 'cached'
+ assert cli.get_element_state(project, input_name) == "cached"
# Delete a file blob from the local cache.
# This is a placeholder to test partial CAS handling until we support
# partial artifact pulling (or blob-based CAS expiry).
#
- digest = utils.sha256sum(os.path.join(project, 'files', 'bin-files', 'usr', 'bin', 'hello'))
- objpath = os.path.join(cli.directory, 'cas', 'objects', digest[:2], digest[2:])
+ digest = utils.sha256sum(
+ os.path.join(project, "files", "bin-files", "usr", "bin", "hello")
+ )
+ objpath = os.path.join(cli.directory, "cas", "objects", digest[:2], digest[2:])
os.unlink(objpath)
# Now try bst build
- result = cli.run(project=project, args=['build', depends_name])
+ result = cli.run(project=project, args=["build", depends_name])
result.assert_success()
# Assert that the import-bin artifact was pulled (completing the partial artifact)
@@ -406,16 +437,15 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
- cli.configure({
- 'artifacts': {'url': share.repo}
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
+ cli.configure({"artifacts": {"url": share.repo}})
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- assert not result.get_pulled_elements(), \
- "No elements should have been pulled since the cache was empty"
+ assert (
+ not result.get_pulled_elements()
+ ), "No elements should have been pulled since the cache was empty"
assert "INFO Remote ({}) does not have".format(share.repo) in result.stderr
assert "SKIPPED Pull" in result.stderr
@@ -426,25 +456,29 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as shareuser,\
- create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as shareproject,\
- create_artifact_share(os.path.join(str(tmpdir), 'artifactshare3')) as sharecli:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare1")
+ ) as shareuser, create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare2")
+ ) as shareproject, create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare3")
+ ) as sharecli:
# Add shareproject repo url to project.conf
with open(os.path.join(project, "project.conf"), "a") as projconf:
- projconf.write("artifacts:\n url: {}\n push: True".format(shareproject.repo))
+ projconf.write(
+ "artifacts:\n url: {}\n push: True".format(shareproject.repo)
+ )
# Configure shareuser remote in user conf
- cli.configure({
- 'artifacts': {'url': shareuser.repo, 'push': True}
- })
+ cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
# Push the artifacts to the shareuser and shareproject remotes.
# Assert that shareuser and shareproject have the artifacts cached,
# but sharecli doesn't, then delete locally cached elements
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- all_elements = ['target.bst', 'import-bin.bst', 'compose-all.bst']
+ all_elements = ["target.bst", "import-bin.bst", "compose-all.bst"]
for element_name in all_elements:
assert element_name in result.get_pushed_elements()
assert_not_shared(cli, sharecli, project, element_name)
@@ -455,7 +489,9 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
# Now check that a build with cli set as sharecli results in nothing being pulled,
# as it doesn't have them cached and shareuser/shareproject should be ignored. This
# will however result in the artifacts being built and pushed to it
- result = cli.run(project=project, args=['build', '--remote', sharecli.repo, 'target.bst'])
+ result = cli.run(
+ project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
+ )
result.assert_success()
for element_name in all_elements:
assert element_name not in result.get_pulled_elements()
@@ -464,10 +500,12 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
# Now check that a clean build with cli set as sharecli should result in artifacts only
# being pulled from it, as that was provided via the cli and is populated
- result = cli.run(project=project, args=['build', '--remote', sharecli.repo, 'target.bst'])
+ result = cli.run(
+ project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
+ )
result.assert_success()
for element_name in all_elements:
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
assert element_name in result.get_pulled_elements()
assert shareproject.repo not in result.stderr
assert shareuser.repo not in result.stderr
@@ -477,70 +515,82 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_pull_access_rights(cli, tmpdir, datafiles):
project = str(datafiles)
- checkout = os.path.join(str(tmpdir), 'checkout')
+ checkout = os.path.join(str(tmpdir), "checkout")
# Work-around datafiles not preserving mode
- os.chmod(os.path.join(project, 'files/bin-files/usr/bin/hello'), 0o0755)
+ os.chmod(os.path.join(project, "files/bin-files/usr/bin/hello"), 0o0755)
# We need a big file that does not go into a batch to test a different
# code path
- os.makedirs(os.path.join(project, 'files/dev-files/usr/share'), exist_ok=True)
- with open(os.path.join(project, 'files/dev-files/usr/share/big-file'), 'w') as f:
- buf = ' ' * 4096
+ os.makedirs(os.path.join(project, "files/dev-files/usr/share"), exist_ok=True)
+ with open(os.path.join(project, "files/dev-files/usr/share/big-file"), "w") as f:
+ buf = " " * 4096
for _ in range(1024):
f.write(buf)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['build', 'compose-all.bst'])
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(project=project, args=["build", "compose-all.bst"])
result.assert_success()
- result = cli.run(project=project,
- args=['artifact', 'checkout',
- '--hardlinks', '--no-integrate',
- 'compose-all.bst',
- '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "--hardlinks",
+ "--no-integrate",
+ "compose-all.bst",
+ "--directory",
+ checkout,
+ ],
+ )
result.assert_success()
- st = os.lstat(os.path.join(checkout, 'usr/include/pony.h'))
+ st = os.lstat(os.path.join(checkout, "usr/include/pony.h"))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0644
- st = os.lstat(os.path.join(checkout, 'usr/bin/hello'))
+ st = os.lstat(os.path.join(checkout, "usr/bin/hello"))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0755
- st = os.lstat(os.path.join(checkout, 'usr/share/big-file'))
+ st = os.lstat(os.path.join(checkout, "usr/share/big-file"))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0644
shutil.rmtree(checkout)
- casdir = os.path.join(cli.directory, 'cas')
+ casdir = os.path.join(cli.directory, "cas")
shutil.rmtree(casdir)
- result = cli.run(project=project, args=['artifact', 'pull', 'compose-all.bst'])
+ result = cli.run(project=project, args=["artifact", "pull", "compose-all.bst"])
result.assert_success()
- result = cli.run(project=project,
- args=['artifact', 'checkout',
- '--hardlinks', '--no-integrate',
- 'compose-all.bst',
- '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "--hardlinks",
+ "--no-integrate",
+ "compose-all.bst",
+ "--directory",
+ checkout,
+ ],
+ )
result.assert_success()
- st = os.lstat(os.path.join(checkout, 'usr/include/pony.h'))
+ st = os.lstat(os.path.join(checkout, "usr/include/pony.h"))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0644
- st = os.lstat(os.path.join(checkout, 'usr/bin/hello'))
+ st = os.lstat(os.path.join(checkout, "usr/bin/hello"))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0755
- st = os.lstat(os.path.join(checkout, 'usr/share/big-file'))
+ st = os.lstat(os.path.join(checkout, "usr/share/big-file"))
assert stat.S_ISREG(st.st_mode)
assert stat.S_IMODE(st.st_mode) == 0o0644
@@ -549,39 +599,43 @@ def test_pull_access_rights(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_pull_artifact(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# Configure a local cache
- local_cache = os.path.join(str(tmpdir), 'cache')
- cli.configure({'cachedir': local_cache})
+ local_cache = os.path.join(str(tmpdir), "cache")
+ cli.configure({"cachedir": local_cache})
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
- artifact_ref = os.path.join('test', os.path.splitext(element)[0], cache_key)
- assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+ artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
+ assert os.path.exists(
+ os.path.join(local_cache, "artifacts", "refs", artifact_ref)
+ )
# Assert that the target is shared (note that assert shared will use the artifact name)
assert_shared(cli, share, project, element)
# Now we've pushed, remove the local cache
- shutil.rmtree(os.path.join(local_cache, 'artifacts'))
+ shutil.rmtree(os.path.join(local_cache, "artifacts"))
# Assert that nothing is cached locally anymore
- assert not os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+ assert not os.path.exists(
+ os.path.join(local_cache, "artifacts", "refs", artifact_ref)
+ )
# Now try bst artifact pull
- result = cli.run(project=project, args=['artifact', 'pull', artifact_ref])
+ result = cli.run(project=project, args=["artifact", "pull", artifact_ref])
result.assert_success()
# And assert that it's again in the local cache, without having built
- assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+ assert os.path.exists(
+ os.path.join(local_cache, "artifacts", "refs", artifact_ref)
+ )
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 31f96cbdf..21a47838c 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -28,15 +28,18 @@ import pytest
from buildstream._exceptions import ErrorDomain
from buildstream.testing import cli # pylint: disable=unused-import
-from tests.testutils import create_artifact_share, create_element_size, generate_junction, \
- wait_for_cache_granularity, assert_shared, assert_not_shared
+from tests.testutils import (
+ create_artifact_share,
+ create_element_size,
+ generate_junction,
+ wait_for_cache_granularity,
+ assert_shared,
+ assert_not_shared,
+)
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# Tests that:
@@ -49,100 +52,105 @@ def test_push(cli, tmpdir, datafiles):
project = str(datafiles)
# First build the project without the artifact cache configured
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
# Assert that we are now cached locally
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Set up two artifact shares.
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1:
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare2")
+ ) as share2:
# Try pushing with no remotes configured. This should fail.
- result = cli.run(project=project, args=['artifact', 'push', 'target.bst'])
+ result = cli.run(project=project, args=["artifact", "push", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
# Configure bst to pull but not push from a cache and run `bst artifact push`.
# This should also fail.
- cli.configure({
- 'artifacts': {'url': share1.repo, 'push': False},
- })
- result = cli.run(project=project, args=['artifact', 'push', 'target.bst'])
+ cli.configure(
+ {"artifacts": {"url": share1.repo, "push": False},}
+ )
+ result = cli.run(project=project, args=["artifact", "push", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
# Configure bst to push to one of the caches and run `bst artifact push`. This works.
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': False},
- {'url': share2.repo, 'push': True},
- ]
- })
- cli.run(project=project, args=['artifact', 'push', 'target.bst'])
+ cli.configure(
+ {
+ "artifacts": [
+ {"url": share1.repo, "push": False},
+ {"url": share2.repo, "push": True},
+ ]
+ }
+ )
+ cli.run(project=project, args=["artifact", "push", "target.bst"])
- assert_not_shared(cli, share1, project, 'target.bst')
- assert_shared(cli, share2, project, 'target.bst')
+ assert_not_shared(cli, share1, project, "target.bst")
+ assert_shared(cli, share2, project, "target.bst")
# Now try pushing to both
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': True},
- {'url': share2.repo, 'push': True},
- ]
- })
- cli.run(project=project, args=['artifact', 'push', 'target.bst'])
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare2")
+ ) as share2:
+ cli.configure(
+ {
+ "artifacts": [
+ {"url": share1.repo, "push": True},
+ {"url": share2.repo, "push": True},
+ ]
+ }
+ )
+ cli.run(project=project, args=["artifact", "push", "target.bst"])
- assert_shared(cli, share1, project, 'target.bst')
- assert_shared(cli, share2, project, 'target.bst')
+ assert_shared(cli, share1, project, "target.bst")
+ assert_shared(cli, share2, project, "target.bst")
# Tests `bst artifact push $artifact_ref`
@pytest.mark.datafiles(DATA_DIR)
def test_push_artifact(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'target.bst'
+ element = "target.bst"
# Configure a local cache
- local_cache = os.path.join(str(tmpdir), 'cache')
- cli.configure({'cachedir': local_cache})
+ local_cache = os.path.join(str(tmpdir), "cache")
+ cli.configure({"cachedir": local_cache})
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build it without the artifact cache configured
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
- artifact_ref = os.path.join('test', os.path.splitext(element)[0], cache_key)
- assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+ artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
+ assert os.path.exists(
+ os.path.join(local_cache, "artifacts", "refs", artifact_ref)
+ )
# Configure artifact share
- cli.configure({
- #
- # FIXME: This test hangs "sometimes" if we allow
- # concurrent push.
- #
- # It's not too bad to ignore since we're
- # using the local artifact cache functionality
- # only, but it should probably be fixed.
- #
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
+ cli.configure(
+ {
+ #
+ # FIXME: This test hangs "sometimes" if we allow
+ # concurrent push.
+ #
+ # It's not too bad to ignore since we're
+ # using the local artifact cache functionality
+ # only, but it should probably be fixed.
+ #
+ "scheduler": {"pushers": 1},
+ "artifacts": {"url": share.repo, "push": True,},
}
- })
+ )
# Now try bst artifact push the artifact ref
- result = cli.run(project=project, args=[
- 'artifact', 'push', artifact_ref
- ])
+ result = cli.run(project=project, args=["artifact", "push", artifact_ref])
result.assert_success()
# And finally assert that all the artifacts are in the share
@@ -162,27 +170,23 @@ def test_push_fails(cli, tmpdir, datafiles):
project = str(datafiles)
# Set up the share
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Configure bst to be able to push to the share
- cli.configure({
- 'artifacts': [
- {'url': share.repo, 'push': True},
- ]
- })
+ cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
# First ensure that the target is *NOT* cached
- assert cli.get_element_state(project, 'target.bst') != 'cached'
+ assert cli.get_element_state(project, "target.bst") != "cached"
# Now try and push the target
- result = cli.run(project=project, args=['artifact', 'push', 'target.bst'])
+ result = cli.run(project=project, args=["artifact", "push", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Push failed: target.bst is not cached" in result.stderr
# Now ensure that deps are also not cached
- assert cli.get_element_state(project, 'import-bin.bst') != 'cached'
- assert cli.get_element_state(project, 'import-dev.bst') != 'cached'
- assert cli.get_element_state(project, 'compose-all.bst') != 'cached'
+ assert cli.get_element_state(project, "import-bin.bst") != "cached"
+ assert cli.get_element_state(project, "import-dev.bst") != "cached"
+ assert cli.get_element_state(project, "compose-all.bst") != "cached"
# Tests that:
@@ -194,45 +198,50 @@ def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
project = str(datafiles)
# Set up the share
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build the target (and its deps)
- result = cli.run(project=project, args=['build', 'target.bst'])
- assert cli.get_element_state(project, 'target.bst') == 'cached'
- assert cli.get_element_state(project, 'import-dev.bst') == 'cached'
+ result = cli.run(project=project, args=["build", "target.bst"])
+ assert cli.get_element_state(project, "target.bst") == "cached"
+ assert cli.get_element_state(project, "import-dev.bst") == "cached"
# Now delete the artifact of a dependency and ensure it is not in the cache
- result = cli.run(project=project, args=['artifact', 'delete', 'import-dev.bst'])
- assert cli.get_element_state(project, 'import-dev.bst') != 'cached'
+ result = cli.run(project=project, args=["artifact", "delete", "import-dev.bst"])
+ assert cli.get_element_state(project, "import-dev.bst") != "cached"
# Configure bst to be able to push to the share
- cli.configure({
- 'artifacts': [
- {'url': share.repo, 'push': True},
- ]
- })
+ cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
# Now try and push the target with its deps using --on-error continue
# and assert that push failed, but what could be pushed was pushed
- result = cli.run(project=project,
- args=['--on-error=continue', 'artifact', 'push', '--deps', 'all', 'target.bst'])
+ result = cli.run(
+ project=project,
+ args=[
+ "--on-error=continue",
+ "artifact",
+ "push",
+ "--deps",
+ "all",
+ "target.bst",
+ ],
+ )
# The overall process should return as failed
result.assert_main_error(ErrorDomain.STREAM, None)
# We should still have pushed what we could
- assert_shared(cli, share, project, 'import-bin.bst')
- assert_shared(cli, share, project, 'compose-all.bst')
- assert_shared(cli, share, project, 'target.bst')
+ assert_shared(cli, share, project, "import-bin.bst")
+ assert_shared(cli, share, project, "compose-all.bst")
+ assert_shared(cli, share, project, "target.bst")
- assert_not_shared(cli, share, project, 'import-dev.bst')
+ assert_not_shared(cli, share, project, "import-dev.bst")
errors = [
"import-dev.bst is not cached",
(
"Error while pushing. The following elements were not pushed as they are not yet cached:\n"
"\n"
"\timport-dev.bst\n"
- )
+ ),
]
for error in errors:
assert error in result.stderr
@@ -244,91 +253,87 @@ def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
def test_push_all(cli, tmpdir, datafiles):
project = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build it without the artifact cache configured
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
# Assert that we are now cached locally
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ assert cli.get_element_state(project, "target.bst") == "cached"
# Configure artifact share
- cli.configure({
- #
- # FIXME: This test hangs "sometimes" if we allow
- # concurrent push.
- #
- # It's not too bad to ignore since we're
- # using the local artifact cache functionality
- # only, but it should probably be fixed.
- #
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
+ cli.configure(
+ {
+ #
+ # FIXME: This test hangs "sometimes" if we allow
+ # concurrent push.
+ #
+ # It's not too bad to ignore since we're
+ # using the local artifact cache functionality
+ # only, but it should probably be fixed.
+ #
+ "scheduler": {"pushers": 1},
+ "artifacts": {"url": share.repo, "push": True,},
}
- })
+ )
# Now try bst artifact push all the deps
- result = cli.run(project=project, args=[
- 'artifact', 'push', 'target.bst',
- '--deps', 'all'
- ])
+ result = cli.run(
+ project=project, args=["artifact", "push", "target.bst", "--deps", "all"]
+ )
result.assert_success()
# And finally assert that all the artifacts are in the share
- assert_shared(cli, share, project, 'target.bst')
- assert_shared(cli, share, project, 'import-bin.bst')
- assert_shared(cli, share, project, 'import-dev.bst')
- assert_shared(cli, share, project, 'compose-all.bst')
+ assert_shared(cli, share, project, "target.bst")
+ assert_shared(cli, share, project, "import-bin.bst")
+ assert_shared(cli, share, project, "import-dev.bst")
+ assert_shared(cli, share, project, "compose-all.bst")
+
# Tests that `bst artifact push --deps all $artifact_ref` fails
@pytest.mark.datafiles(DATA_DIR)
def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
project = str(datafiles)
- element = 'checkout-deps.bst'
+ element = "checkout-deps.bst"
# Configure a local cache
- local_cache = os.path.join(str(tmpdir), 'cache')
- cli.configure({'cachedir': local_cache})
+ local_cache = os.path.join(str(tmpdir), "cache")
+ cli.configure({"cachedir": local_cache})
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# First build it without the artifact cache configured
- result = cli.run(project=project, args=['build', element])
+ result = cli.run(project=project, args=["build", element])
result.assert_success()
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
- artifact_ref = os.path.join('test', os.path.splitext(element)[0], cache_key)
- assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+ artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
+ assert os.path.exists(
+ os.path.join(local_cache, "artifacts", "refs", artifact_ref)
+ )
# Configure artifact share
- cli.configure({
- #
- # FIXME: This test hangs "sometimes" if we allow
- # concurrent push.
- #
- # It's not too bad to ignore since we're
- # using the local artifact cache functionality
- # only, but it should probably be fixed.
- #
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
+ cli.configure(
+ {
+ #
+ # FIXME: This test hangs "sometimes" if we allow
+ # concurrent push.
+ #
+ # It's not too bad to ignore since we're
+ # using the local artifact cache functionality
+ # only, but it should probably be fixed.
+ #
+ "scheduler": {"pushers": 1},
+ "artifacts": {"url": share.repo, "push": True,},
}
- })
+ )
# Now try bst artifact push all the deps
- result = cli.run(project=project, args=[
- 'artifact', 'push', '--deps', 'all', artifact_ref
- ])
+ result = cli.run(
+ project=project, args=["artifact", "push", "--deps", "all", artifact_ref]
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Error: '--deps all' is not supported for artifact refs" in result.stderr
@@ -342,47 +347,52 @@ def test_push_after_pull(cli, tmpdir, datafiles):
project = str(datafiles)
# Set up two artifact shares.
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1,\
- create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare1")
+ ) as share1, create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare2")
+ ) as share2:
# Set the scene: share1 has the artifact, share2 does not.
#
- cli.configure({
- 'artifacts': {'url': share1.repo, 'push': True},
- })
+ cli.configure(
+ {"artifacts": {"url": share1.repo, "push": True},}
+ )
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- cli.remove_artifact_from_cache(project, 'target.bst')
+ cli.remove_artifact_from_cache(project, "target.bst")
- assert_shared(cli, share1, project, 'target.bst')
- assert_not_shared(cli, share2, project, 'target.bst')
- assert cli.get_element_state(project, 'target.bst') != 'cached'
+ assert_shared(cli, share1, project, "target.bst")
+ assert_not_shared(cli, share2, project, "target.bst")
+ assert cli.get_element_state(project, "target.bst") != "cached"
# Now run the build again. Correct `bst build` behaviour is to download the
# artifact from share1 but not push it back again.
#
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- assert result.get_pulled_elements() == ['target.bst']
+ assert result.get_pulled_elements() == ["target.bst"]
assert result.get_pushed_elements() == []
# Delete the artifact locally again.
- cli.remove_artifact_from_cache(project, 'target.bst')
+ cli.remove_artifact_from_cache(project, "target.bst")
# Now we add share2 into the mix as a second push remote. This time,
# `bst build` should push to share2 after pulling from share1.
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': True},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
+ cli.configure(
+ {
+ "artifacts": [
+ {"url": share1.repo, "push": True},
+ {"url": share2.repo, "push": True},
+ ]
+ }
+ )
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- assert result.get_pulled_elements() == ['target.bst']
- assert result.get_pushed_elements() == ['target.bst']
+ assert result.get_pulled_elements() == ["target.bst"]
+ assert result.get_pushed_elements() == ["target.bst"]
# Ensure that when an artifact's size exceeds available disk space
@@ -391,52 +401,53 @@ def test_push_after_pull(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_expires(cli, datafiles, tmpdir):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
# Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
# Set a 22 MB quota
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'),
- quota=int(22e6)) as share:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)
+ ) as share:
# Configure bst to push to the cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
+ cli.configure(
+ {"artifacts": {"url": share.repo, "push": True},}
+ )
# Create and build an element of 15 MB
- create_element_size('element1.bst', project, element_path, [], int(15e6))
- result = cli.run(project=project, args=['build', 'element1.bst'])
+ create_element_size("element1.bst", project, element_path, [], int(15e6))
+ result = cli.run(project=project, args=["build", "element1.bst"])
result.assert_success()
# Create and build an element of 5 MB
- create_element_size('element2.bst', project, element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element2.bst'])
+ create_element_size("element2.bst", project, element_path, [], int(5e6))
+ result = cli.run(project=project, args=["build", "element2.bst"])
result.assert_success()
# check that elements 1 and 2 are cached both locally and remotely
- states = cli.get_element_states(project, ['element1.bst', 'element2.bst'])
+ states = cli.get_element_states(project, ["element1.bst", "element2.bst"])
assert states == {
"element1.bst": "cached",
"element2.bst": "cached",
}
- assert_shared(cli, share, project, 'element1.bst')
- assert_shared(cli, share, project, 'element2.bst')
+ assert_shared(cli, share, project, "element1.bst")
+ assert_shared(cli, share, project, "element2.bst")
# Create and build another element of 5 MB (This will exceed the free disk space available)
- create_element_size('element3.bst', project, element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element3.bst'])
+ create_element_size("element3.bst", project, element_path, [], int(5e6))
+ result = cli.run(project=project, args=["build", "element3.bst"])
result.assert_success()
# Ensure it is cached both locally and remotely
- assert cli.get_element_state(project, 'element3.bst') == 'cached'
- assert_shared(cli, share, project, 'element3.bst')
+ assert cli.get_element_state(project, "element3.bst") == "cached"
+ assert_shared(cli, share, project, "element3.bst")
# Ensure element1 has been removed from the share
- assert_not_shared(cli, share, project, 'element1.bst')
+ assert_not_shared(cli, share, project, "element1.bst")
# Ensure that element2 remains
- assert_shared(cli, share, project, 'element2.bst')
+ assert_shared(cli, share, project, "element2.bst")
# Test that a large artifact, whose size exceeds the quota, is not pushed
@@ -444,26 +455,27 @@ def test_artifact_expires(cli, datafiles, tmpdir):
@pytest.mark.datafiles(DATA_DIR)
def test_artifact_too_large(cli, datafiles, tmpdir):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Mock a file system with 5 MB total space
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'),
- quota=int(5e6)) as share:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)
+ ) as share:
# Configure bst to push to the remote cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
+ cli.configure(
+ {"artifacts": {"url": share.repo, "push": True},}
+ )
# Create and push a 3MB element
- create_element_size('small_element.bst', project, element_path, [], int(3e6))
- result = cli.run(project=project, args=['build', 'small_element.bst'])
+ create_element_size("small_element.bst", project, element_path, [], int(3e6))
+ result = cli.run(project=project, args=["build", "small_element.bst"])
result.assert_success()
# Create and try to push a 6MB element.
- create_element_size('large_element.bst', project, element_path, [], int(6e6))
- result = cli.run(project=project, args=['build', 'large_element.bst'])
+ create_element_size("large_element.bst", project, element_path, [], int(6e6))
+ result = cli.run(project=project, args=["build", "large_element.bst"])
# This should fail; the server will refuse to store the CAS
# blobs for the artifact, and then fail to find the files for
# the uploaded artifact proto.
@@ -476,100 +488,111 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
result.assert_main_error(ErrorDomain.STREAM, None)
# Ensure that the small artifact is still in the share
- states = cli.get_element_states(project, ['small_element.bst', 'large_element.bst'])
- assert states['small_element.bst'] == 'cached'
- assert_shared(cli, share, project, 'small_element.bst')
+ states = cli.get_element_states(
+ project, ["small_element.bst", "large_element.bst"]
+ )
+ assert states["small_element.bst"] == "cached"
+ assert_shared(cli, share, project, "small_element.bst")
# Ensure that the artifact is cached locally but NOT remotely
- assert states['large_element.bst'] == 'cached'
- assert_not_shared(cli, share, project, 'large_element.bst')
+ assert states["large_element.bst"] == "cached"
+ assert_not_shared(cli, share, project, "large_element.bst")
# Test that when an element is pulled recently, it is not considered the LRU element.
@pytest.mark.datafiles(DATA_DIR)
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
project = str(datafiles)
- element_path = 'elements'
+ element_path = "elements"
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Set a 22 MB quota
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'),
- quota=int(22e6)) as share:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)
+ ) as share:
# Configure bst to push to the cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
+ cli.configure(
+ {"artifacts": {"url": share.repo, "push": True},}
+ )
# Create and build 2 elements, one 5 MB and one 15 MB.
- create_element_size('element1.bst', project, element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element1.bst'])
+ create_element_size("element1.bst", project, element_path, [], int(5e6))
+ result = cli.run(project=project, args=["build", "element1.bst"])
result.assert_success()
- create_element_size('element2.bst', project, element_path, [], int(15e6))
- result = cli.run(project=project, args=['build', 'element2.bst'])
+ create_element_size("element2.bst", project, element_path, [], int(15e6))
+ result = cli.run(project=project, args=["build", "element2.bst"])
result.assert_success()
# Ensure they are cached locally
- states = cli.get_element_states(project, ['element1.bst', 'element2.bst'])
+ states = cli.get_element_states(project, ["element1.bst", "element2.bst"])
assert states == {
"element1.bst": "cached",
"element2.bst": "cached",
}
# Ensure that they have been pushed to the cache
- assert_shared(cli, share, project, 'element1.bst')
- assert_shared(cli, share, project, 'element2.bst')
+ assert_shared(cli, share, project, "element1.bst")
+ assert_shared(cli, share, project, "element2.bst")
# Remove element1 from the local cache
- cli.remove_artifact_from_cache(project, 'element1.bst')
- assert cli.get_element_state(project, 'element1.bst') != 'cached'
+ cli.remove_artifact_from_cache(project, "element1.bst")
+ assert cli.get_element_state(project, "element1.bst") != "cached"
# Pull the element1 from the remote cache (this should update its mtime)
- result = cli.run(project=project, args=['artifact', 'pull', 'element1.bst', '--remote',
- share.repo])
+ result = cli.run(
+ project=project,
+ args=["artifact", "pull", "element1.bst", "--remote", share.repo],
+ )
result.assert_success()
# Ensure element1 is cached locally
- assert cli.get_element_state(project, 'element1.bst') == 'cached'
+ assert cli.get_element_state(project, "element1.bst") == "cached"
wait_for_cache_granularity()
# Create and build the element3 (of 5 MB)
- create_element_size('element3.bst', project, element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element3.bst'])
+ create_element_size("element3.bst", project, element_path, [], int(5e6))
+ result = cli.run(project=project, args=["build", "element3.bst"])
result.assert_success()
# Make sure it's cached locally and remotely
- assert cli.get_element_state(project, 'element3.bst') == 'cached'
- assert_shared(cli, share, project, 'element3.bst')
+ assert cli.get_element_state(project, "element3.bst") == "cached"
+ assert_shared(cli, share, project, "element3.bst")
# Ensure that element2 was deleted from the share and element1 remains
- assert_not_shared(cli, share, project, 'element2.bst')
- assert_shared(cli, share, project, 'element1.bst')
+ assert_not_shared(cli, share, project, "element2.bst")
+ assert_shared(cli, share, project, "element1.bst")
@pytest.mark.datafiles(DATA_DIR)
def test_push_cross_junction(cli, tmpdir, datafiles):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
- result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
+ result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
result.assert_success()
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
+ assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
- cli.run(project=project, args=['artifact', 'push', 'junction.bst:import-etc.bst'])
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
+ cli.configure(
+ {"artifacts": {"url": share.repo, "push": True},}
+ )
+ cli.run(
+ project=project, args=["artifact", "push", "junction.bst:import-etc.bst"]
+ )
- cache_key = cli.get_element_key(project, 'junction.bst:import-etc.bst')
- assert share.get_artifact(cli.get_artifact_name(project, 'subtest', 'import-etc.bst', cache_key=cache_key))
+ cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
+ assert share.get_artifact(
+ cli.get_artifact_name(
+ project, "subtest", "import-etc.bst", cache_key=cache_key
+ )
+ )
@pytest.mark.datafiles(DATA_DIR)
@@ -577,20 +600,20 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
assert "SKIPPED Push" not in result.stderr
- result = cli.run(project=project, args=['artifact', 'push', 'target.bst'])
+ result = cli.run(project=project, args=["artifact", "push", "target.bst"])
result.assert_success()
- assert not result.get_pushed_elements(), "No elements should have been pushed since the cache was populated"
+ assert (
+ not result.get_pushed_elements()
+ ), "No elements should have been pushed since the cache was populated"
assert "INFO Remote ({}) already has ".format(share.repo) in result.stderr
assert "SKIPPED Push" in result.stderr
@@ -600,24 +623,30 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as shareuser,\
- create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as shareproject,\
- create_artifact_share(os.path.join(str(tmpdir), 'artifactshare3')) as sharecli:
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare1")
+ ) as shareuser, create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare2")
+ ) as shareproject, create_artifact_share(
+ os.path.join(str(tmpdir), "artifactshare3")
+ ) as sharecli:
# Add shareproject repo url to project.conf
with open(os.path.join(project, "project.conf"), "a") as projconf:
- projconf.write("artifacts:\n url: {}\n push: True".format(shareproject.repo))
+ projconf.write(
+ "artifacts:\n url: {}\n push: True".format(shareproject.repo)
+ )
# Configure shareuser remote in user conf
- cli.configure({
- 'artifacts': {'url': shareuser.repo, 'push': True}
- })
+ cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
- result = cli.run(project=project, args=['build', '--remote', sharecli.repo, 'target.bst'])
+ result = cli.run(
+ project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
+ )
# Artifacts should have only been pushed to sharecli, as that was provided via the cli
result.assert_success()
- all_elements = ['target.bst', 'import-bin.bst', 'compose-all.bst']
+ all_elements = ["target.bst", "import-bin.bst", "compose-all.bst"]
for element_name in all_elements:
assert element_name in result.get_pushed_elements()
assert_shared(cli, sharecli, project, element_name)
@@ -632,26 +661,21 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
# This is a regression test for issue #990
#
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("buildtrees", [('buildtrees'), ('normal')])
+@pytest.mark.parametrize("buildtrees", [("buildtrees"), ("normal")])
def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
project = os.path.join(datafiles.dirname, datafiles.basename)
caplog.set_level(1)
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- cli.configure({
- 'artifacts': {
- 'url': share.repo,
- 'push': True
- },
- 'projects': {
- 'test': {
- 'strict': False
- }
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
+ cli.configure(
+ {
+ "artifacts": {"url": share.repo, "push": True},
+ "projects": {"test": {"strict": False}},
}
- })
+ )
# First get us a build
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
# Now cause one of the dependencies to change their cache key
@@ -659,12 +683,12 @@ def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
# Here we just add a file, causing the strong cache key of the
# import-bin.bst element to change due to the local files it
# imports changing.
- path = os.path.join(project, 'files', 'bin-files', 'newfile')
- with open(path, 'w') as f:
+ path = os.path.join(project, "files", "bin-files", "newfile")
+ with open(path, "w") as f:
f.write("PONY !")
# Now build again after having changed the dependencies
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
# Now run `bst artifact push`.
@@ -673,8 +697,8 @@ def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
# a pull queue to be added to the `push` command, the behavior
# around this is different.
args = []
- if buildtrees == 'buildtrees':
- args += ['--pull-buildtrees']
- args += ['artifact', 'push', '--deps', 'all', 'target.bst']
+ if buildtrees == "buildtrees":
+ args += ["--pull-buildtrees"]
+ args += ["artifact", "push", "--deps", "all", "target.bst"]
result = cli.run(project=project, args=args)
result.assert_success()
diff --git a/tests/frontend/rebuild.py b/tests/frontend/rebuild.py
index 1cdb45d11..d3e36e6f4 100644
--- a/tests/frontend/rebuild.py
+++ b/tests/frontend/rebuild.py
@@ -6,15 +6,12 @@ import pytest
from buildstream.testing import cli # pylint: disable=unused-import
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
def strict_args(args, strict):
if strict != "strict":
- return ['--no-strict', *args]
+ return ["--no-strict", *args]
return args
@@ -24,15 +21,19 @@ def test_rebuild(datafiles, cli, strict):
project = str(datafiles)
# First build intermediate target.bst
- result = cli.run(project=project, args=strict_args(['build', 'target.bst'], strict))
+ result = cli.run(project=project, args=strict_args(["build", "target.bst"], strict))
result.assert_success()
# Modify base import
- with open(os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'new.h'), "w") as f:
+ with open(
+ os.path.join(project, "files", "dev-files", "usr", "include", "new.h"), "w"
+ ) as f:
f.write("#define NEW")
# Rebuild base import and build top-level rebuild-target.bst
# In non-strict mode, this does not rebuild intermediate target.bst,
# which means that a weakly cached target.bst will be staged as a dependency.
- result = cli.run(project=project, args=strict_args(['build', 'rebuild-target.bst'], strict))
+ result = cli.run(
+ project=project, args=strict_args(["build", "rebuild-target.bst"], strict)
+ )
result.assert_success()
diff --git a/tests/frontend/remote-caches.py b/tests/frontend/remote-caches.py
index 6ee57df23..b112e0882 100644
--- a/tests/frontend/remote-caches.py
+++ b/tests/frontend/remote-caches.py
@@ -28,7 +28,7 @@ from buildstream import _yaml
from tests.testutils import create_artifact_share, create_element_size
-DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'project')
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
def message_handler(message, context):
@@ -37,54 +37,46 @@ def message_handler(message, context):
@pytest.mark.datafiles(DATA_DIR)
def test_source_artifact_caches(cli, tmpdir, datafiles):
- cachedir = os.path.join(str(tmpdir), 'cache')
+ cachedir = os.path.join(str(tmpdir), "cache")
project_dir = str(datafiles)
- element_path = os.path.join(project_dir, 'elements')
+ element_path = os.path.join(project_dir, "elements")
- with create_artifact_share(os.path.join(str(tmpdir), 'share')) as share:
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'source-caches': {
- 'url': share.repo,
- 'push': True,
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': cachedir
+ "scheduler": {"pushers": 1},
+ "source-caches": {"url": share.repo, "push": True,},
+ "artifacts": {"url": share.repo, "push": True,},
+ "cachedir": cachedir,
}
_yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
- create_element_size('repo.bst', project_dir, element_path, [], 10000)
+ create_element_size("repo.bst", project_dir, element_path, [], 10000)
- res = cli.run(project=project_dir, args=['build', 'repo.bst'])
+ res = cli.run(project=project_dir, args=["build", "repo.bst"])
res.assert_success()
assert "Pushed source " in res.stderr
assert "Pushed artifact " in res.stderr
# delete local sources and artifacts and check it pulls them
- shutil.rmtree(os.path.join(cachedir, 'cas'))
- shutil.rmtree(os.path.join(cachedir, 'sources'))
+ shutil.rmtree(os.path.join(cachedir, "cas"))
+ shutil.rmtree(os.path.join(cachedir, "sources"))
# this should just fetch the artifacts
- res = cli.run(project=project_dir, args=['build', 'repo.bst'])
+ res = cli.run(project=project_dir, args=["build", "repo.bst"])
res.assert_success()
assert "Pulled artifact " in res.stderr
assert "Pulled source " not in res.stderr
# remove the artifact from the repo and check it pulls sources, builds
# and then pushes the artifacts
- shutil.rmtree(os.path.join(cachedir, 'cas'))
- shutil.rmtree(os.path.join(cachedir, 'artifacts'))
- print(os.listdir(os.path.join(share.repodir, 'artifacts', 'refs')))
- shutil.rmtree(os.path.join(share.repodir, 'artifacts', 'refs', 'test'))
+ shutil.rmtree(os.path.join(cachedir, "cas"))
+ shutil.rmtree(os.path.join(cachedir, "artifacts"))
+ print(os.listdir(os.path.join(share.repodir, "artifacts", "refs")))
+ shutil.rmtree(os.path.join(share.repodir, "artifacts", "refs", "test"))
- res = cli.run(project=project_dir, args=['build', 'repo.bst'])
+ res = cli.run(project=project_dir, args=["build", "repo.bst"])
res.assert_success()
assert "Remote ({}) does not have artifact ".format(share.repo) in res.stderr
assert "Pulled source" in res.stderr
diff --git a/tests/frontend/show.py b/tests/frontend/show.py
index bc51d2967..a54d625ea 100644
--- a/tests/frontend/show.py
+++ b/tests/frontend/show.py
@@ -15,85 +15,108 @@ from tests.testutils import generate_junction
from . import configure_project
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)),)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
-@pytest.mark.parametrize("target,fmt,expected", [
- ('import-bin.bst', '%{name}', 'import-bin.bst'),
- ('import-bin.bst', '%{state}', 'buildable'),
- ('compose-all.bst', '%{state}', 'waiting')
-])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
+@pytest.mark.parametrize(
+ "target,fmt,expected",
+ [
+ ("import-bin.bst", "%{name}", "import-bin.bst"),
+ ("import-bin.bst", "%{state}", "buildable"),
+ ("compose-all.bst", "%{state}", "waiting"),
+ ],
+)
def test_show(cli, datafiles, target, fmt, expected):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', fmt,
- target])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", fmt, target],
+ )
result.assert_success()
if result.output.strip() != expected:
- raise AssertionError("Expected output:\n{}\nInstead received output:\n{}"
- .format(expected, result.output))
+ raise AssertionError(
+ "Expected output:\n{}\nInstead received output:\n{}".format(
+ expected, result.output
+ )
+ )
-@pytest.mark.datafiles(os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "invalid_element_path",
-))
+@pytest.mark.datafiles(
+ os.path.join(os.path.dirname(os.path.realpath(__file__)), "invalid_element_path",)
+)
def test_show_invalid_element_path(cli, datafiles):
project = str(datafiles)
- cli.run(project=project, silent=True, args=['show', "foo.bst"])
+ cli.run(project=project, silent=True, args=["show", "foo.bst"])
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project_default'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project_default"))
def test_show_default(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=[
- 'show'])
+ result = cli.run(project=project, silent=True, args=["show"])
result.assert_success()
# Get the result output of "[state sha element]" and turn into a list
results = result.output.strip().split(" ")
- expected = 'target2.bst'
+ expected = "target2.bst"
assert results[2] == expected
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project_fail'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project_fail"))
def test_show_fail(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=[
- 'show'])
+ result = cli.run(project=project, silent=True, args=["show"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
-@pytest.mark.parametrize("target,except_,expected", [
- ('target.bst', 'import-bin.bst', ['import-dev.bst', 'compose-all.bst', 'target.bst']),
- ('target.bst', 'import-dev.bst', ['import-bin.bst', 'compose-all.bst', 'target.bst']),
- ('target.bst', 'compose-all.bst', ['import-bin.bst', 'target.bst']),
- ('compose-all.bst', 'import-bin.bst', ['import-dev.bst', 'compose-all.bst'])
-])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
+@pytest.mark.parametrize(
+ "target,except_,expected",
+ [
+ (
+ "target.bst",
+ "import-bin.bst",
+ ["import-dev.bst", "compose-all.bst", "target.bst"],
+ ),
+ (
+ "target.bst",
+ "import-dev.bst",
+ ["import-bin.bst", "compose-all.bst", "target.bst"],
+ ),
+ ("target.bst", "compose-all.bst", ["import-bin.bst", "target.bst"]),
+ ("compose-all.bst", "import-bin.bst", ["import-dev.bst", "compose-all.bst"]),
+ ],
+)
def test_show_except_simple(cli, datafiles, target, except_, expected):
project = str(datafiles)
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'all',
- '--format', '%{name}',
- '--except', except_,
- target])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "show",
+ "--deps",
+ "all",
+ "--format",
+ "%{name}",
+ "--except",
+ except_,
+ target,
+ ],
+ )
result.assert_success()
results = result.output.strip().splitlines()
if results != expected:
- raise AssertionError("Expected elements:\n{}\nInstead received elements:\n{}"
- .format(expected, results))
+ raise AssertionError(
+ "Expected elements:\n{}\nInstead received elements:\n{}".format(
+ expected, results
+ )
+ )
# This test checks various constructions of a pipeline
@@ -101,104 +124,124 @@ def test_show_except_simple(cli, datafiles, target, except_, expected):
# each data set provides the targets, exceptions and expected
# result list.
#
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'exceptions'))
-@pytest.mark.parametrize("targets,exceptions,expected", [
-
- # Test without exceptions, lets just see the whole list here
- (['build.bst'], None, [
- 'fourth-level-1.bst',
- 'third-level-1.bst',
- 'fourth-level-2.bst',
- 'third-level-2.bst',
- 'fourth-level-3.bst',
- 'third-level-3.bst',
- 'second-level-1.bst',
- 'first-level-1.bst',
- 'first-level-2.bst',
- 'build.bst',
- ]),
-
- # Test one target and excepting a part of the pipeline, this
- # removes forth-level-1 and third-level-1
- (['build.bst'], ['third-level-1.bst'], [
- 'fourth-level-2.bst',
- 'third-level-2.bst',
- 'fourth-level-3.bst',
- 'third-level-3.bst',
- 'second-level-1.bst',
- 'first-level-1.bst',
- 'first-level-2.bst',
- 'build.bst',
- ]),
-
- # Test one target and excepting a part of the pipeline, check that
- # excepted dependencies remain in the pipeline if depended on from
- # outside of the except element
- (['build.bst'], ['second-level-1.bst'], [
- 'fourth-level-2.bst',
- 'third-level-2.bst', # first-level-2 depends on this, so not excepted
- 'first-level-1.bst',
- 'first-level-2.bst',
- 'build.bst',
- ]),
-
- # The same as the above test, but excluding the toplevel build.bst,
- # instead only select the two toplevel dependencies as targets
- (['first-level-1.bst', 'first-level-2.bst'], ['second-level-1.bst'], [
- 'fourth-level-2.bst',
- 'third-level-2.bst', # first-level-2 depends on this, so not excepted
- 'first-level-1.bst',
- 'first-level-2.bst',
- ]),
-
- # Test one target and excepting an element outisde the pipeline
- (['build.bst'], ['unrelated-1.bst'], [
- 'fourth-level-2.bst',
- 'third-level-2.bst', # first-level-2 depends on this, so not excepted
- 'first-level-1.bst',
- 'first-level-2.bst',
- 'build.bst',
- ]),
-
- # Test one target and excepting two elements
- (['build.bst'], ['unrelated-1.bst', 'unrelated-2.bst'], [
- 'first-level-1.bst',
- 'build.bst',
- ]),
-])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "exceptions"))
+@pytest.mark.parametrize(
+ "targets,exceptions,expected",
+ [
+ # Test without exceptions, let's just see the whole list here
+ (
+ ["build.bst"],
+ None,
+ [
+ "fourth-level-1.bst",
+ "third-level-1.bst",
+ "fourth-level-2.bst",
+ "third-level-2.bst",
+ "fourth-level-3.bst",
+ "third-level-3.bst",
+ "second-level-1.bst",
+ "first-level-1.bst",
+ "first-level-2.bst",
+ "build.bst",
+ ],
+ ),
+ # Test one target and excepting a part of the pipeline, this
+ # removes fourth-level-1 and third-level-1
+ (
+ ["build.bst"],
+ ["third-level-1.bst"],
+ [
+ "fourth-level-2.bst",
+ "third-level-2.bst",
+ "fourth-level-3.bst",
+ "third-level-3.bst",
+ "second-level-1.bst",
+ "first-level-1.bst",
+ "first-level-2.bst",
+ "build.bst",
+ ],
+ ),
+ # Test one target and excepting a part of the pipeline, check that
+ # excepted dependencies remain in the pipeline if depended on from
+ # outside of the except element
+ (
+ ["build.bst"],
+ ["second-level-1.bst"],
+ [
+ "fourth-level-2.bst",
+ "third-level-2.bst", # first-level-2 depends on this, so not excepted
+ "first-level-1.bst",
+ "first-level-2.bst",
+ "build.bst",
+ ],
+ ),
+ # The same as the above test, but excluding the toplevel build.bst,
+ # instead only select the two toplevel dependencies as targets
+ (
+ ["first-level-1.bst", "first-level-2.bst"],
+ ["second-level-1.bst"],
+ [
+ "fourth-level-2.bst",
+ "third-level-2.bst", # first-level-2 depends on this, so not excepted
+ "first-level-1.bst",
+ "first-level-2.bst",
+ ],
+ ),
+ # Test one target and excepting an element outside the pipeline
+ (
+ ["build.bst"],
+ ["unrelated-1.bst"],
+ [
+ "fourth-level-2.bst",
+ "third-level-2.bst", # first-level-2 depends on this, so not excepted
+ "first-level-1.bst",
+ "first-level-2.bst",
+ "build.bst",
+ ],
+ ),
+ # Test one target and excepting two elements
+ (
+ ["build.bst"],
+ ["unrelated-1.bst", "unrelated-2.bst"],
+ ["first-level-1.bst", "build.bst",],
+ ),
+ ],
+)
def test_show_except(cli, datafiles, targets, exceptions, expected):
basedir = str(datafiles)
- results = cli.get_pipeline(basedir, targets, except_=exceptions, scope='all')
+ results = cli.get_pipeline(basedir, targets, except_=exceptions, scope="all")
if results != expected:
- raise AssertionError("Expected elements:\n{}\nInstead received elements:\n{}"
- .format(expected, results))
+ raise AssertionError(
+ "Expected elements:\n{}\nInstead received elements:\n{}".format(
+ expected, results
+ )
+ )
###############################################################
# Testing multiple targets #
###############################################################
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
def test_parallel_order(cli, datafiles):
project = str(datafiles)
- elements = ['multiple_targets/order/0.bst',
- 'multiple_targets/order/1.bst']
+ elements = ["multiple_targets/order/0.bst", "multiple_targets/order/1.bst"]
- args = ['show', '-d', 'plan', '-f', '%{name}', *elements]
+ args = ["show", "-d", "plan", "-f", "%{name}", *elements]
result = cli.run(project=project, args=args)
result.assert_success()
# Get the planned order
names = result.output.splitlines()
- names = [name[len('multiple_targets/order/'):] for name in names]
+ names = [name[len("multiple_targets/order/") :] for name in names]
# Create all possible 'correct' topological orderings
orderings = itertools.product(
- [('5.bst', '6.bst')],
- itertools.permutations(['4.bst', '7.bst']),
- itertools.permutations(['3.bst', '8.bst']),
- itertools.permutations(['2.bst', '9.bst']),
- itertools.permutations(['0.bst', '1.bst', 'run.bst'])
+ [("5.bst", "6.bst")],
+ itertools.permutations(["4.bst", "7.bst"]),
+ itertools.permutations(["3.bst", "8.bst"]),
+ itertools.permutations(["2.bst", "9.bst"]),
+ itertools.permutations(["0.bst", "1.bst", "run.bst"]),
)
orderings = [list(itertools.chain.from_iterable(perm)) for perm in orderings]
@@ -206,95 +249,92 @@ def test_parallel_order(cli, datafiles):
assert names in orderings, "We got: {}".format(", ".join(names))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
def test_target_is_dependency(cli, datafiles):
project = str(datafiles)
- elements = ['multiple_targets/dependency/zebry.bst',
- 'multiple_targets/dependency/horsey.bst']
+ elements = [
+ "multiple_targets/dependency/zebry.bst",
+ "multiple_targets/dependency/horsey.bst",
+ ]
- args = ['show', '-d', 'plan', '-f', '%{name}', *elements]
+ args = ["show", "-d", "plan", "-f", "%{name}", *elements]
result = cli.run(project=project, args=args)
result.assert_success()
# Get the planned order
names = result.output.splitlines()
- names = [name[len('multiple_targets/dependency/'):] for name in names]
+ names = [name[len("multiple_targets/dependency/") :] for name in names]
- assert names == ['pony.bst', 'horsey.bst', 'zebry.bst']
+ assert names == ["pony.bst", "horsey.bst", "zebry.bst"]
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
-@pytest.mark.parametrize("element_name", ['junction-dep.bst', 'junction.bst:import-etc.bst'])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
+@pytest.mark.parametrize(
+ "element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"]
+)
@pytest.mark.parametrize("workspaced", [True, False], ids=["workspace", "no-workspace"])
-def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage, element_name, workspaced):
+def test_unfetched_junction(
+ cli, tmpdir, datafiles, ref_storage, element_name, workspaced
+):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
- configure_project(project, {
- 'ref-storage': ref_storage
- })
+ configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
- ref = generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
+ ref = generate_junction(
+ tmpdir, subproject_path, junction_path, store_ref=(ref_storage == "inline")
+ )
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Dump a project.refs if we're using project.refs storage
#
- if ref_storage == 'project.refs':
- project_refs = {
- 'projects': {
- 'test': {
- 'junction.bst': [
- {
- 'ref': ref
- }
- ]
- }
- }
- }
- _yaml.roundtrip_dump(project_refs, os.path.join(project, 'junction.refs'))
+ if ref_storage == "project.refs":
+ project_refs = {"projects": {"test": {"junction.bst": [{"ref": ref}]}}}
+ _yaml.roundtrip_dump(project_refs, os.path.join(project, "junction.refs"))
# Open a workspace if we're testing workspaced behavior
if workspaced:
- result = cli.run(project=project, silent=True, args=[
- 'workspace', 'open', '--no-checkout', '--directory', subproject_path, 'junction.bst'
- ])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "workspace",
+ "open",
+ "--no-checkout",
+ "--directory",
+ subproject_path,
+ "junction.bst",
+ ],
+ )
result.assert_success()
# Assert successful bst show (requires implicit subproject fetching)
- result = cli.run(project=project, silent=True, args=[
- 'show', element_name])
+ result = cli.run(project=project, silent=True, args=["show", element_name])
result.assert_success()
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
@pytest.mark.parametrize("workspaced", [True, False], ids=["workspace", "no-workspace"])
def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
- configure_project(project, {
- 'ref-storage': ref_storage
- })
+ configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)
@@ -302,30 +342,36 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Open a workspace if we're testing workspaced behavior
if workspaced:
- result = cli.run(project=project, silent=True, args=[
- 'workspace', 'open', '--no-checkout', '--directory', subproject_path, 'junction.bst'
- ])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "workspace",
+ "open",
+ "--no-checkout",
+ "--directory",
+ subproject_path,
+ "junction.bst",
+ ],
+ )
result.assert_success()
# Assert the correct error when trying to show the pipeline
- dep_result = cli.run(project=project, silent=True, args=[
- 'show', 'junction-dep.bst'])
+ dep_result = cli.run(
+ project=project, silent=True, args=["show", "junction-dep.bst"]
+ )
# Assert the correct error when trying to show the pipeline
- etc_result = cli.run(project=project, silent=True, args=[
- 'show', 'junction.bst:import-etc.bst'])
+ etc_result = cli.run(
+ project=project, silent=True, args=["show", "junction.bst:import-etc.bst"]
+ )
# If a workspace is open, no ref is needed
if workspaced:
@@ -333,24 +379,30 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
etc_result.assert_success()
else:
# Assert that we have the expected provenance encoded into the error
- element_node = _yaml.load(element_path, shortname='junction-dep.bst')
- ref_node = element_node.get_sequence('depends').mapping_at(0)
+ element_node = _yaml.load(element_path, shortname="junction-dep.bst")
+ ref_node = element_node.get_sequence("depends").mapping_at(0)
provenance = ref_node.get_provenance()
assert str(provenance) in dep_result.stderr
- dep_result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
- etc_result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
+ dep_result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT
+ )
+ etc_result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT
+ )
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
-@pytest.mark.parametrize("element_name", ['junction-dep.bst', 'junction.bst:import-etc.bst'])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
+@pytest.mark.parametrize(
+ "element_name", ["junction-dep.bst", "junction.bst:import-etc.bst"]
+)
@pytest.mark.parametrize("workspaced", [True, False], ids=["workspace", "no-workspace"])
def test_fetched_junction(cli, tmpdir, datafiles, element_name, workspaced):
project = str(datafiles)
project = os.path.join(datafiles.dirname, datafiles.basename)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
@@ -358,33 +410,41 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name, workspaced):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(project=project, silent=True, args=[
- 'source', 'fetch', 'junction.bst'])
+ result = cli.run(
+ project=project, silent=True, args=["source", "fetch", "junction.bst"]
+ )
result.assert_success()
# Open a workspace if we're testing workspaced behavior
if workspaced:
- result = cli.run(project=project, silent=True, args=[
- 'workspace', 'open', '--no-checkout', '--directory', subproject_path, 'junction.bst'
- ])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=[
+ "workspace",
+ "open",
+ "--no-checkout",
+ "--directory",
+ subproject_path,
+ "junction.bst",
+ ],
+ )
result.assert_success()
# Assert the correct error when trying to show the pipeline
- result = cli.run(project=project, silent=True, args=[
- 'show', '--format', '%{name}-%{state}', element_name])
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--format", "%{name}-%{state}", element_name],
+ )
results = result.output.strip().splitlines()
- assert 'junction.bst:import-etc.bst-buildable' in results
+ assert "junction.bst:import-etc.bst-buildable" in results
###############################################################
@@ -404,8 +464,9 @@ def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
"""
os.mkdir(project_path)
- result = cli.run(silent=True,
- args=['init', '--project-name', project_name, project_path])
+ result = cli.run(
+ silent=True, args=["init", "--project-name", project_name, project_path]
+ )
result.assert_success()
sourcefiles_path = os.path.join(project_path, "files")
@@ -414,22 +475,26 @@ def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
element_path = os.path.join(project_path, "elements")
for i in range(0, dependency_depth + 1):
element = {
- 'kind': 'import',
- 'sources': [{'kind': 'local',
- 'path': 'files/source{}'.format(str(i))}],
- 'depends': ['element{}.bst'.format(str(i - 1))]
+ "kind": "import",
+ "sources": [{"kind": "local", "path": "files/source{}".format(str(i))}],
+ "depends": ["element{}.bst".format(str(i - 1))],
}
if i == 0:
- del element['depends']
- _yaml.roundtrip_dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))
+ del element["depends"]
+ _yaml.roundtrip_dump(
+ element, os.path.join(element_path, "element{}.bst".format(str(i)))
+ )
source = os.path.join(sourcefiles_path, "source{}".format(str(i)))
- open(source, 'x').close()
+ open(source, "x").close()
assert os.path.exists(source)
setup_test()
- result = cli.run(project=project_path, silent=True,
- args=['show', "element{}.bst".format(str(dependency_depth))])
+ result = cli.run(
+ project=project_path,
+ silent=True,
+ args=["show", "element{}.bst".format(str(dependency_depth))],
+ )
recursion_limit = sys.getrecursionlimit()
if dependency_depth <= recursion_limit:
@@ -445,71 +510,69 @@ def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
###############################################################
# Testing format symbols #
###############################################################
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
-@pytest.mark.parametrize("dep_kind, expected_deps", [
- ('%{deps}', '[import-dev.bst, import-bin.bst]'),
- ('%{build-deps}', '[import-dev.bst]'),
- ('%{runtime-deps}', '[import-bin.bst]')
-])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
+@pytest.mark.parametrize(
+ "dep_kind, expected_deps",
+ [
+ ("%{deps}", "[import-dev.bst, import-bin.bst]"),
+ ("%{build-deps}", "[import-dev.bst]"),
+ ("%{runtime-deps}", "[import-bin.bst]"),
+ ],
+)
def test_format_deps(cli, datafiles, dep_kind, expected_deps):
project = str(datafiles)
- target = 'checkout-deps.bst'
- result = cli.run(project=project, silent=True, args=[
- 'show',
- '--deps', 'none',
- '--format', '%{name}: ' + dep_kind,
- target])
+ target = "checkout-deps.bst"
+ result = cli.run(
+ project=project,
+ silent=True,
+ args=["show", "--deps", "none", "--format", "%{name}: " + dep_kind, target],
+ )
result.assert_success()
- expected = '{name}: {deps}'.format(name=target, deps=expected_deps)
+ expected = "{name}: {deps}".format(name=target, deps=expected_deps)
if result.output.strip() != expected:
- raise AssertionError("Expected output:\n{}\nInstead received output:\n{}"
- .format(expected, result.output))
+ raise AssertionError(
+ "Expected output:\n{}\nInstead received output:\n{}".format(
+ expected, result.output
+ )
+ )
# This tests the resolved value of the 'max-jobs' variable,
# ensuring at least that the variables are resolved according
# to how the user has configured max-jobs
#
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
-@pytest.mark.parametrize("cli_value, config_value", [
- (None, None),
- (None, '16'),
- ('16', None),
- ('5', '16'),
- ('0', '16'),
- ('16', '0'),
-])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project"))
+@pytest.mark.parametrize(
+ "cli_value, config_value",
+ [(None, None), (None, "16"), ("16", None), ("5", "16"), ("0", "16"), ("16", "0"),],
+)
def test_max_jobs(cli, datafiles, cli_value, config_value):
project = str(datafiles)
- target = 'target.bst'
+ target = "target.bst"
# Specify `--max-jobs` if this test sets it
args = []
if cli_value is not None:
- args += ['--max-jobs', cli_value]
- args += ['show', '--deps', 'none', '--format', '%{vars}', target]
+ args += ["--max-jobs", cli_value]
+ args += ["show", "--deps", "none", "--format", "%{vars}", target]
# Specify `max-jobs` in user configuration if this test sets it
if config_value is not None:
- cli.configure({
- 'build': {
- 'max-jobs': config_value
- }
- })
+ cli.configure({"build": {"max-jobs": config_value}})
result = cli.run(project=project, silent=True, args=args)
result.assert_success()
loaded = _yaml.load_data(result.output)
- loaded_value = loaded.get_int('max-jobs')
+ loaded_value = loaded.get_int("max-jobs")
# We expect the value provided on the command line to take
# precedence over the configuration file value, if specified.
#
# If neither are specified then we expect the default
- expected_value = cli_value or config_value or '0'
+ expected_value = cli_value or config_value or "0"
- if expected_value == '0':
+ if expected_value == "0":
# If we are expecting the automatic behavior of using the maximum
# number of cores available, just check that it is a value > 0
assert loaded_value > 0, "Automatic setting of max-jobs didnt work"
@@ -534,39 +597,33 @@ def test_max_jobs(cli, datafiles, cli_value, config_value):
# depends on the changing import element, and one which
# depends on it regularly.
#
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'strict-depends'))
-@pytest.mark.parametrize("target, expected_state", [
- ("non-strict-depends.bst", "cached"),
- ("strict-depends.bst", "waiting"),
-])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "strict-depends"))
+@pytest.mark.parametrize(
+ "target, expected_state",
+ [("non-strict-depends.bst", "cached"), ("strict-depends.bst", "waiting"),],
+)
def test_strict_dependencies(cli, datafiles, target, expected_state):
project = str(datafiles)
# Configure non-strict mode; this will have
# an effect on the build and the `bst show`
# commands run via cli.get_element_states()
- cli.configure({
- 'projects': {
- 'test': {
- 'strict': False
- }
- }
- })
+ cli.configure({"projects": {"test": {"strict": False}}})
- result = cli.run(project=project, silent=True, args=['build', target])
+ result = cli.run(project=project, silent=True, args=["build", target])
result.assert_success()
- states = cli.get_element_states(project, ['base.bst', target])
- assert states['base.bst'] == 'cached'
- assert states[target] == 'cached'
+ states = cli.get_element_states(project, ["base.bst", target])
+ assert states["base.bst"] == "cached"
+ assert states[target] == "cached"
# Now modify the file, effectively causing the common base.bst
# dependency to change its cache key
- hello_path = os.path.join(project, 'files', 'hello.txt')
- with open(hello_path, 'w') as f:
+ hello_path = os.path.join(project, "files", "hello.txt")
+ with open(hello_path, "w") as f:
f.write("Goodbye")
# Now assert that we have the states we expect as a result
- states = cli.get_element_states(project, ['base.bst', target])
- assert states['base.bst'] == 'buildable'
+ states = cli.get_element_states(project, ["base.bst", target])
+ assert states["base.bst"] == "buildable"
assert states[target] == expected_state
diff --git a/tests/frontend/source_checkout.py b/tests/frontend/source_checkout.py
index 8d6bae83b..f10c24c5d 100644
--- a/tests/frontend/source_checkout.py
+++ b/tests/frontend/source_checkout.py
@@ -11,23 +11,20 @@ from buildstream.testing import cli # pylint: disable=unused-import
from buildstream import utils, _yaml
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'project',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
def generate_remote_import_element(input_path, output_path):
return {
- 'kind': 'import',
- 'sources': [
+ "kind": "import",
+ "sources": [
{
- 'kind': 'remote',
- 'url': 'file://{}'.format(input_path),
- 'filename': output_path,
- 'ref': utils.sha256sum(input_path),
+ "kind": "remote",
+ "url": "file://{}".format(input_path),
+ "filename": output_path,
+ "ref": utils.sha256sum(input_path),
}
- ]
+ ],
}
@@ -35,65 +32,90 @@ def generate_remote_import_element(input_path, output_path):
@pytest.mark.parametrize(
"with_workspace,guess_element",
[(True, True), (True, False), (False, False)],
- ids=["workspace-guess", "workspace-no-guess", "no-workspace-no-guess"]
+ ids=["workspace-guess", "workspace-no-guess", "no-workspace-no-guess"],
)
def test_source_checkout(datafiles, cli, tmpdir_factory, with_workspace, guess_element):
tmpdir = tmpdir_factory.mktemp("")
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'source-checkout')
- target = 'checkout-deps.bst'
- workspace = os.path.join(str(tmpdir), 'workspace')
+ checkout = os.path.join(cli.directory, "source-checkout")
+ target = "checkout-deps.bst"
+ workspace = os.path.join(str(tmpdir), "workspace")
elm_cmd = [target] if not guess_element else []
if with_workspace:
- ws_cmd = ['-C', workspace]
- result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, target])
+ ws_cmd = ["-C", workspace]
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, target],
+ )
result.assert_success()
else:
ws_cmd = []
- args = ws_cmd + ['source', 'checkout', '--deps', 'none', '--directory', checkout, *elm_cmd]
+ args = ws_cmd + [
+ "source",
+ "checkout",
+ "--deps",
+ "none",
+ "--directory",
+ checkout,
+ *elm_cmd,
+ ]
result = cli.run(project=project, args=args)
result.assert_success()
- assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+ assert os.path.exists(
+ os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
+ )
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize('force_flag', ['--force', '-f'])
+@pytest.mark.parametrize("force_flag", ["--force", "-f"])
def test_source_checkout_force(datafiles, cli, force_flag):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'source-checkout')
- target = 'checkout-deps.bst'
+ checkout = os.path.join(cli.directory, "source-checkout")
+ target = "checkout-deps.bst"
# Make the checkout directory with 'some-thing' inside it
- os.makedirs(os.path.join(checkout, 'some-thing'))
-
- result = cli.run(project=project, args=['source', 'checkout',
- force_flag,
- '--deps', 'none',
- '--directory', checkout,
- target])
+ os.makedirs(os.path.join(checkout, "some-thing"))
+
+ result = cli.run(
+ project=project,
+ args=[
+ "source",
+ "checkout",
+ force_flag,
+ "--deps",
+ "none",
+ "--directory",
+ checkout,
+ target,
+ ],
+ )
result.assert_success()
- assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+ assert os.path.exists(
+ os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout_tar(datafiles, cli):
project = str(datafiles)
- tar = os.path.join(cli.directory, 'source-checkout.tar')
- target = 'checkout-deps.bst'
+ tar = os.path.join(cli.directory, "source-checkout.tar")
+ target = "checkout-deps.bst"
- result = cli.run(project=project, args=['source', 'checkout',
- '--tar', tar,
- '--deps', 'none',
- target])
+ result = cli.run(
+ project=project,
+ args=["source", "checkout", "--tar", tar, "--deps", "none", target],
+ )
result.assert_success()
assert os.path.exists(tar)
with tarfile.open(tar) as tf:
- expected_content = os.path.join(tar, 'checkout-deps', 'etc', 'buildstream', 'config')
+ expected_content = os.path.join(
+ tar, "checkout-deps", "etc", "buildstream", "config"
+ )
tar_members = [f.name for f in tf]
for member in tar_members:
assert member in expected_content
@@ -105,111 +127,151 @@ def test_source_checkout_compressed_tar(datafiles, cli, compression):
project = str(datafiles)
tarfile_name = "source-checkout.tar" + compression
tar = os.path.join(cli.directory, tarfile_name)
- target = 'checkout-deps.bst'
-
- result = cli.run(project=project, args=['source', 'checkout',
- '--tar', tar,
- '--compression', compression,
- '--deps', 'none',
- target])
+ target = "checkout-deps.bst"
+
+ result = cli.run(
+ project=project,
+ args=[
+ "source",
+ "checkout",
+ "--tar",
+ tar,
+ "--compression",
+ compression,
+ "--deps",
+ "none",
+ target,
+ ],
+ )
result.assert_success()
- tar = tarfile.open(name=tar, mode='r:' + compression)
- assert os.path.join('checkout-deps', 'etc', 'buildstream', 'config') in tar.getnames()
+ tar = tarfile.open(name=tar, mode="r:" + compression)
+ assert (
+ os.path.join("checkout-deps", "etc", "buildstream", "config") in tar.getnames()
+ )
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize('deps', [('build'), ('none'), ('run'), ('all')])
+@pytest.mark.parametrize("deps", [("build"), ("none"), ("run"), ("all")])
def test_source_checkout_deps(datafiles, cli, deps):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'source-checkout')
- target = 'checkout-deps.bst'
+ checkout = os.path.join(cli.directory, "source-checkout")
+ target = "checkout-deps.bst"
- result = cli.run(project=project, args=['source', 'checkout',
- '--directory', checkout,
- '--deps', deps,
- target])
+ result = cli.run(
+ project=project,
+ args=["source", "checkout", "--directory", checkout, "--deps", deps, target],
+ )
result.assert_success()
# Sources of the target
- if deps == 'build':
- assert not os.path.exists(os.path.join(checkout, 'checkout-deps'))
+ if deps == "build":
+ assert not os.path.exists(os.path.join(checkout, "checkout-deps"))
else:
- assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+ assert os.path.exists(
+ os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
+ )
# Sources of the target's build dependencies
- if deps in ('build', 'all'):
- assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
+ if deps in ("build", "all"):
+ assert os.path.exists(
+ os.path.join(checkout, "import-dev", "usr", "include", "pony.h")
+ )
else:
- assert not os.path.exists(os.path.join(checkout, 'import-dev'))
+ assert not os.path.exists(os.path.join(checkout, "import-dev"))
# Sources of the target's runtime dependencies
- if deps in ('run', 'all'):
- assert os.path.exists(os.path.join(checkout, 'import-bin', 'usr', 'bin', 'hello'))
+ if deps in ("run", "all"):
+ assert os.path.exists(
+ os.path.join(checkout, "import-bin", "usr", "bin", "hello")
+ )
else:
- assert not os.path.exists(os.path.join(checkout, 'import-bin'))
+ assert not os.path.exists(os.path.join(checkout, "import-bin"))
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout_except(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'source-checkout')
- target = 'checkout-deps.bst'
-
- result = cli.run(project=project, args=['source', 'checkout',
- '--directory', checkout,
- '--deps', 'all',
- '--except', 'import-bin.bst',
- target])
+ checkout = os.path.join(cli.directory, "source-checkout")
+ target = "checkout-deps.bst"
+
+ result = cli.run(
+ project=project,
+ args=[
+ "source",
+ "checkout",
+ "--directory",
+ checkout,
+ "--deps",
+ "all",
+ "--except",
+ "import-bin.bst",
+ target,
+ ],
+ )
result.assert_success()
# Sources for the target should be present
- assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+ assert os.path.exists(
+ os.path.join(checkout, "checkout-deps", "etc", "buildstream", "config")
+ )
# Sources for import-bin.bst should not be present
- assert not os.path.exists(os.path.join(checkout, 'import-bin'))
+ assert not os.path.exists(os.path.join(checkout, "import-bin"))
# Sources for other dependencies should be present
- assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
+ assert os.path.exists(
+ os.path.join(checkout, "import-dev", "usr", "include", "pony.h")
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout_fetch(datafiles, cli):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'source-checkout')
- target = 'remote-import-dev.bst'
- target_path = os.path.join(project, 'elements', target)
+ checkout = os.path.join(cli.directory, "source-checkout")
+ target = "remote-import-dev.bst"
+ target_path = os.path.join(project, "elements", target)
# Create an element with remote source
element = generate_remote_import_element(
- os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'pony.h'),
- 'pony.h')
+ os.path.join(project, "files", "dev-files", "usr", "include", "pony.h"),
+ "pony.h",
+ )
_yaml.roundtrip_dump(element, target_path)
# Testing implicit fetching requires that we do not have the sources
# cached already
- assert cli.get_element_state(project, target) == 'fetch needed'
+ assert cli.get_element_state(project, target) == "fetch needed"
- args = ['source', 'checkout']
+ args = ["source", "checkout"]
args += [target, checkout]
- result = cli.run(project=project, args=['source', 'checkout', '--directory', checkout, target])
+ result = cli.run(
+ project=project, args=["source", "checkout", "--directory", checkout, target]
+ )
result.assert_success()
- assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
+ assert os.path.exists(os.path.join(checkout, "remote-import-dev", "pony.h"))
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout_build_scripts(cli, tmpdir, datafiles):
project_path = str(datafiles)
- element_name = 'source-bundle/source-bundle-hello.bst'
- normal_name = 'source-bundle-source-bundle-hello'
- checkout = os.path.join(str(tmpdir), 'source-checkout')
-
- args = ['source', 'checkout', '--include-build-scripts', '--directory', checkout, element_name]
+ element_name = "source-bundle/source-bundle-hello.bst"
+ normal_name = "source-bundle-source-bundle-hello"
+ checkout = os.path.join(str(tmpdir), "source-checkout")
+
+ args = [
+ "source",
+ "checkout",
+ "--include-build-scripts",
+ "--directory",
+ checkout,
+ element_name,
+ ]
result = cli.run(project=project_path, args=args)
result.assert_success()
# There should be a script for each element (just one in this case) and a top-level build script
- expected_scripts = ['build.sh', 'build-' + normal_name]
+ expected_scripts = ["build.sh", "build-" + normal_name]
for script in expected_scripts:
assert script in os.listdir(checkout)
@@ -217,17 +279,24 @@ def test_source_checkout_build_scripts(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout_tar_buildscripts(cli, tmpdir, datafiles):
project_path = str(datafiles)
- element_name = 'source-bundle/source-bundle-hello.bst'
- normal_name = 'source-bundle-source-bundle-hello'
- tar_file = os.path.join(str(tmpdir), 'source-checkout.tar')
-
- args = ['source', 'checkout', '--include-build-scripts', '--tar', tar_file, element_name]
+ element_name = "source-bundle/source-bundle-hello.bst"
+ normal_name = "source-bundle-source-bundle-hello"
+ tar_file = os.path.join(str(tmpdir), "source-checkout.tar")
+
+ args = [
+ "source",
+ "checkout",
+ "--include-build-scripts",
+ "--tar",
+ tar_file,
+ element_name,
+ ]
result = cli.run(project=project_path, args=args)
result.assert_success()
- expected_scripts = ['build.sh', 'build-' + normal_name]
+ expected_scripts = ["build.sh", "build-" + normal_name]
- with tarfile.open(tar_file, 'r') as tf:
+ with tarfile.open(tar_file, "r") as tf:
for script in expected_scripts:
assert script in tf.getnames()
@@ -236,14 +305,14 @@ def test_source_checkout_tar_buildscripts(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout_options_tar_and_dir_conflict(cli, tmpdir, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'source-checkout')
- tar_file = os.path.join(str(tmpdir), 'source-checkout.tar')
- target = 'checkout-deps.bst'
+ checkout = os.path.join(cli.directory, "source-checkout")
+ tar_file = os.path.join(str(tmpdir), "source-checkout.tar")
+ target = "checkout-deps.bst"
- result = cli.run(project=project, args=['source', 'checkout',
- '--directory', checkout,
- '--tar', tar_file,
- target])
+ result = cli.run(
+ project=project,
+ args=["source", "checkout", "--directory", checkout, "--tar", tar_file, target],
+ )
assert result.exit_code != 0
assert "ERROR: options --directory and --tar conflict" in result.stderr
@@ -253,13 +322,21 @@ def test_source_checkout_options_tar_and_dir_conflict(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout_compression_without_tar(cli, tmpdir, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'source-checkout')
- target = 'checkout-deps.bst'
-
- result = cli.run(project=project, args=['source', 'checkout',
- '--directory', checkout,
- '--compression', 'xz',
- target])
+ checkout = os.path.join(cli.directory, "source-checkout")
+ target = "checkout-deps.bst"
+
+ result = cli.run(
+ project=project,
+ args=[
+ "source",
+ "checkout",
+ "--directory",
+ checkout,
+ "--compression",
+ "xz",
+ target,
+ ],
+ )
assert result.exit_code != 0
assert "ERROR: --compression specified without --tar" in result.stderr
diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index a628043d8..6d9e3bb3f 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -14,18 +14,13 @@ from . import configure_project
# Project directory
TOP_DIR = os.path.dirname(os.path.realpath(__file__))
-DATA_DIR = os.path.join(TOP_DIR, 'project')
+DATA_DIR = os.path.join(TOP_DIR, "project")
def generate_element(repo, element_path, dep_name=None):
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config()
- ]
- }
+ element = {"kind": "import", "sources": [repo.source_config()]}
if dep_name:
- element['depends'] = [dep_name]
+ element["depends"] = [dep_name]
_yaml.roundtrip_dump(element, element_path)
@@ -33,21 +28,22 @@ def generate_element(repo, element_path, dep_name=None):
@pytest.mark.datafiles(DATA_DIR)
def test_track_single(cli, tmpdir, datafiles):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_dep_name = 'track-test-dep.bst'
- element_target_name = 'track-test-target.bst'
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_dep_name = "track-test-dep.bst"
+ element_target_name = "track-test-target.bst"
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
#
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(dev_files_path)
# Write out our test targets
generate_element(repo, os.path.join(element_path, element_dep_name))
- generate_element(repo, os.path.join(element_path, element_target_name),
- dep_name=element_dep_name)
+ generate_element(
+ repo, os.path.join(element_path, element_target_name), dep_name=element_dep_name
+ )
# Assert that tracking is needed for both elements
states = cli.get_element_states(project, [element_target_name])
@@ -57,71 +53,101 @@ def test_track_single(cli, tmpdir, datafiles):
}
# Now first try to track only one element
- result = cli.run(project=project, args=[
- 'source', 'track', '--deps', 'none',
- element_target_name])
+ result = cli.run(
+ project=project, args=["source", "track", "--deps", "none", element_target_name]
+ )
result.assert_success()
# And now fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', '--deps', 'none',
- element_target_name])
+ result = cli.run(
+ project=project, args=["source", "fetch", "--deps", "none", element_target_name]
+ )
result.assert_success()
# Assert that the dependency is waiting and the target has still never been tracked
states = cli.get_element_states(project, [element_target_name])
assert states == {
- element_dep_name: 'no reference',
- element_target_name: 'waiting',
+ element_dep_name: "no reference",
+ element_target_name: "waiting",
}
@pytest.mark.datafiles(os.path.join(TOP_DIR))
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project-refs')])
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project-refs")])
def test_track_optional(cli, tmpdir, datafiles, ref_storage):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'track-optional-' + ref_storage)
- dev_files_path = os.path.join(project, 'files')
- element_path = os.path.join(project, 'target.bst')
+ project = os.path.join(
+ datafiles.dirname, datafiles.basename, "track-optional-" + ref_storage
+ )
+ dev_files_path = os.path.join(project, "files")
+ element_path = os.path.join(project, "target.bst")
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
#
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(dev_files_path)
# Now create an optional test branch and add a commit to that,
# so two branches with different heads now exist.
#
- repo.branch('test')
+ repo.branch("test")
repo.add_commit()
# Substitute the {repo} for the git repo we created
with open(element_path) as f:
target_bst = f.read()
target_bst = target_bst.format(repo=repo.repo)
- with open(element_path, 'w') as f:
+ with open(element_path, "w") as f:
f.write(target_bst)
# First track for both options
#
# We want to track and persist the ref separately in this test
#
- result = cli.run(project=project, args=['--option', 'test', 'False', 'source', 'track', 'target.bst'])
+ result = cli.run(
+ project=project,
+ args=["--option", "test", "False", "source", "track", "target.bst"],
+ )
result.assert_success()
- result = cli.run(project=project, args=['--option', 'test', 'True', 'source', 'track', 'target.bst'])
+ result = cli.run(
+ project=project,
+ args=["--option", "test", "True", "source", "track", "target.bst"],
+ )
result.assert_success()
# Now fetch the key for both options
#
- result = cli.run(project=project, args=[
- '--option', 'test', 'False', 'show', '--deps', 'none', '--format', '%{key}', 'target.bst'
- ])
+ result = cli.run(
+ project=project,
+ args=[
+ "--option",
+ "test",
+ "False",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{key}",
+ "target.bst",
+ ],
+ )
result.assert_success()
master_key = result.output
- result = cli.run(project=project, args=[
- '--option', 'test', 'True', 'show', '--deps', 'none', '--format', '%{key}', 'target.bst'
- ])
+ result = cli.run(
+ project=project,
+ args=[
+ "--option",
+ "test",
+ "True",
+ "show",
+ "--deps",
+ "none",
+ "--format",
+ "%{key}",
+ "target.bst",
+ ],
+ )
result.assert_success()
test_key = result.output
@@ -130,81 +156,82 @@ def test_track_optional(cli, tmpdir, datafiles, ref_storage):
assert test_key != master_key
-@pytest.mark.datafiles(os.path.join(TOP_DIR, 'track-cross-junction'))
-@pytest.mark.parametrize("cross_junction", [('cross'), ('nocross')])
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.datafiles(os.path.join(TOP_DIR, "track-cross-junction"))
+@pytest.mark.parametrize("cross_junction", [("cross"), ("nocross")])
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
def test_track_cross_junction(cli, tmpdir, datafiles, cross_junction, ref_storage):
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files')
- target_path = os.path.join(project, 'target.bst')
- subtarget_path = os.path.join(project, 'subproject', 'subtarget.bst')
+ dev_files_path = os.path.join(project, "files")
+ target_path = os.path.join(project, "target.bst")
+ subtarget_path = os.path.join(project, "subproject", "subtarget.bst")
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
#
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(dev_files_path)
# Generate two elements using the git source, one in
# the main project and one in the subproject.
- generate_element(repo, target_path, dep_name='subproject.bst')
+ generate_element(repo, target_path, dep_name="subproject.bst")
generate_element(repo, subtarget_path)
# Generate project.conf
#
- project_conf = {
- 'name': 'test',
- 'ref-storage': ref_storage
- }
- _yaml.roundtrip_dump(project_conf, os.path.join(project, 'project.conf'))
+ project_conf = {"name": "test", "ref-storage": ref_storage}
+ _yaml.roundtrip_dump(project_conf, os.path.join(project, "project.conf"))
#
# FIXME: This can be simplified when we have support
# for addressing of junctioned elements.
#
def get_subproject_element_state():
- result = cli.run(project=project, args=[
- 'show', '--deps', 'all',
- '--format', '%{name}|%{state}', 'target.bst'
- ])
+ result = cli.run(
+ project=project,
+ args=[
+ "show",
+ "--deps",
+ "all",
+ "--format",
+ "%{name}|%{state}",
+ "target.bst",
+ ],
+ )
result.assert_success()
# Create two dimentional list of the result,
# first line should be the junctioned element
- lines = [
- line.split('|')
- for line in result.output.splitlines()
- ]
- assert lines[0][0] == 'subproject-junction.bst:subtarget.bst'
+ lines = [line.split("|") for line in result.output.splitlines()]
+ assert lines[0][0] == "subproject-junction.bst:subtarget.bst"
return lines[0][1]
#
# Assert that we have no reference yet for the cross junction element
#
- assert get_subproject_element_state() == 'no reference'
+ assert get_subproject_element_state() == "no reference"
# Track recursively across the junction
- args = ['source', 'track', '--deps', 'all']
- if cross_junction == 'cross':
- args += ['--cross-junctions']
- args += ['target.bst']
+ args = ["source", "track", "--deps", "all"]
+ if cross_junction == "cross":
+ args += ["--cross-junctions"]
+ args += ["target.bst"]
result = cli.run(project=project, args=args)
- if ref_storage == 'inline':
+ if ref_storage == "inline":
- if cross_junction == 'cross':
+ if cross_junction == "cross":
#
# Cross junction tracking is not allowed when the toplevel project
# is using inline ref storage.
#
- result.assert_main_error(ErrorDomain.PIPELINE, 'untrackable-sources')
+ result.assert_main_error(ErrorDomain.PIPELINE, "untrackable-sources")
else:
#
# No cross junction tracking was requested
#
result.assert_success()
- assert get_subproject_element_state() == 'no reference'
+ assert get_subproject_element_state() == "no reference"
else:
#
# Tracking is allowed with project.refs ref storage
@@ -214,44 +241,42 @@ def test_track_cross_junction(cli, tmpdir, datafiles, cross_junction, ref_storag
#
# If cross junction tracking was enabled, we should now be buildable
#
- if cross_junction == 'cross':
- assert get_subproject_element_state() == 'buildable'
+ if cross_junction == "cross":
+ assert get_subproject_element_state() == "buildable"
else:
- assert get_subproject_element_state() == 'no reference'
+ assert get_subproject_element_state() == "no reference"
-@pytest.mark.datafiles(os.path.join(TOP_DIR, 'consistencyerror'))
+@pytest.mark.datafiles(os.path.join(TOP_DIR, "consistencyerror"))
def test_track_consistency_error(cli, datafiles):
project = str(datafiles)
# Track the element causing a consistency error
- result = cli.run(project=project, args=['source', 'track', 'error.bst'])
+ result = cli.run(project=project, args=["source", "track", "error.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
- result.assert_task_error(ErrorDomain.SOURCE, 'the-consistency-error')
+ result.assert_task_error(ErrorDomain.SOURCE, "the-consistency-error")
-@pytest.mark.datafiles(os.path.join(TOP_DIR, 'consistencyerror'))
+@pytest.mark.datafiles(os.path.join(TOP_DIR, "consistencyerror"))
def test_track_consistency_bug(cli, datafiles):
project = str(datafiles)
# Track the element causing an unhandled exception
- result = cli.run(project=project, args=['source', 'track', 'bug.bst'])
+ result = cli.run(project=project, args=["source", "track", "bug.bst"])
# We expect BuildStream to fail gracefully, with no recorded exception.
result.assert_main_error(ErrorDomain.STREAM, None)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
- configure_project(project, {
- 'ref-storage': ref_storage
- })
+ configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)
@@ -259,39 +284,32 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# Now try to track it, this will bail with the appropriate error
# informing the user to track the junction first
- result = cli.run(project=project, args=['source', 'track', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction-dep.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
# Assert that we have the expected provenance encoded into the error
- element_node = _yaml.load(element_path, shortname='junction-dep.bst')
- ref_node = element_node.get_sequence('depends').mapping_at(0)
+ element_node = _yaml.load(element_path, shortname="junction-dep.bst")
+ ref_node = element_node.get_sequence("depends").mapping_at(0)
provenance = ref_node.get_provenance()
assert str(provenance) in result.stderr
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
def test_junction_element(cli, tmpdir, datafiles, ref_storage):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- element_path = os.path.join(project, 'elements', 'junction-dep.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ junction_path = os.path.join(project, "elements", "junction.bst")
+ element_path = os.path.join(project, "elements", "junction-dep.bst")
- configure_project(project, {
- 'ref-storage': ref_storage
- })
+ configure_project(project, {"ref-storage": ref_storage})
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)
@@ -299,32 +317,27 @@ def test_junction_element(cli, tmpdir, datafiles, ref_storage):
# Create a stack element to depend on a cross junction element
#
element = {
- 'kind': 'stack',
- 'depends': [
- {
- 'junction': 'junction.bst',
- 'filename': 'import-etc.bst'
- }
- ]
+ "kind": "stack",
+ "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}],
}
_yaml.roundtrip_dump(element, element_path)
# First demonstrate that showing the pipeline yields an error
- result = cli.run(project=project, args=['show', 'junction-dep.bst'])
+ result = cli.run(project=project, args=["show", "junction-dep.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
# Assert that we have the expected provenance encoded into the error
- element_node = _yaml.load(element_path, shortname='junction-dep.bst')
- ref_node = element_node.get_sequence('depends').mapping_at(0)
+ element_node = _yaml.load(element_path, shortname="junction-dep.bst")
+ ref_node = element_node.get_sequence("depends").mapping_at(0)
provenance = ref_node.get_provenance()
assert str(provenance) in result.stderr
# Now track the junction itself
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
# Now assert element state (via bst show under the hood) of the dep again
- assert cli.get_element_state(project, 'junction-dep.bst') == 'waiting'
+ assert cli.get_element_state(project, "junction-dep.bst") == "waiting"
@pytest.mark.datafiles(DATA_DIR)
@@ -333,15 +346,13 @@ def test_track_error_cannot_write_file(cli, tmpdir, datafiles):
pytest.skip("This is not testable with root permissions")
project = str(datafiles)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'track-test.bst'
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "track-test.bst"
- configure_project(project, {
- 'ref-storage': 'inline'
- })
+ configure_project(project, {"ref-storage": "inline"})
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(dev_files_path)
element_full_path = os.path.join(element_path, element_name)
@@ -352,9 +363,9 @@ def test_track_error_cannot_write_file(cli, tmpdir, datafiles):
read_mask = stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
os.chmod(element_path, stat.S_IMODE(st.st_mode) & ~read_mask)
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
result.assert_main_error(ErrorDomain.STREAM, None)
- result.assert_task_error(ErrorDomain.SOURCE, 'save-ref-error')
+ result.assert_task_error(ErrorDomain.SOURCE, "save-ref-error")
finally:
os.chmod(element_path, stat.S_IMODE(st.st_mode))
@@ -362,14 +373,14 @@ def test_track_error_cannot_write_file(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_no_needless_overwrite(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- dev_files_path = os.path.join(project, 'files', 'dev-files')
- element_path = os.path.join(project, 'elements')
- target = 'track-test-target.bst'
+ dev_files_path = os.path.join(project, "files", "dev-files")
+ element_path = os.path.join(project, "elements")
+ target = "track-test-target.bst"
# Create our repo object of the given source type with
# the dev files, and then collect the initial ref.
#
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(dev_files_path)
# Write out our test target and assert it exists
@@ -380,10 +391,10 @@ def test_no_needless_overwrite(cli, tmpdir, datafiles):
# Assert tracking is needed
states = cli.get_element_states(project, [target])
- assert states[target] == 'no reference'
+ assert states[target] == "no reference"
# Perform the track
- result = cli.run(project=project, args=['source', 'track', target])
+ result = cli.run(project=project, args=["source", "track", target])
result.assert_success()
track1_mtime = os.path.getmtime(path_to_target)
@@ -391,7 +402,7 @@ def test_no_needless_overwrite(cli, tmpdir, datafiles):
assert creation_mtime != track1_mtime
# Now (needlessly) track again
- result = cli.run(project=project, args=['source', 'track', target])
+ result = cli.run(project=project, args=["source", "track", target])
result.assert_success()
track2_mtime = os.path.getmtime(path_to_target)
diff --git a/tests/frontend/version.py b/tests/frontend/version.py
index e7db19915..279a51747 100644
--- a/tests/frontend/version.py
+++ b/tests/frontend/version.py
@@ -12,13 +12,13 @@ def assert_version(cli_version_output):
major, minor = utils.get_bst_version()
expected_start = "{}.{}".format(major, minor)
if not cli_version_output.startswith(expected_start):
- raise AssertionError("Version output expected to begin with '{}',"
- .format(expected_start) +
- " output was: {}"
- .format(cli_version_output))
+ raise AssertionError(
+ "Version output expected to begin with '{}',".format(expected_start)
+ + " output was: {}".format(cli_version_output)
+ )
def test_version(cli):
- result = cli.run(args=['--version'])
+ result = cli.run(args=["--version"])
result.assert_success()
assert_version(result.output)
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 43cd6f381..ba4e9577f 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -40,18 +40,19 @@ from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream._workspaces import BST_WORKSPACE_FORMAT_VERSION
-from tests.testutils import create_artifact_share, create_element_size, wait_for_cache_granularity
+from tests.testutils import (
+ create_artifact_share,
+ create_element_size,
+ wait_for_cache_granularity,
+)
repo_kinds = [(kind) for kind in ALL_REPO_KINDS]
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
-class WorkspaceCreator():
+class WorkspaceCreator:
def __init__(self, cli, tmpdir, datafiles, project_path=None):
self.cli = cli
self.tmpdir = tmpdir
@@ -63,17 +64,18 @@ class WorkspaceCreator():
shutil.copytree(str(datafiles), project_path)
self.project_path = project_path
- self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
+ self.bin_files_path = os.path.join(project_path, "files", "bin-files")
- self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
+ self.workspace_cmd = os.path.join(self.project_path, "workspace_cmd")
- def create_workspace_element(self, kind, suffix='', workspace_dir=None,
- element_attrs=None):
- element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
- element_path = os.path.join(self.project_path, 'elements')
+ def create_workspace_element(
+ self, kind, suffix="", workspace_dir=None, element_attrs=None
+ ):
+ element_name = "workspace-test-{}{}.bst".format(kind, suffix)
+ element_path = os.path.join(self.project_path, "elements")
if not workspace_dir:
workspace_dir = os.path.join(self.workspace_cmd, element_name)
- if workspace_dir[-4:] == '.bst':
+ if workspace_dir[-4:] == ".bst":
workspace_dir = workspace_dir[:-4]
# Create our repo object of the given source type with
@@ -82,85 +84,101 @@ class WorkspaceCreator():
ref = repo.create(self.bin_files_path)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
if element_attrs:
element = {**element, **element_attrs}
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
return element_name, element_path, workspace_dir
- def create_workspace_elements(self, kinds, suffixs=None, workspace_dir_usr=None,
- element_attrs=None):
+ def create_workspace_elements(
+ self, kinds, suffixs=None, workspace_dir_usr=None, element_attrs=None
+ ):
element_tuples = []
if suffixs is None:
- suffixs = ['', ] * len(kinds)
+ suffixs = ["",] * len(kinds)
else:
if len(suffixs) != len(kinds):
raise "terable error"
for suffix, kind in zip(suffixs, kinds):
- element_name, _, workspace_dir = \
- self.create_workspace_element(kind, suffix, workspace_dir_usr,
- element_attrs)
+ element_name, _, workspace_dir = self.create_workspace_element(
+ kind, suffix, workspace_dir_usr, element_attrs
+ )
element_tuples.append((element_name, workspace_dir))
# Assert that a fetch is needed
- states = self.cli.get_element_states(self.project_path, [
- e for e, _ in element_tuples
- ])
- assert not any(states[e] != 'fetch needed' for e, _ in element_tuples)
+ states = self.cli.get_element_states(
+ self.project_path, [e for e, _ in element_tuples]
+ )
+ assert not any(states[e] != "fetch needed" for e, _ in element_tuples)
return element_tuples
- def open_workspaces(self, kinds, suffixs=None, workspace_dir=None,
- element_attrs=None, no_checkout=False):
-
- element_tuples = self.create_workspace_elements(kinds, suffixs, workspace_dir,
- element_attrs)
+ def open_workspaces(
+ self,
+ kinds,
+ suffixs=None,
+ workspace_dir=None,
+ element_attrs=None,
+ no_checkout=False,
+ ):
+
+ element_tuples = self.create_workspace_elements(
+ kinds, suffixs, workspace_dir, element_attrs
+ )
os.makedirs(self.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
- args = ['workspace', 'open']
+ args = ["workspace", "open"]
if no_checkout:
- args.append('--no-checkout')
+ args.append("--no-checkout")
if workspace_dir is not None:
assert len(element_tuples) == 1, "test logic error"
_, workspace_dir = element_tuples[0]
- args.extend(['--directory', workspace_dir])
+ args.extend(["--directory", workspace_dir])
- args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
- result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
+ args.extend(
+ [element_name for element_name, workspace_dir_suffix in element_tuples]
+ )
+ result = self.cli.run(
+ cwd=self.workspace_cmd, project=self.project_path, args=args
+ )
result.assert_success()
if not no_checkout:
# Assert that we are now buildable because the source is now cached.
- states = self.cli.get_element_states(self.project_path, [
- e for e, _ in element_tuples
- ])
- assert not any(states[e] != 'buildable' for e, _ in element_tuples)
+ states = self.cli.get_element_states(
+ self.project_path, [e for e, _ in element_tuples]
+ )
+ assert not any(states[e] != "buildable" for e, _ in element_tuples)
# Check that the executable hello file is found in each workspace
for _, workspace in element_tuples:
- filename = os.path.join(workspace, 'usr', 'bin', 'hello')
+ filename = os.path.join(workspace, "usr", "bin", "hello")
assert os.path.exists(filename)
return element_tuples
-def open_workspace(cli, tmpdir, datafiles, kind, suffix='', workspace_dir=None,
- project_path=None, element_attrs=None, no_checkout=False):
+def open_workspace(
+ cli,
+ tmpdir,
+ datafiles,
+ kind,
+ suffix="",
+ workspace_dir=None,
+ project_path=None,
+ element_attrs=None,
+ no_checkout=False,
+):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles, project_path)
- workspaces = workspace_object.open_workspaces((kind, ), (suffix, ), workspace_dir,
- element_attrs, no_checkout)
+ workspaces = workspace_object.open_workspaces(
+ (kind,), (suffix,), workspace_dir, element_attrs, no_checkout
+ )
assert len(workspaces) == 1
element_name, workspace = workspaces[0]
return element_name, workspace_object.project_path, workspace
@@ -176,11 +194,12 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
# Check that the correct origin branch is set
element_config = _yaml.load(os.path.join(project, "elements", element_name))
- source_config = element_config.get_sequence('sources').mapping_at(0)
+ source_config = element_config.get_sequence("sources").mapping_at(0)
output = subprocess.check_output(["bzr", "info"], cwd=workspace)
- stripped_url = source_config.get_str('url').lstrip("file:///")
- expected_output_str = ("checkout of branch: /{}/{}"
- .format(stripped_url, source_config.get_str('track')))
+ stripped_url = source_config.get_str("url").lstrip("file:///")
+ expected_output_str = "checkout of branch: /{}/{}".format(
+ stripped_url, source_config.get_str("track")
+ )
assert expected_output_str in str(output)
@@ -193,16 +212,18 @@ def test_open_multi(cli, tmpdir, datafiles):
for (elname, workspace), kind in zip(workspaces, repo_kinds):
assert kind in elname
workspace_lsdir = os.listdir(workspace)
- if kind == 'git':
- assert '.git' in workspace_lsdir
- elif kind == 'bzr':
- assert '.bzr' in workspace_lsdir
+ if kind == "git":
+ assert ".git" in workspace_lsdir
+ elif kind == "bzr":
+ assert ".bzr" in workspace_lsdir
else:
- assert '.git' not in workspace_lsdir
- assert '.bzr' not in workspace_lsdir
+ assert ".git" not in workspace_lsdir
+ assert ".bzr" not in workspace_lsdir
-@pytest.mark.skipif(os.geteuid() == 0, reason="root may have CAP_DAC_OVERRIDE and ignore permissions")
+@pytest.mark.skipif(
+ os.geteuid() == 0, reason="root may have CAP_DAC_OVERRIDE and ignore permissions"
+)
@pytest.mark.datafiles(DATA_DIR)
def test_open_multi_unwritable(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
@@ -212,14 +233,16 @@ def test_open_multi_unwritable(cli, tmpdir, datafiles):
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
- args = ['workspace', 'open']
+ args = ["workspace", "open"]
args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
- cli.configure({'workspacedir': workspace_object.workspace_cmd})
+ cli.configure({"workspacedir": workspace_object.workspace_cmd})
cwdstat = os.stat(workspace_object.workspace_cmd)
try:
os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode - stat.S_IWRITE)
- result = workspace_object.cli.run(project=workspace_object.project_path, args=args)
+ result = workspace_object.cli.run(
+ project=workspace_object.project_path, args=args
+ )
finally:
# Using this finally block to make sure we always put things back how they should be.
os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode)
@@ -227,7 +250,12 @@ def test_open_multi_unwritable(cli, tmpdir, datafiles):
result.assert_main_error(ErrorDomain.STREAM, None)
# Normally we avoid checking stderr in favour of using the machine-readable result.assert_main_error
# But Tristan was very keen that the names of the elements left needing workspaces were present in the output
- assert " ".join([element_name for element_name, workspace_dir_suffix in element_tuples[1:]]) in result.stderr
+ assert (
+ " ".join(
+ [element_name for element_name, workspace_dir_suffix in element_tuples[1:]]
+ )
+ in result.stderr
+ )
@pytest.mark.datafiles(DATA_DIR)
@@ -239,14 +267,17 @@ def test_open_multi_with_directory(cli, tmpdir, datafiles):
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
- args = ['workspace', 'open']
- args.extend(['--directory', 'any/dir/should/fail'])
+ args = ["workspace", "open"]
+ args.extend(["--directory", "any/dir/should/fail"])
args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
- result = workspace_object.cli.run(cwd=workspace_object.workspace_cmd, project=workspace_object.project_path,
- args=args)
+ result = workspace_object.cli.run(
+ cwd=workspace_object.workspace_cmd,
+ project=workspace_object.project_path,
+ args=args,
+ )
- result.assert_main_error(ErrorDomain.STREAM, 'directory-with-multiple-elements')
+ result.assert_main_error(ErrorDomain.STREAM, "directory-with-multiple-elements")
@pytest.mark.datafiles(DATA_DIR)
@@ -254,31 +285,35 @@ def test_open_defaultlocation(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
# pylint: disable=unbalanced-tuple-unpacking
- ((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], ['git'])
+ ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(
+ ["git"], ["git"]
+ )
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
- args = ['workspace', 'open']
+ args = ["workspace", "open"]
args.append(element_name)
# In the other tests we set the cmd to workspace_object.workspace_cmd with the optional
# argument, cwd for the workspace_object.cli.run function. But here we set the default
# workspace location to workspace_object.workspace_cmd and run the cli.run function with
# no cwd option so that it runs in the project directory.
- cli.configure({'workspacedir': workspace_object.workspace_cmd})
- result = workspace_object.cli.run(project=workspace_object.project_path,
- args=args)
+ cli.configure({"workspacedir": workspace_object.workspace_cmd})
+ result = workspace_object.cli.run(project=workspace_object.project_path, args=args)
result.assert_success()
- assert cli.get_element_state(workspace_object.project_path, element_name) == 'buildable'
+ assert (
+ cli.get_element_state(workspace_object.project_path, element_name)
+ == "buildable"
+ )
# Check that the executable hello file is found in the workspace
# even though the cli.run function was not run with cwd = workspace_object.workspace_cmd
# the workspace should be created in there as we used the 'workspacedir' configuration
# option.
- filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
+ filename = os.path.join(workspace_dir, "usr", "bin", "hello")
assert os.path.exists(filename)
@@ -287,105 +322,117 @@ def test_open_defaultlocation_exists(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreator(cli, tmpdir, datafiles)
# pylint: disable=unbalanced-tuple-unpacking
- ((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], ['git'])
+ ((element_name, workspace_dir),) = workspace_object.create_workspace_elements(
+ ["git"], ["git"]
+ )
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
- with open(workspace_dir, 'w') as fl:
- fl.write('foo')
+ with open(workspace_dir, "w") as fl:
+ fl.write("foo")
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
- args = ['workspace', 'open']
+ args = ["workspace", "open"]
args.append(element_name)
# In the other tests we set the cmd to workspace_object.workspace_cmd with the optional
# argument, cwd for the workspace_object.cli.run function. But here we set the default
# workspace location to workspace_object.workspace_cmd and run the cli.run function with
# no cwd option so that it runs in the project directory.
- cli.configure({'workspacedir': workspace_object.workspace_cmd})
- result = workspace_object.cli.run(project=workspace_object.project_path,
- args=args)
+ cli.configure({"workspacedir": workspace_object.workspace_cmd})
+ result = workspace_object.cli.run(project=workspace_object.project_path, args=args)
- result.assert_main_error(ErrorDomain.STREAM, 'bad-directory')
+ result.assert_main_error(ErrorDomain.STREAM, "bad-directory")
@pytest.mark.datafiles(DATA_DIR)
def test_open_track(cli, tmpdir, datafiles):
- open_workspace(cli, tmpdir, datafiles, 'git')
+ open_workspace(cli, tmpdir, datafiles, "git")
@pytest.mark.datafiles(DATA_DIR)
def test_open_force(cli, tmpdir, datafiles):
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
# Close the workspace
- result = cli.run(project=project, args=[
- 'workspace', 'close', element_name
- ])
+ result = cli.run(project=project, args=["workspace", "close", element_name])
result.assert_success()
# Assert the workspace dir still exists
assert os.path.exists(workspace)
# Now open the workspace again with --force, this should happily succeed
- result = cli.run(project=project, args=[
- 'workspace', 'open', '--force', '--directory', workspace, element_name
- ])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--force", "--directory", workspace, element_name],
+ )
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_open_force_open(cli, tmpdir, datafiles):
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
# Assert the workspace dir exists
assert os.path.exists(workspace)
# Now open the workspace again with --force, this should happily succeed
- result = cli.run(project=project, args=[
- 'workspace', 'open', '--force', '--directory', workspace, element_name
- ])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--force", "--directory", workspace, element_name],
+ )
result.assert_success()
# Regression test for #1086.
@pytest.mark.datafiles(DATA_DIR)
def test_open_force_open_no_checkout(cli, tmpdir, datafiles):
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
- hello_path = os.path.join(workspace, 'hello.txt')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
+ hello_path = os.path.join(workspace, "hello.txt")
# Assert the workspace dir exists
assert os.path.exists(workspace)
# Create a new file in the workspace
- with open(hello_path, 'w') as f:
- f.write('hello')
+ with open(hello_path, "w") as f:
+ f.write("hello")
# Now open the workspace again with --force and --no-checkout
- result = cli.run(project=project, args=[
- 'workspace', 'open', '--force', '--no-checkout', '--directory', workspace, element_name
- ])
+ result = cli.run(
+ project=project,
+ args=[
+ "workspace",
+ "open",
+ "--force",
+ "--no-checkout",
+ "--directory",
+ workspace,
+ element_name,
+ ],
+ )
result.assert_success()
# Ensure that our files were not overwritten
assert os.path.exists(hello_path)
with open(hello_path) as f:
- assert f.read() == 'hello'
+ assert f.read() == "hello"
@pytest.mark.datafiles(DATA_DIR)
def test_open_force_different_workspace(cli, tmpdir, datafiles):
- _, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', "-alpha")
+ _, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", "-alpha")
# Assert the workspace dir exists
assert os.path.exists(workspace)
- hello_path = os.path.join(workspace, 'usr', 'bin', 'hello')
- hello1_path = os.path.join(workspace, 'usr', 'bin', 'hello1')
+ hello_path = os.path.join(workspace, "usr", "bin", "hello")
+ hello1_path = os.path.join(workspace, "usr", "bin", "hello1")
tmpdir = os.path.join(str(tmpdir), "-beta")
shutil.move(hello_path, hello1_path)
- element_name2, _, workspace2 = open_workspace(cli, tmpdir, datafiles, 'git', "-beta")
+ element_name2, _, workspace2 = open_workspace(
+ cli, tmpdir, datafiles, "git", "-beta"
+ )
# Assert the workspace dir exists
assert os.path.exists(workspace2)
@@ -394,12 +441,13 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles):
assert os.path.exists(hello1_path)
# Assert that workspace 2 contains the unmodified file
- assert os.path.exists(os.path.join(workspace2, 'usr', 'bin', 'hello'))
+ assert os.path.exists(os.path.join(workspace2, "usr", "bin", "hello"))
# Now open the workspace again with --force, this should happily succeed
- result = cli.run(project=project, args=[
- 'workspace', 'open', '--force', '--directory', workspace, element_name2
- ])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--force", "--directory", workspace, element_name2],
+ )
# Assert that the file in workspace 1 has been replaced
    # with the file from workspace 2
@@ -411,12 +459,12 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_close(cli, tmpdir, datafiles):
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
# Close the workspace
- result = cli.run(project=project, args=[
- 'workspace', 'close', '--remove-dir', element_name
- ])
+ result = cli.run(
+ project=project, args=["workspace", "close", "--remove-dir", element_name]
+ )
result.assert_success()
# Assert the workspace dir has been deleted
@@ -426,17 +474,19 @@ def test_close(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_close_external_after_move_project(cli, tmpdir, datafiles):
workspace_dir = os.path.join(str(tmpdir), "workspace")
- project_path = os.path.join(str(tmpdir), 'initial_project')
- element_name, _, _ = open_workspace(cli, tmpdir, datafiles, 'git', "", workspace_dir, project_path)
+ project_path = os.path.join(str(tmpdir), "initial_project")
+ element_name, _, _ = open_workspace(
+ cli, tmpdir, datafiles, "git", "", workspace_dir, project_path
+ )
assert os.path.exists(workspace_dir)
- moved_dir = os.path.join(str(tmpdir), 'external_project')
+ moved_dir = os.path.join(str(tmpdir), "external_project")
shutil.move(project_path, moved_dir)
assert os.path.exists(moved_dir)
# Close the workspace
- result = cli.run(project=moved_dir, args=[
- 'workspace', 'close', '--remove-dir', element_name
- ])
+ result = cli.run(
+ project=moved_dir, args=["workspace", "close", "--remove-dir", element_name]
+ )
result.assert_success()
# Assert the workspace dir has been deleted
@@ -445,36 +495,40 @@ def test_close_external_after_move_project(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_close_internal_after_move_project(cli, tmpdir, datafiles):
- initial_dir = os.path.join(str(tmpdir), 'initial_project')
- initial_workspace = os.path.join(initial_dir, 'workspace')
- element_name, _, _ = open_workspace(cli, tmpdir, datafiles, 'git',
- workspace_dir=initial_workspace, project_path=initial_dir)
- moved_dir = os.path.join(str(tmpdir), 'internal_project')
+ initial_dir = os.path.join(str(tmpdir), "initial_project")
+ initial_workspace = os.path.join(initial_dir, "workspace")
+ element_name, _, _ = open_workspace(
+ cli,
+ tmpdir,
+ datafiles,
+ "git",
+ workspace_dir=initial_workspace,
+ project_path=initial_dir,
+ )
+ moved_dir = os.path.join(str(tmpdir), "internal_project")
shutil.move(initial_dir, moved_dir)
assert os.path.exists(moved_dir)
# Close the workspace
- result = cli.run(project=moved_dir, args=[
- 'workspace', 'close', '--remove-dir', element_name
- ])
+ result = cli.run(
+ project=moved_dir, args=["workspace", "close", "--remove-dir", element_name]
+ )
result.assert_success()
# Assert the workspace dir has been deleted
- workspace = os.path.join(moved_dir, 'workspace')
+ workspace = os.path.join(moved_dir, "workspace")
assert not os.path.exists(workspace)
@pytest.mark.datafiles(DATA_DIR)
def test_close_removed(cli, tmpdir, datafiles):
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
# Remove it first, closing the workspace should work
shutil.rmtree(workspace)
# Close the workspace
- result = cli.run(project=project, args=[
- 'workspace', 'close', element_name
- ])
+ result = cli.run(project=project, args=["workspace", "close", element_name])
result.assert_success()
# Assert the workspace dir has been deleted
@@ -483,8 +537,10 @@ def test_close_removed(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_close_nonexistant_element(cli, tmpdir, datafiles):
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
- element_path = os.path.join(datafiles.dirname, datafiles.basename, 'elements', element_name)
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
+ element_path = os.path.join(
+ datafiles.dirname, datafiles.basename, "elements", element_name
+ )
# First brutally remove the element.bst file, ensuring that
# the element does not exist anymore in the project where
@@ -492,9 +548,9 @@ def test_close_nonexistant_element(cli, tmpdir, datafiles):
os.remove(element_path)
# Close the workspace
- result = cli.run(project=project, args=[
- 'workspace', 'close', '--remove-dir', element_name
- ])
+ result = cli.run(
+ project=project, args=["workspace", "close", "--remove-dir", element_name]
+ )
result.assert_success()
# Assert the workspace dir has been deleted
@@ -503,17 +559,19 @@ def test_close_nonexistant_element(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_close_multiple(cli, tmpdir, datafiles):
- tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
- tmpdir_beta = os.path.join(str(tmpdir), 'beta')
+ tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
+ tmpdir_beta = os.path.join(str(tmpdir), "beta")
alpha, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, 'git', suffix='-alpha')
+ cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
+ )
beta, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, 'git', suffix='-beta')
+ cli, tmpdir_beta, datafiles, "git", suffix="-beta"
+ )
# Close the workspaces
- result = cli.run(project=project, args=[
- 'workspace', 'close', '--remove-dir', alpha, beta
- ])
+ result = cli.run(
+ project=project, args=["workspace", "close", "--remove-dir", alpha, beta]
+ )
result.assert_success()
# Assert the workspace dirs have been deleted
@@ -523,17 +581,19 @@ def test_close_multiple(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_close_all(cli, tmpdir, datafiles):
- tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
- tmpdir_beta = os.path.join(str(tmpdir), 'beta')
+ tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
+ tmpdir_beta = os.path.join(str(tmpdir), "beta")
_, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, 'git', suffix='-alpha')
+ cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
+ )
_, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, 'git', suffix='-beta')
+ cli, tmpdir_beta, datafiles, "git", suffix="-beta"
+ )
# Close the workspaces
- result = cli.run(project=project, args=[
- 'workspace', 'close', '--remove-dir', '--all'
- ])
+ result = cli.run(
+ project=project, args=["workspace", "close", "--remove-dir", "--all"]
+ )
result.assert_success()
# Assert the workspace dirs have been deleted
@@ -544,45 +604,43 @@ def test_close_all(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_reset(cli, tmpdir, datafiles):
# Open the workspace
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
# Modify workspace
- shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
- os.makedirs(os.path.join(workspace, 'etc'))
- with open(os.path.join(workspace, 'etc', 'pony.conf'), 'w') as f:
+ shutil.rmtree(os.path.join(workspace, "usr", "bin"))
+ os.makedirs(os.path.join(workspace, "etc"))
+ with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
f.write("PONY='pink'")
# Now reset the open workspace, this should have the
# effect of reverting our changes.
- result = cli.run(project=project, args=[
- 'workspace', 'reset', element_name
- ])
+ result = cli.run(project=project, args=["workspace", "reset", element_name])
result.assert_success()
- assert os.path.exists(os.path.join(workspace, 'usr', 'bin', 'hello'))
- assert not os.path.exists(os.path.join(workspace, 'etc', 'pony.conf'))
+ assert os.path.exists(os.path.join(workspace, "usr", "bin", "hello"))
+ assert not os.path.exists(os.path.join(workspace, "etc", "pony.conf"))
@pytest.mark.datafiles(DATA_DIR)
def test_reset_soft(cli, tmpdir, datafiles):
# Open the workspace
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
- hello_path = os.path.join(workspace, 'usr', 'bin', 'hello')
- pony_path = os.path.join(workspace, 'etc', 'pony.conf')
+ hello_path = os.path.join(workspace, "usr", "bin", "hello")
+ pony_path = os.path.join(workspace, "etc", "pony.conf")
- assert os.path.exists(os.path.join(workspace, 'usr', 'bin'))
+ assert os.path.exists(os.path.join(workspace, "usr", "bin"))
assert os.path.exists(hello_path)
assert not os.path.exists(pony_path)
key_1 = cli.get_element_key(project, element_name)
- assert key_1 != "{:?<64}".format('')
- result = cli.run(project=project, args=['build', element_name])
+ assert key_1 != "{:?<64}".format("")
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
key_2 = cli.get_element_key(project, element_name)
- assert key_2 != "{:?<64}".format('')
+ assert key_2 != "{:?<64}".format("")
# workspace keys are not recalculated
assert key_1 == key_2
@@ -590,99 +648,97 @@ def test_reset_soft(cli, tmpdir, datafiles):
wait_for_cache_granularity()
# Modify workspace
- shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
- os.makedirs(os.path.join(workspace, 'etc'))
- with open(os.path.join(workspace, 'etc', 'pony.conf'), 'w') as f:
+ shutil.rmtree(os.path.join(workspace, "usr", "bin"))
+ os.makedirs(os.path.join(workspace, "etc"))
+ with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
f.write("PONY='pink'")
- assert not os.path.exists(os.path.join(workspace, 'usr', 'bin'))
+ assert not os.path.exists(os.path.join(workspace, "usr", "bin"))
assert os.path.exists(pony_path)
# Now soft-reset the open workspace, this should not revert the changes
- result = cli.run(project=project, args=[
- 'workspace', 'reset', '--soft', element_name
- ])
+ result = cli.run(
+ project=project, args=["workspace", "reset", "--soft", element_name]
+ )
result.assert_success()
# we removed this dir
- assert not os.path.exists(os.path.join(workspace, 'usr', 'bin'))
+ assert not os.path.exists(os.path.join(workspace, "usr", "bin"))
# and added this one
- assert os.path.exists(os.path.join(workspace, 'etc', 'pony.conf'))
+ assert os.path.exists(os.path.join(workspace, "etc", "pony.conf"))
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
key_3 = cli.get_element_key(project, element_name)
- assert key_3 != "{:?<64}".format('')
+ assert key_3 != "{:?<64}".format("")
assert key_1 != key_3
@pytest.mark.datafiles(DATA_DIR)
def test_reset_multiple(cli, tmpdir, datafiles):
# Open the workspaces
- tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
- tmpdir_beta = os.path.join(str(tmpdir), 'beta')
+ tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
+ tmpdir_beta = os.path.join(str(tmpdir), "beta")
alpha, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, 'git', suffix='-alpha')
+ cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
+ )
beta, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, 'git', suffix='-beta')
+ cli, tmpdir_beta, datafiles, "git", suffix="-beta"
+ )
# Modify workspaces
- shutil.rmtree(os.path.join(workspace_alpha, 'usr', 'bin'))
- os.makedirs(os.path.join(workspace_beta, 'etc'))
- with open(os.path.join(workspace_beta, 'etc', 'pony.conf'), 'w') as f:
+ shutil.rmtree(os.path.join(workspace_alpha, "usr", "bin"))
+ os.makedirs(os.path.join(workspace_beta, "etc"))
+ with open(os.path.join(workspace_beta, "etc", "pony.conf"), "w") as f:
f.write("PONY='pink'")
# Now reset the open workspaces, this should have the
# effect of reverting our changes.
- result = cli.run(project=project, args=[
- 'workspace', 'reset', alpha, beta,
- ])
+ result = cli.run(project=project, args=["workspace", "reset", alpha, beta,])
result.assert_success()
- assert os.path.exists(os.path.join(workspace_alpha, 'usr', 'bin', 'hello'))
- assert not os.path.exists(os.path.join(workspace_beta, 'etc', 'pony.conf'))
+ assert os.path.exists(os.path.join(workspace_alpha, "usr", "bin", "hello"))
+ assert not os.path.exists(os.path.join(workspace_beta, "etc", "pony.conf"))
@pytest.mark.datafiles(DATA_DIR)
def test_reset_all(cli, tmpdir, datafiles):
# Open the workspaces
- tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
- tmpdir_beta = os.path.join(str(tmpdir), 'beta')
+ tmpdir_alpha = os.path.join(str(tmpdir), "alpha")
+ tmpdir_beta = os.path.join(str(tmpdir), "beta")
_, project, workspace_alpha = open_workspace(
- cli, tmpdir_alpha, datafiles, 'git', suffix='-alpha')
+ cli, tmpdir_alpha, datafiles, "git", suffix="-alpha"
+ )
_, project, workspace_beta = open_workspace(
- cli, tmpdir_beta, datafiles, 'git', suffix='-beta')
+ cli, tmpdir_beta, datafiles, "git", suffix="-beta"
+ )
# Modify workspaces
- shutil.rmtree(os.path.join(workspace_alpha, 'usr', 'bin'))
- os.makedirs(os.path.join(workspace_beta, 'etc'))
- with open(os.path.join(workspace_beta, 'etc', 'pony.conf'), 'w') as f:
+ shutil.rmtree(os.path.join(workspace_alpha, "usr", "bin"))
+ os.makedirs(os.path.join(workspace_beta, "etc"))
+ with open(os.path.join(workspace_beta, "etc", "pony.conf"), "w") as f:
f.write("PONY='pink'")
# Now reset the open workspace, this should have the
# effect of reverting our changes.
- result = cli.run(project=project, args=[
- 'workspace', 'reset', '--all'
- ])
+ result = cli.run(project=project, args=["workspace", "reset", "--all"])
result.assert_success()
- assert os.path.exists(os.path.join(workspace_alpha, 'usr', 'bin', 'hello'))
- assert not os.path.exists(os.path.join(workspace_beta, 'etc', 'pony.conf'))
+ assert os.path.exists(os.path.join(workspace_alpha, "usr", "bin", "hello"))
+ assert not os.path.exists(os.path.join(workspace_beta, "etc", "pony.conf"))
@pytest.mark.datafiles(DATA_DIR)
def test_list(cli, tmpdir, datafiles):
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
# Now list the workspaces
- result = cli.run(project=project, args=[
- 'workspace', 'list'
- ])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_success()
loaded = _yaml.load_data(result.output)
- workspaces = loaded.get_sequence('workspaces')
+ workspaces = loaded.get_sequence("workspaces")
assert len(workspaces) == 1
space = workspaces.mapping_at(0)
- assert space.get_str('element') == element_name
- assert space.get_str('directory') == workspace
+ assert space.get_str("element") == element_name
+ assert space.get_str("directory") == workspace
@pytest.mark.datafiles(DATA_DIR)
@@ -691,117 +747,113 @@ def test_list(cli, tmpdir, datafiles):
@pytest.mark.parametrize(
"from_workspace,guess_element",
[(False, False), (True, True), (True, False)],
- ids=["project-no-guess", "workspace-guess", "workspace-no-guess"])
-def test_build(cli, tmpdir_factory, datafiles, kind, strict, from_workspace, guess_element):
- tmpdir = tmpdir_factory.mktemp('')
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
- checkout = os.path.join(str(tmpdir), 'checkout')
- args_dir = ['-C', workspace] if from_workspace else []
+ ids=["project-no-guess", "workspace-guess", "workspace-no-guess"],
+)
+def test_build(
+ cli, tmpdir_factory, datafiles, kind, strict, from_workspace, guess_element
+):
+ tmpdir = tmpdir_factory.mktemp("")
+ element_name, project, workspace = open_workspace(
+ cli, tmpdir, datafiles, kind, False
+ )
+ checkout = os.path.join(str(tmpdir), "checkout")
+ args_dir = ["-C", workspace] if from_workspace else []
args_elm = [element_name] if not guess_element else []
# Modify workspace
- shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
- os.makedirs(os.path.join(workspace, 'etc'))
- with open(os.path.join(workspace, 'etc', 'pony.conf'), 'w') as f:
+ shutil.rmtree(os.path.join(workspace, "usr", "bin"))
+ os.makedirs(os.path.join(workspace, "etc"))
+ with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
f.write("PONY='pink'")
# Configure strict mode
strict_mode = True
- if strict != 'strict':
+ if strict != "strict":
strict_mode = False
- cli.configure({
- 'projects': {
- 'test': {
- 'strict': strict_mode
- }
- }
- })
+ cli.configure({"projects": {"test": {"strict": strict_mode}}})
# Build modified workspace
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
key_1 = cli.get_element_key(project, element_name)
- assert key_1 != "{:?<64}".format('')
- result = cli.run(project=project, args=args_dir + ['build', *args_elm])
+ assert key_1 != "{:?<64}".format("")
+ result = cli.run(project=project, args=args_dir + ["build", *args_elm])
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
key_2 = cli.get_element_key(project, element_name)
- assert key_2 != "{:?<64}".format('')
+ assert key_2 != "{:?<64}".format("")
# workspace keys are not recalculated
assert key_1 == key_2
# Checkout the result
- result = cli.run(project=project,
- args=args_dir + ['artifact', 'checkout', '--directory', checkout, *args_elm])
+ result = cli.run(
+ project=project,
+ args=args_dir + ["artifact", "checkout", "--directory", checkout, *args_elm],
+ )
result.assert_success()
# Check that the pony.conf from the modified workspace exists
- filename = os.path.join(checkout, 'etc', 'pony.conf')
+ filename = os.path.join(checkout, "etc", "pony.conf")
assert os.path.exists(filename)
# Check that the original /usr/bin/hello is not in the checkout
- assert not os.path.exists(os.path.join(checkout, 'usr', 'bin', 'hello'))
+ assert not os.path.exists(os.path.join(checkout, "usr", "bin", "hello"))
@pytest.mark.datafiles(DATA_DIR)
def test_buildable_no_ref(cli, tmpdir, datafiles):
project = str(datafiles)
- element_name = 'workspace-test-no-ref.bst'
- element_path = os.path.join(project, 'elements')
+ element_name = "workspace-test-no-ref.bst"
+ element_path = os.path.join(project, "elements")
# Write out our test target without any source ref
- repo = create_repo('git', str(tmpdir))
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config()
- ]
- }
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ repo = create_repo("git", str(tmpdir))
+ element = {"kind": "import", "sources": [repo.source_config()]}
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# Assert that this target is not buildable when no workspace is associated.
- assert cli.get_element_state(project, element_name) == 'no reference'
+ assert cli.get_element_state(project, element_name) == "no reference"
# Now open the workspace. We don't need to checkout the source though.
- workspace = os.path.join(str(tmpdir), 'workspace-no-ref')
+ workspace = os.path.join(str(tmpdir), "workspace-no-ref")
os.makedirs(workspace)
- args = ['workspace', 'open', '--no-checkout', '--directory', workspace, element_name]
+ args = [
+ "workspace",
+ "open",
+ "--no-checkout",
+ "--directory",
+ workspace,
+ element_name,
+ ]
result = cli.run(project=project, args=args)
result.assert_success()
# Assert that the target is now buildable.
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("modification", [("addfile"), ("removefile"), ("modifyfile")])
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
- checkout = os.path.join(str(tmpdir), 'checkout')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
+ checkout = os.path.join(str(tmpdir), "checkout")
# Configure strict mode
strict_mode = True
- if strict != 'strict':
+ if strict != "strict":
strict_mode = False
- cli.configure({
- 'projects': {
- 'test': {
- 'strict': strict_mode
- }
- }
- })
+ cli.configure({"projects": {"test": {"strict": strict_mode}}})
# Build clean workspace
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
key_1 = cli.get_element_key(project, element_name)
- assert key_1 != "{:?<64}".format('')
- result = cli.run(project=project, args=['build', element_name])
+ assert key_1 != "{:?<64}".format("")
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
key_2 = cli.get_element_key(project, element_name)
- assert key_2 != "{:?<64}".format('')
+ assert key_2 != "{:?<64}".format("")
# workspace keys are not recalculated
assert key_1 == key_2
@@ -811,32 +863,32 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
# Modify the workspace in various different ways, ensuring we
# properly detect the changes.
#
- if modification == 'addfile':
- os.makedirs(os.path.join(workspace, 'etc'))
- with open(os.path.join(workspace, 'etc', 'pony.conf'), 'w') as f:
+ if modification == "addfile":
+ os.makedirs(os.path.join(workspace, "etc"))
+ with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
f.write("PONY='pink'")
- elif modification == 'removefile':
- os.remove(os.path.join(workspace, 'usr', 'bin', 'hello'))
- elif modification == 'modifyfile':
- with open(os.path.join(workspace, 'usr', 'bin', 'hello'), 'w') as f:
- f.write('cookie')
+ elif modification == "removefile":
+ os.remove(os.path.join(workspace, "usr", "bin", "hello"))
+ elif modification == "modifyfile":
+ with open(os.path.join(workspace, "usr", "bin", "hello"), "w") as f:
+ f.write("cookie")
else:
# This cannot be reached
assert 0
# First assert that the state is properly detected
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
key_3 = cli.get_element_key(project, element_name)
- assert key_3 != "{:?<64}".format('')
+ assert key_3 != "{:?<64}".format("")
# Since there are different things going on at `bst build` time
# than `bst show` time, we also want to build / checkout again,
# and ensure that the result contains what we expect.
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
key_4 = cli.get_element_key(project, element_name)
- assert key_4 != "{:?<64}".format('')
+ assert key_4 != "{:?<64}".format("")
# workspace keys are not recalculated
assert key_3 == key_4
@@ -844,22 +896,23 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
assert key_1 != key_3
# Checkout the result
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', element_name, '--directory', checkout
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
result.assert_success()
# Check the result for the changes we made
#
- if modification == 'addfile':
- filename = os.path.join(checkout, 'etc', 'pony.conf')
+ if modification == "addfile":
+ filename = os.path.join(checkout, "etc", "pony.conf")
assert os.path.exists(filename)
- elif modification == 'removefile':
- assert not os.path.exists(os.path.join(checkout, 'usr', 'bin', 'hello'))
- elif modification == 'modifyfile':
- with open(os.path.join(workspace, 'usr', 'bin', 'hello'), 'r') as f:
+ elif modification == "removefile":
+ assert not os.path.exists(os.path.join(checkout, "usr", "bin", "hello"))
+ elif modification == "modifyfile":
+ with open(os.path.join(workspace, "usr", "bin", "hello"), "r") as f:
data = f.read()
- assert data == 'cookie'
+ assert data == "cookie"
else:
# This cannot be reached
assert 0
@@ -868,143 +921,147 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
# Ensure that various versions that should not be accepted raise a
# LoadError.INVALID_DATA
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("workspace_cfg", [
- # Test loading a negative workspace version
- {"format-version": -1},
- # Test loading version 0 with two sources
- {
- "format-version": 0,
- "alpha.bst": {
- 0: "/workspaces/bravo",
- 1: "/workspaces/charlie",
- }
- },
- # Test loading a version with decimals
- {"format-version": 0.5},
- # Test loading a future version
- {"format-version": BST_WORKSPACE_FORMAT_VERSION + 1}
-])
+@pytest.mark.parametrize(
+ "workspace_cfg",
+ [
+ # Test loading a negative workspace version
+ {"format-version": -1},
+ # Test loading version 0 with two sources
+ {
+ "format-version": 0,
+ "alpha.bst": {0: "/workspaces/bravo", 1: "/workspaces/charlie",},
+ },
+ # Test loading a version with decimals
+ {"format-version": 0.5},
+ # Test loading a future version
+ {"format-version": BST_WORKSPACE_FORMAT_VERSION + 1},
+ ],
+)
def test_list_unsupported_workspace(cli, datafiles, workspace_cfg):
project = str(datafiles)
- os.makedirs(os.path.join(project, '.bst'))
- workspace_config_path = os.path.join(project, '.bst', 'workspaces.yml')
+ os.makedirs(os.path.join(project, ".bst"))
+ workspace_config_path = os.path.join(project, ".bst", "workspaces.yml")
_yaml.roundtrip_dump(workspace_cfg, workspace_config_path)
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
# Ensure that various versions that should be accepted are parsed
# correctly.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("workspace_cfg,expected", [
- # Test loading version 0 without a dict
- ({
- "alpha.bst": "/workspaces/bravo"
- }, {
- "format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "running_files": {}
- }
- }
- }),
- # Test loading version 0 with only one source
- ({
- "alpha.bst": {
- 0: "/workspaces/bravo"
- }
- }, {
- "format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "running_files": {}
- }
- }
- }),
- # Test loading version 1
- ({
- "format-version": 1,
- "workspaces": {
- "alpha.bst": {
- "path": "/workspaces/bravo"
- }
- }
- }, {
- "format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "running_files": {}
- }
- }
- }),
- # Test loading version 2
- ({
- "format-version": 2,
- "workspaces": {
- "alpha.bst": {
- "path": "/workspaces/bravo",
- "last_successful": "some_key",
- "running_files": {
- "beta.bst": ["some_file"]
- }
- }
- }
- }, {
- "format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": False,
- "path": "/workspaces/bravo",
- "last_successful": "some_key",
- "running_files": {
- "beta.bst": ["some_file"]
- }
- }
- }
- }),
- # Test loading version 3
- ({
- "format-version": 3,
- "workspaces": {
- "alpha.bst": {
- "prepared": True,
- "path": "/workspaces/bravo",
- "running_files": {}
- }
- }
- }, {
- "format-version": BST_WORKSPACE_FORMAT_VERSION,
- "workspaces": {
- "alpha.bst": {
- "prepared": True,
- "path": "/workspaces/bravo",
- "running_files": {}
- }
- }
- })
-])
+@pytest.mark.parametrize(
+ "workspace_cfg,expected",
+ [
+ # Test loading version 0 without a dict
+ (
+ {"alpha.bst": "/workspaces/bravo"},
+ {
+ "format-version": BST_WORKSPACE_FORMAT_VERSION,
+ "workspaces": {
+ "alpha.bst": {
+ "prepared": False,
+ "path": "/workspaces/bravo",
+ "running_files": {},
+ }
+ },
+ },
+ ),
+ # Test loading version 0 with only one source
+ (
+ {"alpha.bst": {0: "/workspaces/bravo"}},
+ {
+ "format-version": BST_WORKSPACE_FORMAT_VERSION,
+ "workspaces": {
+ "alpha.bst": {
+ "prepared": False,
+ "path": "/workspaces/bravo",
+ "running_files": {},
+ }
+ },
+ },
+ ),
+ # Test loading version 1
+ (
+ {
+ "format-version": 1,
+ "workspaces": {"alpha.bst": {"path": "/workspaces/bravo"}},
+ },
+ {
+ "format-version": BST_WORKSPACE_FORMAT_VERSION,
+ "workspaces": {
+ "alpha.bst": {
+ "prepared": False,
+ "path": "/workspaces/bravo",
+ "running_files": {},
+ }
+ },
+ },
+ ),
+ # Test loading version 2
+ (
+ {
+ "format-version": 2,
+ "workspaces": {
+ "alpha.bst": {
+ "path": "/workspaces/bravo",
+ "last_successful": "some_key",
+ "running_files": {"beta.bst": ["some_file"]},
+ }
+ },
+ },
+ {
+ "format-version": BST_WORKSPACE_FORMAT_VERSION,
+ "workspaces": {
+ "alpha.bst": {
+ "prepared": False,
+ "path": "/workspaces/bravo",
+ "last_successful": "some_key",
+ "running_files": {"beta.bst": ["some_file"]},
+ }
+ },
+ },
+ ),
+ # Test loading version 3
+ (
+ {
+ "format-version": 3,
+ "workspaces": {
+ "alpha.bst": {
+ "prepared": True,
+ "path": "/workspaces/bravo",
+ "running_files": {},
+ }
+ },
+ },
+ {
+ "format-version": BST_WORKSPACE_FORMAT_VERSION,
+ "workspaces": {
+ "alpha.bst": {
+ "prepared": True,
+ "path": "/workspaces/bravo",
+ "running_files": {},
+ }
+ },
+ },
+ ),
+ ],
+)
def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expected):
def parse_dict_as_yaml(node):
- tempfile = os.path.join(str(tmpdir), 'yaml_dump')
+ tempfile = os.path.join(str(tmpdir), "yaml_dump")
_yaml.roundtrip_dump(node, tempfile)
return _yaml.load(tempfile).strip_node_info()
project = str(datafiles)
- os.makedirs(os.path.join(project, '.bst'))
- workspace_config_path = os.path.join(project, '.bst', 'workspaces.yml')
+ os.makedirs(os.path.join(project, ".bst"))
+ workspace_config_path = os.path.join(project, ".bst", "workspaces.yml")
_yaml.roundtrip_dump(workspace_cfg, workspace_config_path)
# Check that we can still read workspace config that is in old format
- result = cli.run(project=project, args=['workspace', 'list'])
+ result = cli.run(project=project, args=["workspace", "list"])
result.assert_success()
loaded_config = _yaml.load(workspace_config_path).strip_node_info()
@@ -1014,31 +1071,30 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
assert loaded_config == parse_dict_as_yaml(workspace_cfg)
# Create a test bst file
- bin_files_path = os.path.join(project, 'files', 'bin-files')
- element_path = os.path.join(project, 'elements')
- element_name = 'workspace-test.bst'
- workspace = os.path.join(str(tmpdir), 'workspace')
+ bin_files_path = os.path.join(project, "files", "bin-files")
+ element_path = os.path.join(project, "elements")
+ element_name = "workspace-test.bst"
+ workspace = os.path.join(str(tmpdir), "workspace")
# Create our repo object of the given source type with
# the bin files, and then collect the initial ref.
#
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(bin_files_path)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, element_name))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# Make a change to the workspaces file
- result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
result.assert_success()
- result = cli.run(project=project, args=['workspace', 'close', '--remove-dir', element_name])
+ result = cli.run(
+ project=project, args=["workspace", "close", "--remove-dir", element_name]
+ )
result.assert_success()
# Check that workspace config is converted correctly if necessary
@@ -1048,73 +1104,60 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
@pytest.mark.datafiles(DATA_DIR)
def test_inconsitent_pipeline_message(cli, tmpdir, datafiles):
- element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git')
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
shutil.rmtree(workspace)
- result = cli.run(project=project, args=[
- 'build', element_name
- ])
+ result = cli.run(project=project, args=["build", element_name])
result.assert_main_error(ErrorDomain.PIPELINE, "inconsistent-pipeline-workspaced")
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
- checkout = os.path.join(str(tmpdir), 'checkout')
- element_name, project, workspace = open_workspace(cli, os.path.join(str(tmpdir), 'repo-a'),
- datafiles, 'git')
+ checkout = os.path.join(str(tmpdir), "checkout")
+ element_name, project, workspace = open_workspace(
+ cli, os.path.join(str(tmpdir), "repo-a"), datafiles, "git"
+ )
- element_path = os.path.join(project, 'elements')
- back_dep_element_name = 'workspace-test-back-dep.bst'
+ element_path = os.path.join(project, "elements")
+ back_dep_element_name = "workspace-test-back-dep.bst"
# Write out our test target
element = {
- 'kind': 'compose',
- 'depends': [
- {
- 'filename': element_name,
- 'type': 'build'
- }
- ]
+ "kind": "compose",
+ "depends": [{"filename": element_name, "type": "build"}],
}
- _yaml.roundtrip_dump(element,
- os.path.join(element_path, back_dep_element_name))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, back_dep_element_name))
# Modify workspace
- shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
- os.makedirs(os.path.join(workspace, 'etc'))
- with open(os.path.join(workspace, 'etc', 'pony.conf'), 'w') as f:
+ shutil.rmtree(os.path.join(workspace, "usr", "bin"))
+ os.makedirs(os.path.join(workspace, "etc"))
+ with open(os.path.join(workspace, "etc", "pony.conf"), "w") as f:
f.write("PONY='pink'")
# Configure strict mode
strict_mode = True
- if strict != 'strict':
+ if strict != "strict":
strict_mode = False
- cli.configure({
- 'projects': {
- 'test': {
- 'strict': strict_mode
- }
- }
- })
+ cli.configure({"projects": {"test": {"strict": strict_mode}}})
# Build artifact with dependency's modified workspace
- assert cli.get_element_state(project, element_name) == 'buildable'
+ assert cli.get_element_state(project, element_name) == "buildable"
key_a1 = cli.get_element_key(project, element_name)
- assert key_a1 != "{:?<64}".format('')
- assert cli.get_element_state(project, back_dep_element_name) == 'waiting'
+ assert key_a1 != "{:?<64}".format("")
+ assert cli.get_element_state(project, back_dep_element_name) == "waiting"
key_b1 = cli.get_element_key(project, back_dep_element_name)
- assert key_b1 != "{:?<64}".format('')
- result = cli.run(project=project, args=['build', back_dep_element_name])
+ assert key_b1 != "{:?<64}".format("")
+ result = cli.run(project=project, args=["build", back_dep_element_name])
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
key_a2 = cli.get_element_key(project, element_name)
- assert key_a2 != "{:?<64}".format('')
- assert cli.get_element_state(project, back_dep_element_name) == 'cached'
+ assert key_a2 != "{:?<64}".format("")
+ assert cli.get_element_state(project, back_dep_element_name) == "cached"
key_b2 = cli.get_element_key(project, back_dep_element_name)
- assert key_b2 != "{:?<64}".format('')
- result = cli.run(project=project, args=['build', back_dep_element_name])
+ assert key_b2 != "{:?<64}".format("")
+ result = cli.run(project=project, args=["build", back_dep_element_name])
result.assert_success()
# workspace keys are not recalculated
@@ -1122,31 +1165,29 @@ def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
assert key_b1 == key_b2
# Checkout the result
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', back_dep_element_name, '--directory', checkout
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", back_dep_element_name, "--directory", checkout],
+ )
result.assert_success()
# Check that the pony.conf from the modified workspace exists
- filename = os.path.join(checkout, 'etc', 'pony.conf')
+ filename = os.path.join(checkout, "etc", "pony.conf")
assert os.path.exists(filename)
# Check that the original /usr/bin/hello is not in the checkout
- assert not os.path.exists(os.path.join(checkout, 'usr', 'bin', 'hello'))
+ assert not os.path.exists(os.path.join(checkout, "usr", "bin", "hello"))
@pytest.mark.datafiles(DATA_DIR)
def test_multiple_failed_builds(cli, tmpdir, datafiles):
element_config = {
"kind": "manual",
- "config": {
- "configure-commands": [
- "unknown_command_that_will_fail"
- ]
- }
+ "config": {"configure-commands": ["unknown_command_that_will_fail"]},
}
- element_name, project, _ = open_workspace(cli, tmpdir, datafiles,
- "git", element_attrs=element_config)
+ element_name, project, _ = open_workspace(
+ cli, tmpdir, datafiles, "git", element_attrs=element_config
+ )
for _ in range(2):
result = cli.run(project=project, args=["build", element_name])
@@ -1155,60 +1196,69 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize('subdir', [True, False], ids=["subdir", "no-subdir"])
+@pytest.mark.parametrize("subdir", [True, False], ids=["subdir", "no-subdir"])
@pytest.mark.parametrize("guess_element", [True, False], ids=["guess", "no-guess"])
def test_external_fetch(cli, datafiles, tmpdir_factory, subdir, guess_element):
# An element with an open workspace can't be fetched, but we still expect fetches
# to fetch any dependencies
- tmpdir = tmpdir_factory.mktemp('')
- depend_element = 'fetchable.bst'
+ tmpdir = tmpdir_factory.mktemp("")
+ depend_element = "fetchable.bst"
# Create an element to fetch (local sources do not need to fetch)
- create_element_size(depend_element, str(datafiles), 'elements', [], 1024)
+ create_element_size(depend_element, str(datafiles), "elements", [], 1024)
element_name, project, workspace = open_workspace(
- cli, tmpdir, datafiles, "git", no_checkout=True,
- element_attrs={'depends': [depend_element]}
+ cli,
+ tmpdir,
+ datafiles,
+ "git",
+ no_checkout=True,
+ element_attrs={"depends": [depend_element]},
)
arg_elm = [element_name] if not guess_element else []
if subdir:
- call_dir = os.path.join(workspace, 'usr')
+ call_dir = os.path.join(workspace, "usr")
os.makedirs(call_dir, exist_ok=True)
else:
call_dir = workspace
# Assert that the depended element is not fetched yet
- assert cli.get_element_state(str(datafiles), depend_element) == 'fetch needed'
+ assert cli.get_element_state(str(datafiles), depend_element) == "fetch needed"
# Fetch the workspaced element
- result = cli.run(project=project, args=['-C', call_dir, 'source', 'fetch', *arg_elm])
+ result = cli.run(
+ project=project, args=["-C", call_dir, "source", "fetch", *arg_elm]
+ )
result.assert_success()
# Assert that the depended element has now been fetched
- assert cli.get_element_state(str(datafiles), depend_element) == 'buildable'
+ assert cli.get_element_state(str(datafiles), depend_element) == "buildable"
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("guess_element", [True, False], ids=["guess", "no-guess"])
def test_external_push_pull(cli, datafiles, tmpdir_factory, guess_element):
# Pushing and pulling to/from an artifact cache works from an external workspace
- tmpdir = tmpdir_factory.mktemp('')
+ tmpdir = tmpdir_factory.mktemp("")
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
arg_elm = [element_name] if not guess_element else []
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- result = cli.run(project=project, args=['-C', workspace, 'build', element_name])
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
+ result = cli.run(project=project, args=["-C", workspace, "build", element_name])
result.assert_success()
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
- result = cli.run(project=project, args=['-C', workspace, 'artifact', 'push', *arg_elm])
+ result = cli.run(
+ project=project, args=["-C", workspace, "artifact", "push", *arg_elm]
+ )
result.assert_success()
- result = cli.run(project=project, args=['-C', workspace, 'artifact', 'pull', '--deps', 'all', *arg_elm])
+ result = cli.run(
+ project=project,
+ args=["-C", workspace, "artifact", "pull", "--deps", "all", *arg_elm],
+ )
result.assert_success()
@@ -1218,35 +1268,39 @@ def test_external_push_pull(cli, datafiles, tmpdir_factory, guess_element):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("guess_element", [True, False], ids=["guess", "no-guess"])
def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
- tmpdir = tmpdir_factory.mktemp('')
+ tmpdir = tmpdir_factory.mktemp("")
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
- element_file = os.path.join(str(datafiles), 'elements', element_name)
+ element_file = os.path.join(str(datafiles), "elements", element_name)
arg_elm = [element_name] if not guess_element else []
# Delete the ref from the source so that we can detect if the
# element has been tracked after closing the workspace
element_contents = _yaml.load(element_file)
- ref1 = element_contents.get_sequence('sources').mapping_at(0).get_str('ref')
- del element_contents.get_sequence('sources').mapping_at(0)['ref']
+ ref1 = element_contents.get_sequence("sources").mapping_at(0).get_str("ref")
+ del element_contents.get_sequence("sources").mapping_at(0)["ref"]
_yaml.roundtrip_dump(element_contents, element_file)
- result = cli.run(project=project, args=['-C', workspace, 'source', 'track', *arg_elm])
+ result = cli.run(
+ project=project, args=["-C", workspace, "source", "track", *arg_elm]
+ )
result.assert_success()
# Element is not tracked now
element_contents = _yaml.load(element_file)
- assert 'ref' not in element_contents.get_sequence('sources').mapping_at(0)
+ assert "ref" not in element_contents.get_sequence("sources").mapping_at(0)
# close the workspace
- result = cli.run(project=project, args=['-C', workspace, 'workspace', 'close', *arg_elm])
+ result = cli.run(
+ project=project, args=["-C", workspace, "workspace", "close", *arg_elm]
+ )
result.assert_success()
# and retrack the element
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
result.assert_success()
element_contents = _yaml.load(element_file)
- ref2 = element_contents.get_sequence('sources').mapping_at(0).get_str('ref')
+ ref2 = element_contents.get_sequence("sources").mapping_at(0).get_str("ref")
# these values should be equivalent
assert ref1 == ref2
@@ -1254,62 +1308,90 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
@pytest.mark.datafiles(DATA_DIR)
def test_external_open_other(cli, datafiles, tmpdir_factory):
# From inside an external workspace, open another workspace
- tmpdir1 = tmpdir_factory.mktemp('')
- tmpdir2 = tmpdir_factory.mktemp('')
+ tmpdir1 = tmpdir_factory.mktemp("")
+ tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- _, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
- beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
+ _, project, alpha_workspace = open_workspace(
+ cli, tmpdir1, datafiles, "git", suffix="-alpha"
+ )
+ beta_element, _, beta_workspace = open_workspace(
+ cli, tmpdir2, datafiles, "git", suffix="-beta"
+ )
# Closing the other element first, because I'm too lazy to create an
# element without opening it
- result = cli.run(project=project, args=['workspace', 'close', beta_element])
+ result = cli.run(project=project, args=["workspace", "close", beta_element])
result.assert_success()
- result = cli.run(project=project, args=[
- '-C', alpha_workspace, 'workspace', 'open', '--force', '--directory', beta_workspace, beta_element
- ])
+ result = cli.run(
+ project=project,
+ args=[
+ "-C",
+ alpha_workspace,
+ "workspace",
+ "open",
+ "--force",
+ "--directory",
+ beta_workspace,
+ beta_element,
+ ],
+ )
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_external_close_other(cli, datafiles, tmpdir_factory):
# From inside an external workspace, close the other workspace
- tmpdir1 = tmpdir_factory.mktemp('')
- tmpdir2 = tmpdir_factory.mktemp('')
+ tmpdir1 = tmpdir_factory.mktemp("")
+ tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- _, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
+ _, project, alpha_workspace = open_workspace(
+ cli, tmpdir1, datafiles, "git", suffix="-alpha"
+ )
beta_element, _, _ = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
- result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close', beta_element])
+ result = cli.run(
+ project=project,
+ args=["-C", alpha_workspace, "workspace", "close", beta_element],
+ )
result.assert_success()
- assert 'you can no longer run BuildStream' not in result.stderr
+ assert "you can no longer run BuildStream" not in result.stderr
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("guess_element", [True, False], ids=["guess", "no-guess"])
def test_external_close_self(cli, datafiles, tmpdir_factory, guess_element):
# From inside an external workspace, close it
- tmpdir1 = tmpdir_factory.mktemp('')
- tmpdir2 = tmpdir_factory.mktemp('')
+ tmpdir1 = tmpdir_factory.mktemp("")
+ tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
+ alpha_element, project, alpha_workspace = open_workspace(
+ cli, tmpdir1, datafiles, "git", suffix="-alpha"
+ )
_, _, _ = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
arg_elm = [alpha_element] if not guess_element else []
- result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close', *arg_elm])
+ result = cli.run(
+ project=project, args=["-C", alpha_workspace, "workspace", "close", *arg_elm]
+ )
result.assert_success()
- assert 'you can no longer run BuildStream' in result.stderr
+ assert "you can no longer run BuildStream" in result.stderr
@pytest.mark.datafiles(DATA_DIR)
def test_external_reset_other(cli, datafiles, tmpdir_factory):
- tmpdir1 = tmpdir_factory.mktemp('')
- tmpdir2 = tmpdir_factory.mktemp('')
+ tmpdir1 = tmpdir_factory.mktemp("")
+ tmpdir2 = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
- _, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", suffix="-alpha")
+ _, project, alpha_workspace = open_workspace(
+ cli, tmpdir1, datafiles, "git", suffix="-alpha"
+ )
beta_element, _, _ = open_workspace(cli, tmpdir2, datafiles, "git", suffix="-beta")
- result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'reset', beta_element])
+ result = cli.run(
+ project=project,
+ args=["-C", alpha_workspace, "workspace", "reset", beta_element],
+ )
result.assert_success()
@@ -1320,21 +1402,23 @@ def test_external_reset_self(cli, datafiles, tmpdir, guess_element):
arg_elm = [element] if not guess_element else []
# Command succeeds
- result = cli.run(project=project, args=['-C', workspace, 'workspace', 'reset', *arg_elm])
+ result = cli.run(
+ project=project, args=["-C", workspace, "workspace", "reset", *arg_elm]
+ )
result.assert_success()
# Successive commands still work (i.e. .bstproject.yaml hasn't been deleted)
- result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
+ result = cli.run(project=project, args=["-C", workspace, "workspace", "list"])
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_external_list(cli, datafiles, tmpdir_factory):
- tmpdir = tmpdir_factory.mktemp('')
+ tmpdir = tmpdir_factory.mktemp("")
# Making use of the assumption that it's the same project in both invocations of open_workspace
_, project, workspace = open_workspace(cli, tmpdir, datafiles, "git")
- result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
+ result = cli.run(project=project, args=["-C", workspace, "workspace", "list"])
result.assert_success()
@@ -1345,26 +1429,24 @@ def test_multisource_workspace(cli, datafiles, tmpdir):
project = str(datafiles)
element_name = "multisource.bst"
element = {
- 'kind': 'import',
- 'sources': [{
- 'kind': 'local',
- 'path': 'files/bin-files'
- }, {
- 'kind': 'local',
- 'path': 'files/dev-files'
- }]
+ "kind": "import",
+ "sources": [
+ {"kind": "local", "path": "files/bin-files"},
+ {"kind": "local", "path": "files/dev-files"},
+ ],
}
- element_path = os.path.join(project, 'elements', element_name)
+ element_path = os.path.join(project, "elements", element_name)
_yaml.roundtrip_dump(element, element_path)
- workspace_dir = os.path.join(str(tmpdir), 'multisource')
- res = cli.run(project=project,
- args=['workspace', 'open', 'multisource.bst',
- '--directory', workspace_dir])
+ workspace_dir = os.path.join(str(tmpdir), "multisource")
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "multisource.bst", "--directory", workspace_dir],
+ )
res.assert_success()
- directories = os.listdir(os.path.join(workspace_dir, 'usr'))
- assert 'bin' in directories and 'include' in directories
+ directories = os.listdir(os.path.join(workspace_dir, "usr"))
+ assert "bin" in directories and "include" in directories
# This strange test tests against a regression raised in issue #919,
@@ -1373,83 +1455,88 @@ def test_multisource_workspace(cli, datafiles, tmpdir):
# but just successfully builds the workspaced element and happily
# exits without completing the build.
#
-TEST_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__))
-)
+TEST_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)))
@pytest.mark.datafiles(TEST_DIR)
@pytest.mark.parametrize(
["case", "non_workspaced_elements_state"],
[
- ("workspaced-build-dep", ["waiting", "waiting", "waiting", "waiting", "waiting"]),
- ("workspaced-runtime-dep", ["buildable", "buildable", "waiting", "waiting", "waiting"])
+ (
+ "workspaced-build-dep",
+ ["waiting", "waiting", "waiting", "waiting", "waiting"],
+ ),
+ (
+ "workspaced-runtime-dep",
+ ["buildable", "buildable", "waiting", "waiting", "waiting"],
+ ),
],
)
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
def test_build_all(cli, tmpdir, datafiles, case, strict, non_workspaced_elements_state):
project = os.path.join(str(datafiles), case)
- workspace = os.path.join(str(tmpdir), 'workspace')
- non_leaf_elements = ["elem2.bst", "elem3.bst", "stack.bst", "elem4.bst", "elem5.bst"]
+ workspace = os.path.join(str(tmpdir), "workspace")
+ non_leaf_elements = [
+ "elem2.bst",
+ "elem3.bst",
+ "stack.bst",
+ "elem4.bst",
+ "elem5.bst",
+ ]
all_elements = ["elem1.bst", *non_leaf_elements]
# Configure strict mode
strict_mode = True
- if strict != 'strict':
+ if strict != "strict":
strict_mode = False
- cli.configure({
- 'projects': {
- 'test': {
- 'strict': strict_mode
- }
- }
- })
+ cli.configure({"projects": {"test": {"strict": strict_mode}}})
# First open the workspace
- result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, 'elem1.bst'])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, "elem1.bst"],
+ )
result.assert_success()
# Ensure all elements are in the expected state before building
- assert cli.get_element_states(project, all_elements) == \
- dict(zip(all_elements, ['buildable', *non_workspaced_elements_state]))
+ assert cli.get_element_states(project, all_elements) == dict(
+ zip(all_elements, ["buildable", *non_workspaced_elements_state])
+ )
# Now build the targets elem4.bst and elem5.bst
- result = cli.run(project=project, args=['build', 'elem4.bst', 'elem5.bst'])
+ result = cli.run(project=project, args=["build", "elem4.bst", "elem5.bst"])
result.assert_success()
# Assert that the target is built
- assert cli.get_element_states(project, all_elements) == \
- {elem: "cached" for elem in all_elements}
+ assert cli.get_element_states(project, all_elements) == {
+ elem: "cached" for elem in all_elements
+ }
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize('strict', ['strict', 'non-strict'])
+@pytest.mark.parametrize("strict", ["strict", "non-strict"])
def test_show_workspace_logs(cli, tmpdir, datafiles, strict):
project = str(datafiles)
- workspace = os.path.join(str(tmpdir), 'workspace')
- target = 'manual.bst'
+ workspace = os.path.join(str(tmpdir), "workspace")
+ target = "manual.bst"
# Configure strict mode
strict_mode = True
- if strict != 'strict':
+ if strict != "strict":
strict_mode = False
- cli.configure({
- 'projects': {
- 'test': {
- 'strict': strict_mode
- }
- }
- })
+ cli.configure({"projects": {"test": {"strict": strict_mode}}})
# First open the workspace
- result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, target])
+ result = cli.run(
+ project=project, args=["workspace", "open", "--directory", workspace, target]
+ )
result.assert_success()
# Build the element
- result = cli.run(project=project, args=['build', target])
- result.assert_task_error(ErrorDomain.SANDBOX, 'missing-command')
+ result = cli.run(project=project, args=["build", target])
+ result.assert_task_error(ErrorDomain.SANDBOX, "missing-command")
- result = cli.run(project=project, args=['artifact', 'log', target])
+ result = cli.run(project=project, args=["artifact", "log", target])
result.assert_success()
# Assert that the log is not empty
diff --git a/tests/integration/artifact.py b/tests/integration/artifact.py
index 59b7bfaad..67565b803 100644
--- a/tests/integration/artifact.py
+++ b/tests/integration/artifact.py
@@ -36,10 +36,7 @@ pytestmark = pytest.mark.integration
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
# A test to capture the integration of the cache-buildtrees
@@ -48,70 +45,80 @@ DATA_DIR = os.path.join(
# Does this really need a sandbox?
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_cache_buildtrees(cli, tmpdir, datafiles):
project = str(datafiles)
- element_name = 'autotools/amhello.bst'
+ element_name = "autotools/amhello.bst"
cwd = str(tmpdir)
# Create artifact shares for pull & push testing
- with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
- create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
- create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
- cli.configure({
- 'artifacts': {'url': share1.repo, 'push': True},
- 'cachedir': str(tmpdir)
- })
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "share1")
+ ) as share1, create_artifact_share(
+ os.path.join(str(tmpdir), "share2")
+ ) as share2, create_artifact_share(
+ os.path.join(str(tmpdir), "share3")
+ ) as share3:
+ cli.configure(
+ {"artifacts": {"url": share1.repo, "push": True}, "cachedir": str(tmpdir)}
+ )
# Build autotools element with the default behavior of caching buildtrees
# only when necessary. The artifact should be successfully pushed to the share1 remote
# and cached locally with an 'empty' buildtree digest, as it's not a
# dangling ref
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- assert cli.get_element_state(project, element_name) == 'cached'
- assert share1.get_artifact(cli.get_artifact_name(project, 'test', element_name))
+ assert cli.get_element_state(project, element_name) == "cached"
+ assert share1.get_artifact(cli.get_artifact_name(project, "test", element_name))
# The buildtree dir should not exist, as the default configuration does not cache buildtrees.
- artifact_name = cli.get_artifact_name(project, 'test', element_name)
+ artifact_name = cli.get_artifact_name(project, "test", element_name)
assert share1.get_artifact(artifact_name)
with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
# Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
# it was cached in share1 as expected without a buildtree dir
- shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
- assert cli.get_element_state(project, element_name) != 'cached'
- result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
+ shutil.rmtree(os.path.join(str(tmpdir), "cas"))
+ shutil.rmtree(os.path.join(str(tmpdir), "artifacts"))
+ assert cli.get_element_state(project, element_name) != "cached"
+ result = cli.run(
+ project=project,
+ args=["--pull-buildtrees", "artifact", "pull", element_name],
+ )
assert element_name in result.get_pulled_elements()
with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
- shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), "cas"))
+ shutil.rmtree(os.path.join(str(tmpdir), "artifacts"))
# Assert that the default pull behaviour of not including buildtrees works as expected for
# the artifact in share1, which was purposely cached with an empty one. As such the
# pulled artifact will have a dangling ref for the buildtree dir, regardless of content,
# leading to no buildtreedir being extracted
- result = cli.run(project=project, args=['artifact', 'pull', element_name])
+ result = cli.run(project=project, args=["artifact", "pull", element_name])
assert element_name in result.get_pulled_elements()
with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
- shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), "cas"))
+ shutil.rmtree(os.path.join(str(tmpdir), "artifacts"))
# Repeat building the artifacts, this time with cache-buildtrees set to
# 'always' via the cli; as such the buildtree dir should not be empty
- cli.configure({
- 'artifacts': {'url': share2.repo, 'push': True},
- 'cachedir': str(tmpdir)
- })
- result = cli.run(project=project, args=['--cache-buildtrees', 'always', 'build', element_name])
+ cli.configure(
+ {"artifacts": {"url": share2.repo, "push": True}, "cachedir": str(tmpdir)}
+ )
+ result = cli.run(
+ project=project,
+ args=["--cache-buildtrees", "always", "build", element_name],
+ )
assert result.exit_code == 0
- assert cli.get_element_state(project, element_name) == 'cached'
- assert share2.get_artifact(cli.get_artifact_name(project, 'test', element_name))
+ assert cli.get_element_state(project, element_name) == "cached"
+ assert share2.get_artifact(cli.get_artifact_name(project, "test", element_name))
# Cache key will be the same however the digest hash will have changed as expected, so reconstruct paths
with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
@@ -120,28 +127,33 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
# Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
# it was cached in share2 as expected with a populated buildtree dir
- shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
- assert cli.get_element_state(project, element_name) != 'cached'
- result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
+ shutil.rmtree(os.path.join(str(tmpdir), "cas"))
+ shutil.rmtree(os.path.join(str(tmpdir), "artifacts"))
+ assert cli.get_element_state(project, element_name) != "cached"
+ result = cli.run(
+ project=project,
+ args=["--pull-buildtrees", "artifact", "pull", element_name],
+ )
assert element_name in result.get_pulled_elements()
with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
assert os.listdir(buildtreedir)
- shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), "cas"))
+ shutil.rmtree(os.path.join(str(tmpdir), "artifacts"))
# Clarify that the user config option for cache-buildtrees works the same as the cli
# option does. Point to share3, which does not have the artifacts cached, to force
# a build
- cli.configure({
- 'artifacts': {'url': share3.repo, 'push': True},
- 'cachedir': str(tmpdir),
- 'cache': {'cache-buildtrees': 'always'}
- })
- result = cli.run(project=project, args=['build', element_name])
+ cli.configure(
+ {
+ "artifacts": {"url": share3.repo, "push": True},
+ "cachedir": str(tmpdir),
+ "cache": {"cache-buildtrees": "always"},
+ }
+ )
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
with cli.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
assert os.listdir(buildtreedir)
diff --git a/tests/integration/autotools.py b/tests/integration/autotools.py
index 16cb38af7..d270b2a77 100644
--- a/tests/integration/autotools.py
+++ b/tests/integration/autotools.py
@@ -12,67 +12,92 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# Test that an autotools build 'works' - we use the autotools sample
# amhello project for this.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_autotools_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'autotools/amhello.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "autotools/amhello.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
- '/usr/share',
- '/usr/bin/hello', '/usr/share/doc',
- '/usr/share/doc/amhello',
- '/usr/share/doc/amhello/README'])
+ assert_contains(
+ checkout,
+ [
+ "/usr",
+ "/usr/lib",
+ "/usr/bin",
+ "/usr/share",
+ "/usr/bin/hello",
+ "/usr/share/doc",
+ "/usr/share/doc/amhello",
+ "/usr/share/doc/amhello/README",
+ ],
+ )
# Test that an autotools build 'works' - we use the autotools sample
# amhello project for this.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_autotools_confroot_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'autotools/amhelloconfroot.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "autotools/amhelloconfroot.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
- '/usr/share',
- '/usr/bin/hello', '/usr/share/doc',
- '/usr/share/doc/amhello',
- '/usr/share/doc/amhello/README'])
+ assert_contains(
+ checkout,
+ [
+ "/usr",
+ "/usr/lib",
+ "/usr/bin",
+ "/usr/share",
+ "/usr/bin/hello",
+ "/usr/share/doc",
+ "/usr/share/doc/amhello",
+ "/usr/share/doc/amhello/README",
+ ],
+ )
# Test running an executable built with autotools
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_autotools_run(cli, datafiles):
project = str(datafiles)
- element_name = 'autotools/amhello.bst'
+ element_name = "autotools/amhello.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['shell', element_name, '/usr/bin/hello'])
+ result = cli.run(project=project, args=["shell", element_name, "/usr/bin/hello"])
assert result.exit_code == 0
- assert result.output == 'Hello World!\nThis is amhello 1.0.\n'
+ assert result.output == "Hello World!\nThis is amhello 1.0.\n"
diff --git a/tests/integration/build-uid.py b/tests/integration/build-uid.py
index 66f9b3fbc..367cf0248 100644
--- a/tests/integration/build-uid.py
+++ b/tests/integration/build-uid.py
@@ -10,55 +10,56 @@ from buildstream.testing._utils.site import HAVE_SANDBOX, IS_LINUX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
-@pytest.mark.skipif(not IS_LINUX or HAVE_SANDBOX != "bwrap", reason='Only available on linux with bubblewrap')
+@pytest.mark.skipif(
+ not IS_LINUX or HAVE_SANDBOX != "bwrap",
+ reason="Only available on linux with bubblewrap",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_build_uid_overridden(cli, datafiles):
project = str(datafiles)
- element_name = 'build-uid/build-uid.bst'
+ element_name = "build-uid/build-uid.bst"
project_config = {
- 'name': 'build-uid-test',
- 'sandbox': {
- 'build-uid': 800,
- 'build-gid': 900
- }
+ "name": "build-uid-test",
+ "sandbox": {"build-uid": 800, "build-gid": 900},
}
result = cli.run_project_config(
- project=project, project_config=project_config, args=['build', element_name])
+ project=project, project_config=project_config, args=["build", element_name]
+ )
assert result.exit_code == 0
-@pytest.mark.skipif(not IS_LINUX or HAVE_SANDBOX != "bwrap", reason='Only available on linux with bubbelwrap')
+@pytest.mark.skipif(
+ not IS_LINUX or HAVE_SANDBOX != "bwrap",
+ reason="Only available on linux with bubbelwrap",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_build_uid_in_project(cli, datafiles):
project = str(datafiles)
- element_name = 'build-uid/build-uid-1023.bst'
+ element_name = "build-uid/build-uid-1023.bst"
project_config = {
- 'name': 'build-uid-test',
- 'sandbox': {
- 'build-uid': 1023,
- 'build-gid': 3490
- }
+ "name": "build-uid-test",
+ "sandbox": {"build-uid": 1023, "build-gid": 3490},
}
result = cli.run_project_config(
- project=project, project_config=project_config, args=['build', element_name])
+ project=project, project_config=project_config, args=["build", element_name]
+ )
assert result.exit_code == 0
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(HAVE_SANDBOX != "bwrap", reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ HAVE_SANDBOX != "bwrap", reason="Only available with a functioning sandbox"
+)
def test_build_uid_default(cli, datafiles):
project = str(datafiles)
- element_name = 'build-uid/build-uid-default.bst'
+ element_name = "build-uid/build-uid-default.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
diff --git a/tests/integration/cachedfail.py b/tests/integration/cachedfail.py
index 63ad2d4d3..366346e2b 100644
--- a/tests/integration/cachedfail.py
+++ b/tests/integration/cachedfail.py
@@ -31,158 +31,132 @@ from tests.testutils import create_artifact_share
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_build_checkout_cached_fail(cli, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element.bst')
- checkout = os.path.join(cli.directory, 'checkout')
+ element_path = os.path.join(project, "elements", "element.bst")
+ checkout = os.path.join(cli.directory, "checkout")
# Write out our test target
element = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'touch %{install-root}/foo',
- 'false',
- ],
- },
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {"commands": ["touch %{install-root}/foo", "false",],},
}
_yaml.roundtrip_dump(element, element_path)
# Try to build it, this should result in a failure that contains the content
- result = cli.run(project=project, args=['build', 'element.bst'])
+ result = cli.run(project=project, args=["build", "element.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
# Assert that it's cached in a failed artifact
- assert cli.get_element_state(project, 'element.bst') == 'failed'
+ assert cli.get_element_state(project, "element.bst") == "failed"
# Now check it out
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'element.bst', '--directory', checkout
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "element.bst", "--directory", checkout],
+ )
result.assert_success()
# Check that the checkout contains the file created before failure
- filename = os.path.join(checkout, 'foo')
+ filename = os.path.join(checkout, "foo")
assert os.path.exists(filename)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_build_depend_on_cached_fail(cli, datafiles):
project = str(datafiles)
- dep_path = os.path.join(project, 'elements', 'dep.bst')
- target_path = os.path.join(project, 'elements', 'target.bst')
+ dep_path = os.path.join(project, "elements", "dep.bst")
+ target_path = os.path.join(project, "elements", "target.bst")
dep = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'touch %{install-root}/foo',
- 'false',
- ],
- },
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {"commands": ["touch %{install-root}/foo", "false",],},
}
_yaml.roundtrip_dump(dep, dep_path)
target = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- {
- 'filename': 'dep.bst',
- 'type': 'build',
- },
+ "kind": "script",
+ "depends": [
+ {"filename": "base.bst", "type": "build",},
+ {"filename": "dep.bst", "type": "build",},
],
- 'config': {
- 'commands': [
- 'test -e /foo',
- ],
- },
+ "config": {"commands": ["test -e /foo",],},
}
_yaml.roundtrip_dump(target, target_path)
# Try to build it, this should result in caching a failure to build dep
- result = cli.run(project=project, args=['build', 'dep.bst'])
+ result = cli.run(project=project, args=["build", "dep.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
# Assert that it's cached in a failed artifact
- assert cli.get_element_state(project, 'dep.bst') == 'failed'
+ assert cli.get_element_state(project, "dep.bst") == "failed"
# Now we should fail because we've a cached fail of dep
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
# Assert that it's not yet built, since one of its dependencies isn't ready.
- assert cli.get_element_state(project, 'target.bst') == 'waiting'
+ assert cli.get_element_state(project, "target.bst") == "waiting"
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("on_error", ("continue", "quit"))
def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
- if on_error == 'quit':
- pytest.xfail('https://gitlab.com/BuildStream/buildstream/issues/534')
+ if on_error == "quit":
+ pytest.xfail("https://gitlab.com/BuildStream/buildstream/issues/534")
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element.bst')
+ element_path = os.path.join(project, "elements", "element.bst")
# Write out our test target
element = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'false',
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {
+ "commands": [
+ "false",
# Ensure unique cache key for different test variants
- 'TEST="{}"'.format(os.environ.get('PYTEST_CURRENT_TEST')),
+ 'TEST="{}"'.format(os.environ.get("PYTEST_CURRENT_TEST")),
],
},
}
_yaml.roundtrip_dump(element, element_path)
- with create_artifact_share(os.path.join(str(tmpdir), 'remote')) as share:
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
+ with create_artifact_share(os.path.join(str(tmpdir), "remote")) as share:
+ cli.configure(
+ {"artifacts": {"url": share.repo, "push": True},}
+ )
# Build the element, continuing to finish active jobs on error.
- result = cli.run(project=project, args=['--on-error={}'.format(on_error), 'build', 'element.bst'])
+ result = cli.run(
+ project=project,
+ args=["--on-error={}".format(on_error), "build", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
# This element should have failed
- assert cli.get_element_state(project, 'element.bst') == 'failed'
+ assert cli.get_element_state(project, "element.bst") == "failed"
# This element should have been pushed to the remote
- assert share.get_artifact(cli.get_artifact_name(project, 'test', 'element.bst'))
+ assert share.get_artifact(cli.get_artifact_name(project, "test", "element.bst"))
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("on_error", ("continue", "quit"))
def test_push_failed_missing_shell(cli, tmpdir, datafiles, on_error):
@@ -191,78 +165,74 @@ def test_push_failed_missing_shell(cli, tmpdir, datafiles, on_error):
When we don't have a valid shell, the artifact will be empty, without even the root directory.
This ensures we handle the case of an entirely empty artifact correctly.
"""
- if on_error == 'quit':
- pytest.xfail('https://gitlab.com/BuildStream/buildstream/issues/534')
+ if on_error == "quit":
+ pytest.xfail("https://gitlab.com/BuildStream/buildstream/issues/534")
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element.bst')
+ element_path = os.path.join(project, "elements", "element.bst")
# Write out our test target
element = {
- 'kind': 'script',
- 'config': {
- 'commands': [
- 'false',
+ "kind": "script",
+ "config": {
+ "commands": [
+ "false",
# Ensure unique cache key for different test variants
- 'TEST="{}"'.format(os.environ.get('PYTEST_CURRENT_TEST')),
+ 'TEST="{}"'.format(os.environ.get("PYTEST_CURRENT_TEST")),
],
},
}
_yaml.roundtrip_dump(element, element_path)
- with create_artifact_share(os.path.join(str(tmpdir), 'remote')) as share:
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
+ with create_artifact_share(os.path.join(str(tmpdir), "remote")) as share:
+ cli.configure(
+ {"artifacts": {"url": share.repo, "push": True},}
+ )
# Build the element, continuing to finish active jobs on error.
- result = cli.run(project=project, args=['--on-error={}'.format(on_error), 'build', 'element.bst'])
+ result = cli.run(
+ project=project,
+ args=["--on-error={}".format(on_error), "build", "element.bst"],
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
# This element should have failed
- assert cli.get_element_state(project, 'element.bst') == 'failed'
+ assert cli.get_element_state(project, "element.bst") == "failed"
# This element should have been pushed to the remote
- assert share.get_artifact(cli.get_artifact_name(project, 'test', 'element.bst'))
+ assert share.get_artifact(cli.get_artifact_name(project, "test", "element.bst"))
-@pytest.mark.skipif(HAVE_SANDBOX != 'bwrap', reason='Only available with bubblewrap on Linux')
+@pytest.mark.skipif(
+ HAVE_SANDBOX != "bwrap", reason="Only available with bubblewrap on Linux"
+)
@pytest.mark.datafiles(DATA_DIR)
def test_host_tools_errors_are_not_cached(cli, datafiles, tmp_path):
# Create symlink to buildbox-casd to work with custom PATH
- buildbox_casd = tmp_path.joinpath('bin/buildbox-casd')
+ buildbox_casd = tmp_path.joinpath("bin/buildbox-casd")
buildbox_casd.parent.mkdir()
- os.symlink(utils.get_host_tool('buildbox-casd'), str(buildbox_casd))
+ os.symlink(utils.get_host_tool("buildbox-casd"), str(buildbox_casd))
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element.bst')
+ element_path = os.path.join(project, "elements", "element.bst")
# Write out our test target
element = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'true',
- ],
- },
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {"commands": ["true",],},
}
_yaml.roundtrip_dump(element, element_path)
# Build without access to host tools, this will fail
result1 = cli.run(
project=project,
- args=['build', 'element.bst'],
- env={'PATH': str(tmp_path.joinpath('bin')),
- 'BST_FORCE_SANDBOX': None})
- result1.assert_task_error(ErrorDomain.SANDBOX, 'unavailable-local-sandbox')
- assert cli.get_element_state(project, 'element.bst') == 'buildable'
+ args=["build", "element.bst"],
+ env={"PATH": str(tmp_path.joinpath("bin")), "BST_FORCE_SANDBOX": None},
+ )
+ result1.assert_task_error(ErrorDomain.SANDBOX, "unavailable-local-sandbox")
+ assert cli.get_element_state(project, "element.bst") == "buildable"
# When rebuilding, this should work
- result2 = cli.run(project=project, args=['build', 'element.bst'])
+ result2 = cli.run(project=project, args=["build", "element.bst"])
result2.assert_success()
- assert cli.get_element_state(project, 'element.bst') == 'cached'
+ assert cli.get_element_state(project, "element.bst") == "cached"
diff --git a/tests/integration/cmake.py b/tests/integration/cmake.py
index a0298c2c3..14ee7c967 100644
--- a/tests/integration/cmake.py
+++ b/tests/integration/cmake.py
@@ -12,57 +12,69 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_cmake_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'cmake/cmakehello.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "cmake/cmakehello.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/usr', '/usr/bin', '/usr/bin/hello'])
+ assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_cmake_confroot_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'cmake/cmakeconfroothello.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "cmake/cmakeconfroothello.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/usr', '/usr/bin', '/usr/bin/hello'])
+ assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_cmake_run(cli, datafiles):
project = str(datafiles)
- element_name = 'cmake/cmakehello.bst'
+ element_name = "cmake/cmakehello.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['shell', element_name, '/usr/bin/hello'])
+ result = cli.run(project=project, args=["shell", element_name, "/usr/bin/hello"])
assert result.exit_code == 0
- assert result.output == """Hello World!
+ assert (
+ result.output
+ == """Hello World!
This is hello.
"""
+ )
diff --git a/tests/integration/compose-symlinks.py b/tests/integration/compose-symlinks.py
index 061d8f8e4..7b807d1d5 100644
--- a/tests/integration/compose-symlinks.py
+++ b/tests/integration/compose-symlinks.py
@@ -10,10 +10,7 @@ from buildstream.testing import cli_integration as cli # pylint: disable=unused
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# Test that staging a file inside a directory symlink fails.
@@ -26,11 +23,11 @@ def test_compose_symlinks(cli, tmpdir, datafiles):
# Symlinks do not survive being placed in a source distribution
# ('setup.py sdist'), so we have to create the one we need here.
- project_files = os.path.join(project, 'files', 'compose-symlinks', 'base')
- symlink_file = os.path.join(project_files, 'sbin')
- os.symlink(os.path.join('usr', 'sbin'), symlink_file, target_is_directory=True)
+ project_files = os.path.join(project, "files", "compose-symlinks", "base")
+ symlink_file = os.path.join(project_files, "sbin")
+ os.symlink(os.path.join("usr", "sbin"), symlink_file, target_is_directory=True)
- result = cli.run(project=project, args=['build', 'compose-symlinks/compose.bst'])
+ result = cli.run(project=project, args=["build", "compose-symlinks/compose.bst"])
assert result.exit_code == -1
- assert 'Destination is a symlink, not a directory: /sbin' in result.stderr
+ assert "Destination is a symlink, not a directory: /sbin" in result.stderr
diff --git a/tests/integration/compose.py b/tests/integration/compose.py
index 3562ed94b..2d68327e4 100644
--- a/tests/integration/compose.py
+++ b/tests/integration/compose.py
@@ -14,10 +14,7 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
def create_compose_element(name, path, config=None):
@@ -25,83 +22,115 @@ def create_compose_element(name, path, config=None):
config = {}
element = {
- 'kind': 'compose',
- 'depends': [{
- 'filename': 'compose/amhello.bst',
- 'type': 'build'
- }, {
- 'filename': 'compose/test.bst',
- 'type': 'build'
- }],
- 'config': config
+ "kind": "compose",
+ "depends": [
+ {"filename": "compose/amhello.bst", "type": "build"},
+ {"filename": "compose/test.bst", "type": "build"},
+ ],
+ "config": config,
}
os.makedirs(os.path.dirname(os.path.join(path, name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("include_domains,exclude_domains,expected", [
- # Test flat inclusion
- ([], [], ['/usr', '/usr/bin',
- '/usr/share',
- '/usr/bin/hello',
- '/usr/share/doc', '/usr/share/doc/amhello',
- '/usr/share/doc/amhello/README',
- '/tests', '/tests/test']),
- # Test only runtime
- (['runtime'], [], ['/usr', '/usr/share',
- '/usr/bin', '/usr/bin/hello']),
- # Test with runtime and doc
- (['runtime', 'doc'], [], ['/usr', '/usr/share',
- '/usr/bin', '/usr/bin/hello',
- '/usr/share/doc', '/usr/share/doc/amhello',
- '/usr/share/doc/amhello/README']),
- # Test with only runtime excluded
- ([], ['runtime'], ['/usr', '/usr/share',
- '/usr/share/doc', '/usr/share/doc/amhello',
- '/usr/share/doc/amhello/README',
- '/tests', '/tests/test']),
- # Test with runtime and doc excluded
- ([], ['runtime', 'doc'], ['/usr', '/usr/share',
- '/tests', '/tests/test']),
- # Test with runtime simultaneously in- and excluded
- (['runtime'], ['runtime'], ['/usr', '/usr/share']),
- # Test with runtime included and doc excluded
- (['runtime'], ['doc'], ['/usr', '/usr/share',
- '/usr/bin', '/usr/bin/hello']),
- # Test including a custom 'test' domain
- (['test'], [], ['/usr', '/usr/share',
- '/tests', '/tests/test']),
- # Test excluding a custom 'test' domain
- ([], ['test'], ['/usr', '/usr/bin',
- '/usr/share',
- '/usr/bin/hello',
- '/usr/share/doc', '/usr/share/doc/amhello',
- '/usr/share/doc/amhello/README'])
-])
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-def test_compose_include(cli, datafiles, include_domains,
- exclude_domains, expected):
+@pytest.mark.parametrize(
+ "include_domains,exclude_domains,expected",
+ [
+ # Test flat inclusion
+ (
+ [],
+ [],
+ [
+ "/usr",
+ "/usr/bin",
+ "/usr/share",
+ "/usr/bin/hello",
+ "/usr/share/doc",
+ "/usr/share/doc/amhello",
+ "/usr/share/doc/amhello/README",
+ "/tests",
+ "/tests/test",
+ ],
+ ),
+ # Test only runtime
+ (["runtime"], [], ["/usr", "/usr/share", "/usr/bin", "/usr/bin/hello"]),
+ # Test with runtime and doc
+ (
+ ["runtime", "doc"],
+ [],
+ [
+ "/usr",
+ "/usr/share",
+ "/usr/bin",
+ "/usr/bin/hello",
+ "/usr/share/doc",
+ "/usr/share/doc/amhello",
+ "/usr/share/doc/amhello/README",
+ ],
+ ),
+ # Test with only runtime excluded
+ (
+ [],
+ ["runtime"],
+ [
+ "/usr",
+ "/usr/share",
+ "/usr/share/doc",
+ "/usr/share/doc/amhello",
+ "/usr/share/doc/amhello/README",
+ "/tests",
+ "/tests/test",
+ ],
+ ),
+ # Test with runtime and doc excluded
+ ([], ["runtime", "doc"], ["/usr", "/usr/share", "/tests", "/tests/test"]),
+ # Test with runtime simultaneously in- and excluded
+ (["runtime"], ["runtime"], ["/usr", "/usr/share"]),
+ # Test with runtime included and doc excluded
+ (["runtime"], ["doc"], ["/usr", "/usr/share", "/usr/bin", "/usr/bin/hello"]),
+ # Test including a custom 'test' domain
+ (["test"], [], ["/usr", "/usr/share", "/tests", "/tests/test"]),
+ # Test excluding a custom 'test' domain
+ (
+ [],
+ ["test"],
+ [
+ "/usr",
+ "/usr/bin",
+ "/usr/share",
+ "/usr/bin/hello",
+ "/usr/share/doc",
+ "/usr/share/doc/amhello",
+ "/usr/share/doc/amhello/README",
+ ],
+ ),
+ ],
+)
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+def test_compose_include(cli, datafiles, include_domains, exclude_domains, expected):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'compose/compose-amhello.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "compose/compose-amhello.bst"
# Create a yaml configuration from the specified include and
# exclude domains
- config = {
- 'include': include_domains,
- 'exclude': exclude_domains
- }
+ config = {"include": include_domains, "exclude": exclude_domains}
create_compose_element(element_name, element_path, config=config)
- result = cli.run(project=project, args=['source', 'track', 'compose/amhello.bst'])
+ result = cli.run(project=project, args=["source", "track", "compose/amhello.bst"])
assert result.exit_code == 0
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
assert set(walk_dir(checkout)) == set(expected)
diff --git a/tests/integration/filter.py b/tests/integration/filter.py
index 2a32d4010..6e95915ee 100644
--- a/tests/integration/filter.py
+++ b/tests/integration/filter.py
@@ -13,27 +13,36 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'project'
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_filter_pass_integration(datafiles, cli):
project = str(datafiles)
# Passing integration commands should build nicely
- result = cli.run(project=project, args=['build', 'filter/filter.bst'])
+ result = cli.run(project=project, args=["build", "filter/filter.bst"])
result.assert_success()
# Checking out the element should work
- checkout_dir = os.path.join(project, 'filter')
- result = cli.run(project=project, args=['artifact', 'checkout', '--integrate', '--hardlinks',
- '--directory', checkout_dir, 'filter/filter.bst'])
+ checkout_dir = os.path.join(project, "filter")
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "--integrate",
+ "--hardlinks",
+ "--directory",
+ checkout_dir,
+ "filter/filter.bst",
+ ],
+ )
result.assert_success()
# Check that the integration command was run
- assert_contains(checkout_dir, ['/foo'])
+ assert_contains(checkout_dir, ["/foo"])
shutil.rmtree(checkout_dir)
diff --git a/tests/integration/import.py b/tests/integration/import.py
index bac92cadf..b7f056bac 100644
--- a/tests/integration/import.py
+++ b/tests/integration/import.py
@@ -13,50 +13,54 @@ from buildstream.testing.integration import walk_dir
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
def create_import_element(name, path, source, target, source_path):
element = {
- 'kind': 'import',
- 'sources': [{
- 'kind': 'local',
- 'path': source_path
- }],
- 'config': {
- 'source': source,
- 'target': target
- }
+ "kind": "import",
+ "sources": [{"kind": "local", "path": source_path}],
+ "config": {"source": source, "target": target},
}
os.makedirs(os.path.dirname(os.path.join(path, name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("source,target,path,expected", [
- ('/', '/', 'files/import-source', ['/test.txt', '/subdir',
- '/subdir/test.txt']),
- ('/subdir', '/', 'files/import-source', ['/test.txt']),
- ('/', '/', 'files/import-source/subdir', ['/test.txt']),
- ('/', '/output', 'files/import-source', ['/output', '/output/test.txt',
- '/output/subdir',
- '/output/subdir/test.txt']),
-])
+@pytest.mark.parametrize(
+ "source,target,path,expected",
+ [
+ ("/", "/", "files/import-source", ["/test.txt", "/subdir", "/subdir/test.txt"]),
+ ("/subdir", "/", "files/import-source", ["/test.txt"]),
+ ("/", "/", "files/import-source/subdir", ["/test.txt"]),
+ (
+ "/",
+ "/output",
+ "files/import-source",
+ [
+ "/output",
+ "/output/test.txt",
+ "/output/subdir",
+ "/output/subdir/test.txt",
+ ],
+ ),
+ ],
+)
def test_import(cli, datafiles, source, target, path, expected):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'import/import.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "import/import.bst"
create_import_element(element_name, element_path, source, target, path)
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert res.exit_code == 0
assert set(walk_dir(checkout)) == set(expected)
diff --git a/tests/integration/make.py b/tests/integration/make.py
index 664e7ca7a..78f4ba8d7 100644
--- a/tests/integration/make.py
+++ b/tests/integration/make.py
@@ -12,41 +12,44 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# Test that a make build 'works' - we use the make sample
# makehello project for this.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_make_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'make/makehello.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "make/makehello.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/usr', '/usr/bin',
- '/usr/bin/hello'])
+ assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
# Test running an executable built with make
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_make_run(cli, datafiles):
project = str(datafiles)
- element_name = 'make/makehello.bst'
+ element_name = "make/makehello.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['shell', element_name, '/usr/bin/hello'])
+ result = cli.run(project=project, args=["shell", element_name, "/usr/bin/hello"])
assert result.exit_code == 0
- assert result.output == 'Hello, world\n'
+ assert result.output == "Hello, world\n"
diff --git a/tests/integration/manual.py b/tests/integration/manual.py
index 2ac7f74d0..8db8b9671 100644
--- a/tests/integration/manual.py
+++ b/tests/integration/manual.py
@@ -13,141 +13,169 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
def create_manual_element(name, path, config, variables, environment):
element = {
- 'kind': 'manual',
- 'depends': [{
- 'filename': 'base.bst',
- 'type': 'build'
- }],
- 'config': config,
- 'variables': variables,
- 'environment': environment
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "config": config,
+ "variables": variables,
+ "environment": environment,
}
os.makedirs(os.path.dirname(os.path.join(path, name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_manual_element(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'import/import.bst'
-
- create_manual_element(element_name, element_path, {
- 'configure-commands': ["echo './configure' >> test"],
- 'build-commands': ["echo 'make' >> test"],
- 'install-commands': [
- "echo 'make install' >> test",
- "cp test %{install-root}"
- ],
- 'strip-commands': ["echo 'strip' >> %{install-root}/test"]
- }, {}, {})
-
- res = cli.run(project=project, args=['build', element_name])
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "import/import.bst"
+
+ create_manual_element(
+ element_name,
+ element_path,
+ {
+ "configure-commands": ["echo './configure' >> test"],
+ "build-commands": ["echo 'make' >> test"],
+ "install-commands": [
+ "echo 'make install' >> test",
+ "cp test %{install-root}",
+ ],
+ "strip-commands": ["echo 'strip' >> %{install-root}/test"],
+ },
+ {},
+ {},
+ )
+
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout, 'test')) as f:
+ with open(os.path.join(checkout, "test")) as f:
text = f.read()
- assert text == """./configure
+ assert (
+ text
+ == """./configure
make
make install
strip
"""
+ )
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_manual_element_environment(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'import/import.bst'
-
- create_manual_element(element_name, element_path, {
- 'install-commands': [
- "echo $V >> test",
- "cp test %{install-root}"
- ]
- }, {
- }, {
- 'V': 2
- })
-
- res = cli.run(project=project, args=['build', element_name])
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "import/import.bst"
+
+ create_manual_element(
+ element_name,
+ element_path,
+ {"install-commands": ["echo $V >> test", "cp test %{install-root}"]},
+ {},
+ {"V": 2},
+ )
+
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout, 'test')) as f:
+ with open(os.path.join(checkout, "test")) as f:
text = f.read()
assert text == "2\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_manual_element_noparallel(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'import/import.bst'
-
- create_manual_element(element_name, element_path, {
- 'install-commands': [
- "echo $MAKEFLAGS >> test",
- "echo $V >> test",
- "cp test %{install-root}"
- ]
- }, {
- 'notparallel': True
- }, {
- 'MAKEFLAGS': '-j%{max-jobs} -Wall',
- 'V': 2
- })
-
- res = cli.run(project=project, args=['build', element_name])
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "import/import.bst"
+
+ create_manual_element(
+ element_name,
+ element_path,
+ {
+ "install-commands": [
+ "echo $MAKEFLAGS >> test",
+ "echo $V >> test",
+ "cp test %{install-root}",
+ ]
+ },
+ {"notparallel": True},
+ {"MAKEFLAGS": "-j%{max-jobs} -Wall", "V": 2},
+ )
+
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout, 'test')) as f:
+ with open(os.path.join(checkout, "test")) as f:
text = f.read()
- assert text == """-j1 -Wall
+ assert (
+ text
+ == """-j1 -Wall
2
"""
+ )
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_manual_element_logging(cli, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements')
- element_name = 'import/import.bst'
-
- create_manual_element(element_name, element_path, {
- 'configure-commands': ["echo configure"],
- 'build-commands': ["echo build"],
- 'install-commands': ["echo install"],
- 'strip-commands': ["echo strip"]
- }, {}, {})
-
- res = cli.run(project=project, args=['build', element_name])
+ element_path = os.path.join(project, "elements")
+ element_name = "import/import.bst"
+
+ create_manual_element(
+ element_name,
+ element_path,
+ {
+ "configure-commands": ["echo configure"],
+ "build-commands": ["echo build"],
+ "install-commands": ["echo install"],
+ "strip-commands": ["echo strip"],
+ },
+ {},
+ {},
+ )
+
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
# Verify that individual commands are logged
diff --git a/tests/integration/messages.py b/tests/integration/messages.py
index 42725fc5b..8210664e5 100644
--- a/tests/integration/messages.py
+++ b/tests/integration/messages.py
@@ -33,32 +33,27 @@ pytestmark = pytest.mark.integration
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_disable_message_lines(cli, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements')
- element_name = 'message.bst'
+ element_path = os.path.join(project, "elements")
+ element_name = "message.bst"
element = {
- 'kind': 'manual',
- 'depends': [{
- 'filename': 'base.bst'
- }],
- 'config': {
- 'build-commands':
- ['echo "Silly message"'],
- 'strip-commands': []
- }
+ "kind": "manual",
+ "depends": [{"filename": "base.bst"}],
+ "config": {"build-commands": ['echo "Silly message"'], "strip-commands": []},
}
- os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
+ os.makedirs(
+ os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
+ )
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# First we check that we get the "Silly message"
@@ -68,43 +63,47 @@ def test_disable_message_lines(cli, datafiles):
# Let's now build it again, but with --message-lines 0
cli.remove_artifact_from_cache(project, element_name)
- result = cli.run(project=project, args=["--message-lines", "0",
- "build", element_name])
+ result = cli.run(
+ project=project, args=["--message-lines", "0", "build", element_name]
+ )
result.assert_success()
assert "Message contains " not in result.stderr
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_disable_error_lines(cli, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements')
- element_name = 'message.bst'
+ element_path = os.path.join(project, "elements")
+ element_name = "message.bst"
element = {
- 'kind': 'manual',
- 'depends': [{
- 'filename': 'base.bst'
- }],
- 'config': {
- 'build-commands':
- ['This is a syntax error > >'],
- 'strip-commands': []
- }
+ "kind": "manual",
+ "depends": [{"filename": "base.bst"}],
+ "config": {
+ "build-commands": ["This is a syntax error > >"],
+ "strip-commands": [],
+ },
}
- os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
+ os.makedirs(
+ os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
+ )
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# First we check that we get the syntax error
- result = cli.run(project=project, args=["--error-lines", "0",
- "build", element_name])
+ result = cli.run(
+ project=project, args=["--error-lines", "0", "build", element_name]
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
assert "This is a syntax error" in result.stderr
# Let's now build it again, but with --error-lines 0
cli.remove_artifact_from_cache(project, element_name)
- result = cli.run(project=project, args=["--error-lines", "0",
- "build", element_name])
+ result = cli.run(
+ project=project, args=["--error-lines", "0", "build", element_name]
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Printing the last" not in result.stderr
diff --git a/tests/integration/pip_element.py b/tests/integration/pip_element.py
index da0badcb3..d85cb5f03 100644
--- a/tests/integration/pip_element.py
+++ b/tests/integration/pip_element.py
@@ -17,119 +17,134 @@ from tests.testutils import setup_pypi_repo # pylint: disable=unused-import
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_pip_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'pip/hello.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "pip/hello.bst"
element = {
- 'kind': 'pip',
- 'variables': {
- 'pip': 'pip3'
- },
- 'depends': [{
- 'filename': 'base.bst'
- }],
- 'sources': [{
- 'kind': 'tar',
- 'url': 'file://{}/files/hello.tar.xz'.format(project),
- 'ref': 'ad96570b552498807abec33c06210bf68378d854ced6753b77916c5ed517610d'
-
- }]
+ "kind": "pip",
+ "variables": {"pip": "pip3"},
+ "depends": [{"filename": "base.bst"}],
+ "sources": [
+ {
+ "kind": "tar",
+ "url": "file://{}/files/hello.tar.xz".format(project),
+ "ref": "ad96570b552498807abec33c06210bf68378d854ced6753b77916c5ed517610d",
+ }
+ ],
}
- os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
+ os.makedirs(
+ os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
+ )
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
- '/usr/bin/hello', '/usr/lib/python3.6'])
+ assert_contains(
+ checkout,
+ ["/usr", "/usr/lib", "/usr/bin", "/usr/bin/hello", "/usr/lib/python3.6"],
+ )
# Test running an executable built with pip
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_pip_run(cli, datafiles):
# Create and build our test element
test_pip_build(cli, datafiles)
project = str(datafiles)
- element_name = 'pip/hello.bst'
+ element_name = "pip/hello.bst"
- result = cli.run(project=project, args=['shell', element_name, '/usr/bin/hello'])
+ result = cli.run(project=project, args=["shell", element_name, "/usr/bin/hello"])
assert result.exit_code == 0
- assert result.output == 'Hello, world!\n'
+ assert result.output == "Hello, world!\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_pip_element_should_install_pip_deps(cli, datafiles, setup_pypi_repo):
project = str(datafiles)
- elements_path = os.path.join(project, 'elements')
- element_name = 'pip/hello.bst'
+ elements_path = os.path.join(project, "elements")
+ element_name = "pip/hello.bst"
# check that exotically named packages are imported correctly
- myreqs_packages = 'alohalib'
- dependencies = ['app2', 'app.3', 'app-4', 'app_5', 'app.no.6', 'app-no-7', 'app_no_8']
- mock_packages = {
- myreqs_packages: {
- package: {} for package in dependencies
- }
- }
+ myreqs_packages = "alohalib"
+ dependencies = [
+ "app2",
+ "app.3",
+ "app-4",
+ "app_5",
+ "app.no.6",
+ "app-no-7",
+ "app_no_8",
+ ]
+ mock_packages = {myreqs_packages: {package: {} for package in dependencies}}
# set up directories
- pypi_repo = os.path.join(project, 'files', 'pypi-repo')
+ pypi_repo = os.path.join(project, "files", "pypi-repo")
os.makedirs(pypi_repo, exist_ok=True)
- os.makedirs(os.path.dirname(os.path.join(elements_path, element_name)), exist_ok=True)
+ os.makedirs(
+ os.path.dirname(os.path.join(elements_path, element_name)), exist_ok=True
+ )
setup_pypi_repo(mock_packages, pypi_repo)
# create pip element
element = {
- 'kind': 'pip',
- 'variables': {
- 'pip': 'pip3'
- },
- 'depends': [{
- 'filename': 'base.bst'
- }],
- 'sources': [
+ "kind": "pip",
+ "variables": {"pip": "pip3"},
+ "depends": [{"filename": "base.bst"}],
+ "sources": [
{
- 'kind': 'tar',
- 'url': 'file://{}/files/hello.tar.xz'.format(project),
+ "kind": "tar",
+ "url": "file://{}/files/hello.tar.xz".format(project),
# FIXME: remove hardcoded ref once issue #1010 is closed
- 'ref': 'ad96570b552498807abec33c06210bf68378d854ced6753b77916c5ed517610d'
+ "ref": "ad96570b552498807abec33c06210bf68378d854ced6753b77916c5ed517610d",
},
{
- 'kind': 'pip',
- 'url': 'file://{}'.format(os.path.realpath(pypi_repo)),
- 'packages': [myreqs_packages],
- }
- ]
+ "kind": "pip",
+ "url": "file://{}".format(os.path.realpath(pypi_repo)),
+ "packages": [myreqs_packages],
+ },
+ ],
}
_yaml.roundtrip_dump(element, os.path.join(elements_path, element_name))
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
# get installed packages in sandbox
installed_packages = set(
- cli.run(project=project, args=['shell', element_name, 'pip3', 'freeze']).output.split('\n'))
+ cli.run(
+ project=project, args=["shell", element_name, "pip3", "freeze"]
+ ).output.split("\n")
+ )
# compare with packages that are expected to be installed
- pip_source_packages = {package.replace('_', "-") + '==0.1' for package in dependencies + [myreqs_packages]}
+ pip_source_packages = {
+ package.replace("_", "-") + "==0.1"
+ for package in dependencies + [myreqs_packages]
+ }
assert pip_source_packages.issubset(installed_packages)
diff --git a/tests/integration/pip_source.py b/tests/integration/pip_source.py
index c221910a6..bc9a4d94c 100644
--- a/tests/integration/pip_source.py
+++ b/tests/integration/pip_source.py
@@ -16,180 +16,204 @@ from tests.testutils.python_repo import setup_pypi_repo # pylint: disable=unuse
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
def test_pip_source_import_packages(cli, datafiles, setup_pypi_repo):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'pip/hello.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "pip/hello.bst"
# check that exotically named packages are imported correctly
- myreqs_packages = 'hellolib'
- dependencies = ['app2', 'app.3', 'app-4', 'app_5', 'app.no.6', 'app-no-7', 'app_no_8']
- mock_packages = {
- myreqs_packages: {
- package: {} for package in dependencies
- }
- }
+ myreqs_packages = "hellolib"
+ dependencies = [
+ "app2",
+ "app.3",
+ "app-4",
+ "app_5",
+ "app.no.6",
+ "app-no-7",
+ "app_no_8",
+ ]
+ mock_packages = {myreqs_packages: {package: {} for package in dependencies}}
# create mock pypi repository
- pypi_repo = os.path.join(project, 'files', 'pypi-repo')
+ pypi_repo = os.path.join(project, "files", "pypi-repo")
os.makedirs(pypi_repo, exist_ok=True)
setup_pypi_repo(mock_packages, pypi_repo)
element = {
- 'kind': 'import',
- 'sources': [
+ "kind": "import",
+ "sources": [
+ {"kind": "local", "path": "files/pip-source"},
{
- 'kind': 'local',
- 'path': 'files/pip-source'
+ "kind": "pip",
+ "url": "file://{}".format(os.path.realpath(pypi_repo)),
+ "packages": [myreqs_packages],
},
- {
- 'kind': 'pip',
- 'url': 'file://{}'.format(os.path.realpath(pypi_repo)),
- 'packages': [myreqs_packages]
- }
- ]
+ ],
}
- os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
+ os.makedirs(
+ os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
+ )
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/.bst_pip_downloads',
- '/.bst_pip_downloads/hellolib-0.1.tar.gz',
- '/.bst_pip_downloads/app2-0.1.tar.gz',
- '/.bst_pip_downloads/app.3-0.1.tar.gz',
- '/.bst_pip_downloads/app-4-0.1.tar.gz',
- '/.bst_pip_downloads/app_5-0.1.tar.gz',
- '/.bst_pip_downloads/app.no.6-0.1.tar.gz',
- '/.bst_pip_downloads/app-no-7-0.1.tar.gz',
- '/.bst_pip_downloads/app_no_8-0.1.tar.gz'])
+ assert_contains(
+ checkout,
+ [
+ "/.bst_pip_downloads",
+ "/.bst_pip_downloads/hellolib-0.1.tar.gz",
+ "/.bst_pip_downloads/app2-0.1.tar.gz",
+ "/.bst_pip_downloads/app.3-0.1.tar.gz",
+ "/.bst_pip_downloads/app-4-0.1.tar.gz",
+ "/.bst_pip_downloads/app_5-0.1.tar.gz",
+ "/.bst_pip_downloads/app.no.6-0.1.tar.gz",
+ "/.bst_pip_downloads/app-no-7-0.1.tar.gz",
+ "/.bst_pip_downloads/app_no_8-0.1.tar.gz",
+ ],
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_pip_source_import_requirements_files(cli, datafiles, setup_pypi_repo):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'pip/hello.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "pip/hello.bst"
# check that exotically named packages are imported correctly
- myreqs_packages = 'hellolib'
- dependencies = ['app2', 'app.3', 'app-4', 'app_5', 'app.no.6', 'app-no-7', 'app_no_8']
- mock_packages = {
- myreqs_packages: {
- package: {} for package in dependencies
- }
- }
+ myreqs_packages = "hellolib"
+ dependencies = [
+ "app2",
+ "app.3",
+ "app-4",
+ "app_5",
+ "app.no.6",
+ "app-no-7",
+ "app_no_8",
+ ]
+ mock_packages = {myreqs_packages: {package: {} for package in dependencies}}
# create mock pypi repository
- pypi_repo = os.path.join(project, 'files', 'pypi-repo')
+ pypi_repo = os.path.join(project, "files", "pypi-repo")
os.makedirs(pypi_repo, exist_ok=True)
setup_pypi_repo(mock_packages, pypi_repo)
element = {
- 'kind': 'import',
- 'sources': [
+ "kind": "import",
+ "sources": [
+ {"kind": "local", "path": "files/pip-source"},
{
- 'kind': 'local',
- 'path': 'files/pip-source'
+ "kind": "pip",
+ "url": "file://{}".format(os.path.realpath(pypi_repo)),
+ "requirements-files": ["myreqs.txt"],
},
- {
- 'kind': 'pip',
- 'url': 'file://{}'.format(os.path.realpath(pypi_repo)),
- 'requirements-files': ['myreqs.txt'],
- }
- ]
+ ],
}
- os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
+ os.makedirs(
+ os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
+ )
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
- assert_contains(checkout, ['/.bst_pip_downloads',
- '/.bst_pip_downloads/hellolib-0.1.tar.gz',
- '/.bst_pip_downloads/app2-0.1.tar.gz',
- '/.bst_pip_downloads/app.3-0.1.tar.gz',
- '/.bst_pip_downloads/app-4-0.1.tar.gz',
- '/.bst_pip_downloads/app_5-0.1.tar.gz',
- '/.bst_pip_downloads/app.no.6-0.1.tar.gz',
- '/.bst_pip_downloads/app-no-7-0.1.tar.gz',
- '/.bst_pip_downloads/app_no_8-0.1.tar.gz'])
+ assert_contains(
+ checkout,
+ [
+ "/.bst_pip_downloads",
+ "/.bst_pip_downloads/hellolib-0.1.tar.gz",
+ "/.bst_pip_downloads/app2-0.1.tar.gz",
+ "/.bst_pip_downloads/app.3-0.1.tar.gz",
+ "/.bst_pip_downloads/app-4-0.1.tar.gz",
+ "/.bst_pip_downloads/app_5-0.1.tar.gz",
+ "/.bst_pip_downloads/app.no.6-0.1.tar.gz",
+ "/.bst_pip_downloads/app-no-7-0.1.tar.gz",
+ "/.bst_pip_downloads/app_no_8-0.1.tar.gz",
+ ],
+ )
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_pip_source_build(cli, datafiles, setup_pypi_repo):
project = str(datafiles)
- element_path = os.path.join(project, 'elements')
- element_name = 'pip/hello.bst'
+ element_path = os.path.join(project, "elements")
+ element_name = "pip/hello.bst"
# check that exotically named packages are imported correctly
- myreqs_packages = 'hellolib'
- dependencies = ['app2', 'app.3', 'app-4', 'app_5', 'app.no.6', 'app-no-7', 'app_no_8']
- mock_packages = {
- myreqs_packages: {
- package: {} for package in dependencies
- }
- }
+ myreqs_packages = "hellolib"
+ dependencies = [
+ "app2",
+ "app.3",
+ "app-4",
+ "app_5",
+ "app.no.6",
+ "app-no-7",
+ "app_no_8",
+ ]
+ mock_packages = {myreqs_packages: {package: {} for package in dependencies}}
# create mock pypi repository
- pypi_repo = os.path.join(project, 'files', 'pypi-repo')
+ pypi_repo = os.path.join(project, "files", "pypi-repo")
os.makedirs(pypi_repo, exist_ok=True)
setup_pypi_repo(mock_packages, pypi_repo)
element = {
- 'kind': 'manual',
- 'depends': ['base.bst'],
- 'sources': [
+ "kind": "manual",
+ "depends": ["base.bst"],
+ "sources": [
+ {"kind": "local", "path": "files/pip-source"},
{
- 'kind': 'local',
- 'path': 'files/pip-source'
+ "kind": "pip",
+ "url": "file://{}".format(os.path.realpath(pypi_repo)),
+ "requirements-files": ["myreqs.txt"],
+ "packages": dependencies,
},
- {
- 'kind': 'pip',
- 'url': 'file://{}'.format(os.path.realpath(pypi_repo)),
- 'requirements-files': ['myreqs.txt'],
- 'packages': dependencies
- }
],
- 'config': {
- 'install-commands': [
- 'pip3 install --no-index --prefix %{install-root}/usr .bst_pip_downloads/*.tar.gz',
- 'install app1.py %{install-root}/usr/bin/'
+ "config": {
+ "install-commands": [
+ "pip3 install --no-index --prefix %{install-root}/usr .bst_pip_downloads/*.tar.gz",
+ "install app1.py %{install-root}/usr/bin/",
]
- }
+ },
}
- os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
+ os.makedirs(
+ os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
+ )
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- result = cli.run(project=project, args=['source', 'track', element_name])
+ result = cli.run(project=project, args=["source", "track", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['shell', element_name, '/usr/bin/app1.py'])
+ result = cli.run(project=project, args=["shell", element_name, "/usr/bin/app1.py"])
assert result.exit_code == 0
assert result.output == "Hello App1! This is hellolib\n"
diff --git a/tests/integration/project/files/pip-source/app1.py b/tests/integration/project/files/pip-source/app1.py
index ab1005ba4..b96d14b00 100644
--- a/tests/integration/project/files/pip-source/app1.py
+++ b/tests/integration/project/files/pip-source/app1.py
@@ -4,8 +4,8 @@ from hellolib import hello
def main():
- hello('App1')
+ hello("App1")
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/tests/integration/pullbuildtrees.py b/tests/integration/pullbuildtrees.py
index a1b188e5a..f0cf22f59 100644
--- a/tests/integration/pullbuildtrees.py
+++ b/tests/integration/pullbuildtrees.py
@@ -6,7 +6,10 @@ import shutil
import pytest
-from buildstream.testing import cli, cli_integration as cli2 # pylint: disable=unused-import
+from buildstream.testing import (
+ cli,
+ cli_integration as cli2,
+) # pylint: disable=unused-import
from buildstream.testing._utils.site import HAVE_SANDBOX
from buildstream._exceptions import ErrorDomain, LoadErrorReason
@@ -16,22 +19,21 @@ from tests.testutils import create_artifact_share
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# Remove artifact cache & set cli.config value of pull-buildtrees
# to false, which is the default user context. The cache has to be
# cleared as just forcefully removing the refpath leaves dangling objects.
def default_state(cli, tmpdir, share):
- shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': False},
- 'cachedir': str(tmpdir),
- 'cache': {'pull-buildtrees': False},
- })
+ shutil.rmtree(os.path.join(str(tmpdir), "cas"))
+ cli.configure(
+ {
+ "artifacts": {"url": share.repo, "push": False},
+ "cachedir": str(tmpdir),
+ "cache": {"pull-buildtrees": False},
+ }
+ )
# A test to capture the integration of the pullbuildtrees
@@ -39,50 +41,63 @@ def default_state(cli, tmpdir, share):
# directory of an element.
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_pullbuildtrees(cli2, tmpdir, datafiles):
project = str(datafiles)
- element_name = 'autotools/amhello.bst'
+ element_name = "autotools/amhello.bst"
cwd = str(tmpdir)
# Create artifact shares for pull & push testing
- with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
- create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
- create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
- cli2.configure({
- 'artifacts': {'url': share1.repo, 'push': True},
- 'cachedir': str(tmpdir),
- 'cache': {'cache-buildtrees': 'always'},
- })
+ with create_artifact_share(
+ os.path.join(str(tmpdir), "share1")
+ ) as share1, create_artifact_share(
+ os.path.join(str(tmpdir), "share2")
+ ) as share2, create_artifact_share(
+ os.path.join(str(tmpdir), "share3")
+ ) as share3:
+ cli2.configure(
+ {
+ "artifacts": {"url": share1.repo, "push": True},
+ "cachedir": str(tmpdir),
+ "cache": {"cache-buildtrees": "always"},
+ }
+ )
 # Build autotools element, check it is pushed, delete local copy
- result = cli2.run(project=project, args=['build', element_name])
+ result = cli2.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- assert cli2.get_element_state(project, element_name) == 'cached'
- assert share1.get_artifact(cli2.get_artifact_name(project, 'test', element_name))
+ assert cli2.get_element_state(project, element_name) == "cached"
+ assert share1.get_artifact(
+ cli2.get_artifact_name(project, "test", element_name)
+ )
default_state(cli2, tmpdir, share1)
 # Pull artifact with default config, assert that pulling again
 # doesn't create a pull job, then assert that pulling with buildtrees
 # set in user config creates a pull job.
- result = cli2.run(project=project, args=['artifact', 'pull', element_name])
+ result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name in result.get_pulled_elements()
- result = cli2.run(project=project, args=['artifact', 'pull', element_name])
+ result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name not in result.get_pulled_elements()
- cli2.configure({'cache': {'pull-buildtrees': True}})
- result = cli2.run(project=project, args=['artifact', 'pull', element_name])
+ cli2.configure({"cache": {"pull-buildtrees": True}})
+ result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name in result.get_pulled_elements()
default_state(cli2, tmpdir, share1)
# Pull artifact with default config, then assert that pulling
# with buildtrees cli flag set creates a pull job.
# Also assert that the buildtree is added to the local CAS.
- result = cli2.run(project=project, args=['artifact', 'pull', element_name])
+ result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name in result.get_pulled_elements()
- artifact_name = cli2.get_artifact_name(project, 'test', element_name)
+ artifact_name = cli2.get_artifact_name(project, "test", element_name)
with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
- result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
+ result = cli2.run(
+ project=project,
+ args=["--pull-buildtrees", "artifact", "pull", element_name],
+ )
assert element_name in result.get_pulled_elements()
with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
@@ -91,22 +106,28 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
# Pull artifact with pullbuildtrees set in user config, then assert
 # that pulling with the same user config doesn't create a pull job,
 # nor when the buildtrees cli flag is set.
- cli2.configure({'cache': {'pull-buildtrees': True}})
- result = cli2.run(project=project, args=['artifact', 'pull', element_name])
+ cli2.configure({"cache": {"pull-buildtrees": True}})
+ result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name in result.get_pulled_elements()
- result = cli2.run(project=project, args=['artifact', 'pull', element_name])
+ result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name not in result.get_pulled_elements()
- result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
+ result = cli2.run(
+ project=project,
+ args=["--pull-buildtrees", "artifact", "pull", element_name],
+ )
assert element_name not in result.get_pulled_elements()
default_state(cli2, tmpdir, share1)
# Pull artifact with default config and buildtrees cli flag set, then assert
# that pulling with pullbuildtrees set in user config doesn't create a pull
# job.
- result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
+ result = cli2.run(
+ project=project,
+ args=["--pull-buildtrees", "artifact", "pull", element_name],
+ )
assert element_name in result.get_pulled_elements()
- cli2.configure({'cache': {'pull-buildtrees': True}})
- result = cli2.run(project=project, args=['artifact', 'pull', element_name])
+ cli2.configure({"cache": {"pull-buildtrees": True}})
+ result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name not in result.get_pulled_elements()
default_state(cli2, tmpdir, share1)
@@ -114,73 +135,90 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
# can't be pushed to an artifact share, then assert that a complete build element
# can be. This will attempt a partial pull from share1 and then a partial push
# to share2
- result = cli2.run(project=project, args=['artifact', 'pull', element_name])
+ result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name in result.get_pulled_elements()
- cli2.configure({'artifacts': {'url': share2.repo, 'push': True}})
- result = cli2.run(project=project, args=['artifact', 'push', element_name])
+ cli2.configure({"artifacts": {"url": share2.repo, "push": True}})
+ result = cli2.run(project=project, args=["artifact", "push", element_name])
assert element_name not in result.get_pushed_elements()
- assert not share2.get_artifact(cli2.get_artifact_name(project, 'test', element_name))
+ assert not share2.get_artifact(
+ cli2.get_artifact_name(project, "test", element_name)
+ )
# Assert that after pulling the missing buildtree the element artifact can be
# successfully pushed to the remote. This will attempt to pull the buildtree
# from share1 and then a 'complete' push to share2
- cli2.configure({'artifacts': {'url': share1.repo, 'push': False}})
- result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
+ cli2.configure({"artifacts": {"url": share1.repo, "push": False}})
+ result = cli2.run(
+ project=project,
+ args=["--pull-buildtrees", "artifact", "pull", element_name],
+ )
assert element_name in result.get_pulled_elements()
- cli2.configure({'artifacts': {'url': share2.repo, 'push': True}})
- result = cli2.run(project=project, args=['artifact', 'push', element_name])
+ cli2.configure({"artifacts": {"url": share2.repo, "push": True}})
+ result = cli2.run(project=project, args=["artifact", "push", element_name])
assert element_name in result.get_pushed_elements()
- assert share2.get_artifact(cli2.get_artifact_name(project, 'test', element_name))
+ assert share2.get_artifact(
+ cli2.get_artifact_name(project, "test", element_name)
+ )
default_state(cli2, tmpdir, share1)
# Assert that bst artifact push will automatically attempt to pull a missing buildtree
 # if pull-buildtrees is set; however, as share3 is the only defined remote and is empty,
# assert that no element artifact buildtrees are pulled (no available remote buildtree) and thus the
# artifact cannot be pushed.
- result = cli2.run(project=project, args=['artifact', 'pull', element_name])
+ result = cli2.run(project=project, args=["artifact", "pull", element_name])
assert element_name in result.get_pulled_elements()
- cli2.configure({'artifacts': {'url': share3.repo, 'push': True}})
- result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'push', element_name])
+ cli2.configure({"artifacts": {"url": share3.repo, "push": True}})
+ result = cli2.run(
+ project=project,
+ args=["--pull-buildtrees", "artifact", "push", element_name],
+ )
assert "Attempting to fetch missing artifact buildtrees" in result.stderr
assert element_name not in result.get_pulled_elements()
with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert not buildtreedir
assert element_name not in result.get_pushed_elements()
- assert not share3.get_artifact(cli2.get_artifact_name(project, 'test', element_name))
+ assert not share3.get_artifact(
+ cli2.get_artifact_name(project, "test", element_name)
+ )
 # Assert that if we add an extra remote that has the buildtree artifact cached, bst artifact push will
 # automatically attempt to pull it and will be successful, leading to the full artifact being pushed
 # to the empty share3. This gives the ability to attempt to push currently partial artifacts to a remote,
 # without explicitly requiring a bst artifact pull.
- cli2.configure({'artifacts': [{'url': share1.repo, 'push': False}, {'url': share3.repo, 'push': True}]})
- result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'push', element_name])
+ cli2.configure(
+ {
+ "artifacts": [
+ {"url": share1.repo, "push": False},
+ {"url": share3.repo, "push": True},
+ ]
+ }
+ )
+ result = cli2.run(
+ project=project,
+ args=["--pull-buildtrees", "artifact", "push", element_name],
+ )
assert "Attempting to fetch missing artifact buildtrees" in result.stderr
assert element_name in result.get_pulled_elements()
with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
assert os.path.isdir(buildtreedir)
assert element_name in result.get_pushed_elements()
- assert share3.get_artifact(cli2.get_artifact_name(project, 'test', element_name))
+ assert share3.get_artifact(
+ cli2.get_artifact_name(project, "test", element_name)
+ )
# Ensure that only valid pull-buildtrees boolean options make it through the loading
# process.
-@pytest.mark.parametrize("value,success", [
- (True, True),
- (False, True),
- ("pony", False),
- ("1", False)
-])
+@pytest.mark.parametrize(
+ "value,success", [(True, True), (False, True), ("pony", False), ("1", False)]
+)
@pytest.mark.datafiles(DATA_DIR)
def test_invalid_cache_pullbuildtrees(cli, datafiles, value, success):
project = str(datafiles)
- cli.configure({
- 'cache': {
- 'pull-buildtrees': value,
- }
- })
+ cli.configure({"cache": {"pull-buildtrees": value,}})
- res = cli.run(project=project, args=['workspace', 'list'])
+ res = cli.run(project=project, args=["workspace", "list"])
if success:
res.assert_success()
else:
diff --git a/tests/integration/sandbox-bwrap.py b/tests/integration/sandbox-bwrap.py
index f48c75cbd..6f33275e9 100644
--- a/tests/integration/sandbox-bwrap.py
+++ b/tests/integration/sandbox-bwrap.py
@@ -13,50 +13,55 @@ from buildstream.testing._utils.site import HAVE_SANDBOX, HAVE_BWRAP_JSON_STATUS
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# Bubblewrap sandbox doesn't remove the dirs it created during its execution,
 # so BuildStream tries to remove them to be helpful. BuildStream should be extra
# careful when those folders already exist and should not touch them, though.
-@pytest.mark.skipif(HAVE_SANDBOX != 'bwrap', reason='Only available with bubblewrap')
+@pytest.mark.skipif(HAVE_SANDBOX != "bwrap", reason="Only available with bubblewrap")
@pytest.mark.datafiles(DATA_DIR)
def test_sandbox_bwrap_cleanup_build(cli, datafiles):
project = str(datafiles)
# This element depends on a base image with non-empty `/tmp` folder.
- element_name = 'sandbox-bwrap/test-cleanup.bst'
+ element_name = "sandbox-bwrap/test-cleanup.bst"
# Here, BuildStream should not attempt any rmdir etc.
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
-@pytest.mark.skipif(HAVE_SANDBOX != 'bwrap', reason='Only available with bubblewrap')
-@pytest.mark.skipif(not HAVE_BWRAP_JSON_STATUS, reason='Only available with bubblewrap supporting --json-status-fd')
+@pytest.mark.skipif(HAVE_SANDBOX != "bwrap", reason="Only available with bubblewrap")
+@pytest.mark.skipif(
+ not HAVE_BWRAP_JSON_STATUS,
+ reason="Only available with bubblewrap supporting --json-status-fd",
+)
@pytest.mark.datafiles(DATA_DIR)
def test_sandbox_bwrap_distinguish_setup_error(cli, datafiles):
project = str(datafiles)
- element_name = 'sandbox-bwrap/non-executable-shell.bst'
+ element_name = "sandbox-bwrap/non-executable-shell.bst"
- result = cli.run(project=project, args=['build', element_name])
- result.assert_task_error(error_domain=ErrorDomain.SANDBOX, error_reason="bwrap-sandbox-fail")
+ result = cli.run(project=project, args=["build", element_name])
+ result.assert_task_error(
+ error_domain=ErrorDomain.SANDBOX, error_reason="bwrap-sandbox-fail"
+ )
-@pytest.mark.skipif(HAVE_SANDBOX != 'bwrap', reason='Only available with bubblewrap')
+@pytest.mark.skipif(HAVE_SANDBOX != "bwrap", reason="Only available with bubblewrap")
@pytest.mark.datafiles(DATA_DIR)
def test_sandbox_bwrap_return_subprocess(cli, datafiles):
project = str(datafiles)
- element_name = 'sandbox-bwrap/command-exit-42.bst'
-
- cli.configure({
- "logging": {
- "message-format": "%{element}|%{message}",
- },
- })
-
- result = cli.run(project=project, args=['build', element_name])
- result.assert_task_error(error_domain=ErrorDomain.SANDBOX, error_reason="command-failed")
- assert "sandbox-bwrap/command-exit-42.bst|Command failed with exitcode 42" in result.stderr
+ element_name = "sandbox-bwrap/command-exit-42.bst"
+
+ cli.configure(
+ {"logging": {"message-format": "%{element}|%{message}",},}
+ )
+
+ result = cli.run(project=project, args=["build", element_name])
+ result.assert_task_error(
+ error_domain=ErrorDomain.SANDBOX, error_reason="command-failed"
+ )
+ assert (
+ "sandbox-bwrap/command-exit-42.bst|Command failed with exitcode 42"
+ in result.stderr
+ )
diff --git a/tests/integration/script.py b/tests/integration/script.py
index 0e88ae53c..4f44feae4 100644
--- a/tests/integration/script.py
+++ b/tests/integration/script.py
@@ -12,10 +12,7 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
def create_script_element(name, path, config=None, variables=None):
@@ -26,218 +23,282 @@ def create_script_element(name, path, config=None, variables=None):
variables = {}
element = {
- 'kind': 'script',
- 'depends': [{
- 'filename': 'base.bst',
- 'type': 'build'
- }],
- 'config': config,
- 'variables': variables
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "config": config,
+ "variables": variables,
}
os.makedirs(os.path.dirname(os.path.join(path, name)), exist_ok=True)
_yaml.roundtrip_dump(element, os.path.join(path, name))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_script(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'script/script-layout.bst'
-
- create_script_element(element_name, element_path,
- config={
- 'commands': [
- "mkdir -p %{install-root}",
- "echo 'Hi' > %{install-root}/test"
- ],
- })
-
- res = cli.run(project=project, args=['build', element_name])
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "script/script-layout.bst"
+
+ create_script_element(
+ element_name,
+ element_path,
+ config={
+ "commands": [
+ "mkdir -p %{install-root}",
+ "echo 'Hi' > %{install-root}/test",
+ ],
+ },
+ )
+
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ res = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout, 'test')) as f:
+ with open(os.path.join(checkout, "test")) as f:
text = f.read()
assert text == "Hi\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_script_root(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'script/script-layout.bst'
-
- create_script_element(element_name, element_path,
- config={
- # Root-read only is False by default, we
- # want to check the default here
- # 'root-read-only': False,
- 'commands': [
- "mkdir -p %{install-root}",
- "echo 'I can write to root' > /test",
- "cp /test %{install-root}"
- ],
- })
-
- res = cli.run(project=project, args=['build', element_name])
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "script/script-layout.bst"
+
+ create_script_element(
+ element_name,
+ element_path,
+ config={
+ # Root-read only is False by default, we
+ # want to check the default here
+ # 'root-read-only': False,
+ "commands": [
+ "mkdir -p %{install-root}",
+ "echo 'I can write to root' > /test",
+ "cp /test %{install-root}",
+ ],
+ },
+ )
+
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ res = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout, 'test')) as f:
+ with open(os.path.join(checkout, "test")) as f:
text = f.read()
assert text == "I can write to root\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_script_no_root(cli, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements')
- element_name = 'script/script-layout.bst'
-
- create_script_element(element_name, element_path,
- config={
- 'root-read-only': True,
- 'commands': [
- "mkdir -p %{install-root}",
- "echo 'I can not write to root' > /test",
- "cp /test %{install-root}"
- ],
- })
-
- res = cli.run(project=project, args=['build', element_name])
+ element_path = os.path.join(project, "elements")
+ element_name = "script/script-layout.bst"
+
+ create_script_element(
+ element_name,
+ element_path,
+ config={
+ "root-read-only": True,
+ "commands": [
+ "mkdir -p %{install-root}",
+ "echo 'I can not write to root' > /test",
+ "cp /test %{install-root}",
+ ],
+ },
+ )
+
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code != 0
assert "/test: Read-only file system" in res.stderr
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_script_cwd(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_path = os.path.join(project, 'elements')
- element_name = 'script/script-layout.bst'
-
- create_script_element(element_name, element_path,
- config={
- 'commands': [
- "echo 'test' > test",
- "cp /buildstream/test %{install-root}"
- ],
- },
- variables={
- 'cwd': '/buildstream'
- })
-
- res = cli.run(project=project, args=['build', element_name])
+ checkout = os.path.join(cli.directory, "checkout")
+ element_path = os.path.join(project, "elements")
+ element_name = "script/script-layout.bst"
+
+ create_script_element(
+ element_name,
+ element_path,
+ config={
+ "commands": ["echo 'test' > test", "cp /buildstream/test %{install-root}"],
+ },
+ variables={"cwd": "/buildstream"},
+ )
+
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ res = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout, 'test')) as f:
+ with open(os.path.join(checkout, "test")) as f:
text = f.read()
assert text == "test\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_script_layout(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'script/script-layout.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "script/script-layout.bst"
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout, 'test')) as f:
+ with open(os.path.join(checkout, "test")) as f:
text = f.read()
assert text == "Hi\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_regression_cache_corruption(cli, datafiles):
project = str(datafiles)
- checkout_original = os.path.join(cli.directory, 'checkout-original')
- checkout_after = os.path.join(cli.directory, 'checkout-after')
- element_name = 'script/corruption.bst'
- canary_element_name = 'script/corruption-image.bst'
+ checkout_original = os.path.join(cli.directory, "checkout-original")
+ checkout_after = os.path.join(cli.directory, "checkout-after")
+ element_name = "script/corruption.bst"
+ canary_element_name = "script/corruption-image.bst"
- res = cli.run(project=project, args=['build', canary_element_name])
+ res = cli.run(project=project, args=["build", canary_element_name])
assert res.exit_code == 0
- res = cli.run(project=project, args=['artifact', 'checkout', canary_element_name,
- '--directory', checkout_original])
+ res = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ canary_element_name,
+ "--directory",
+ checkout_original,
+ ],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout_original, 'canary')) as f:
- assert f.read() == 'alive\n'
+ with open(os.path.join(checkout_original, "canary")) as f:
+ assert f.read() == "alive\n"
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(project=project, args=['artifact', 'checkout', canary_element_name,
- '--directory', checkout_after])
+ res = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ canary_element_name,
+ "--directory",
+ checkout_after,
+ ],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout_after, 'canary')) as f:
- assert f.read() == 'alive\n'
+ with open(os.path.join(checkout_after, "canary")) as f:
+ assert f.read() == "alive\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_regression_tmpdir(cli, datafiles):
project = str(datafiles)
- element_name = 'script/tmpdir.bst'
+ element_name = "script/tmpdir.bst"
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_regression_cache_corruption_2(cli, datafiles):
project = str(datafiles)
- checkout_original = os.path.join(cli.directory, 'checkout-original')
- checkout_after = os.path.join(cli.directory, 'checkout-after')
- element_name = 'script/corruption-2.bst'
- canary_element_name = 'script/corruption-image.bst'
+ checkout_original = os.path.join(cli.directory, "checkout-original")
+ checkout_after = os.path.join(cli.directory, "checkout-after")
+ element_name = "script/corruption-2.bst"
+ canary_element_name = "script/corruption-image.bst"
- res = cli.run(project=project, args=['build', canary_element_name])
+ res = cli.run(project=project, args=["build", canary_element_name])
assert res.exit_code == 0
- res = cli.run(project=project, args=['artifact', 'checkout', canary_element_name,
- '--directory', checkout_original])
+ res = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ canary_element_name,
+ "--directory",
+ checkout_original,
+ ],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout_original, 'canary')) as f:
- assert f.read() == 'alive\n'
+ with open(os.path.join(checkout_original, "canary")) as f:
+ assert f.read() == "alive\n"
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(project=project, args=['artifact', 'checkout', canary_element_name,
- '--directory', checkout_after])
+ res = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ canary_element_name,
+ "--directory",
+ checkout_after,
+ ],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout_after, 'canary')) as f:
- assert f.read() == 'alive\n'
+ with open(os.path.join(checkout_after, "canary")) as f:
+ assert f.read() == "alive\n"
diff --git a/tests/integration/shell.py b/tests/integration/shell.py
index a44f51609..124770aad 100644
--- a/tests/integration/shell.py
+++ b/tests/integration/shell.py
@@ -16,10 +16,7 @@ from tests.testutils import create_artifact_share
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# execute_shell()
@@ -35,28 +32,32 @@ DATA_DIR = os.path.join(
# element (str): The element to build and run a shell with
# isolate (bool): Whether to pass --isolate to `bst shell`
#
-def execute_shell(cli, project, command, *, config=None, mount=None, element='base.bst', isolate=False):
+def execute_shell(
+ cli, project, command, *, config=None, mount=None, element="base.bst", isolate=False
+):
# Ensure the element is built
result = cli.run_project_config(
- project=project, project_config=config, args=['build', element])
+ project=project, project_config=config, args=["build", element]
+ )
assert result.exit_code == 0
- args = ['shell']
+ args = ["shell"]
if isolate:
- args += ['--isolate']
+ args += ["--isolate"]
if mount is not None:
host_path, target_path = mount
- args += ['--mount', host_path, target_path]
- args += [element, '--', *command]
+ args += ["--mount", host_path, target_path]
+ args += [element, "--", *command]
- return cli.run_project_config(
- project=project, project_config=config, args=args)
+ return cli.run_project_config(project=project, project_config=config, args=args)
# Test running something through a shell, allowing it to find the
# executable
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_shell(cli, datafiles):
project = str(datafiles)
@@ -67,7 +68,9 @@ def test_shell(cli, datafiles):
# Test running an executable directly
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_executable(cli, datafiles):
project = str(datafiles)
@@ -79,19 +82,20 @@ def test_executable(cli, datafiles):
# Test shell environment variable explicit assignments
@pytest.mark.parametrize("animal", [("Horse"), ("Pony")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
 # This test seems to fail or pass depending on whether this file is run on its own or as part of the whole test suite
def test_env_assign(cli, datafiles, animal):
project = str(datafiles)
- expected = animal + '\n'
+ expected = animal + "\n"
- result = execute_shell(cli, project, ['/bin/sh', '-c', 'echo ${ANIMAL}'], config={
- 'shell': {
- 'environment': {
- 'ANIMAL': animal
- }
- }
- })
+ result = execute_shell(
+ cli,
+ project,
+ ["/bin/sh", "-c", "echo ${ANIMAL}"],
+ config={"shell": {"environment": {"ANIMAL": animal}}},
+ )
assert result.exit_code == 0
assert result.output == expected
@@ -100,21 +104,22 @@ def test_env_assign(cli, datafiles, animal):
# Test shell environment variable explicit assignments with host env var expansion
@pytest.mark.parametrize("animal", [("Horse"), ("Pony")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
 # This test seems to fail or pass depending on whether this file is run on its own or as part of the whole test suite
def test_env_assign_expand_host_environ(cli, datafiles, animal):
project = str(datafiles)
- expected = 'The animal is: {}\n'.format(animal)
+ expected = "The animal is: {}\n".format(animal)
- os.environ['BEAST'] = animal
+ os.environ["BEAST"] = animal
- result = execute_shell(cli, project, ['/bin/sh', '-c', 'echo ${ANIMAL}'], config={
- 'shell': {
- 'environment': {
- 'ANIMAL': 'The animal is: ${BEAST}'
- }
- }
- })
+ result = execute_shell(
+ cli,
+ project,
+ ["/bin/sh", "-c", "echo ${ANIMAL}"],
+ config={"shell": {"environment": {"ANIMAL": "The animal is: ${BEAST}"}}},
+ )
assert result.exit_code == 0
assert result.output == expected
@@ -124,51 +129,50 @@ def test_env_assign_expand_host_environ(cli, datafiles, animal):
# when running an isolated shell
@pytest.mark.parametrize("animal", [("Horse"), ("Pony")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
 # This test seems to fail or pass depending on whether this file is run on its own or as part of the whole test suite
def test_env_assign_isolated(cli, datafiles, animal):
project = str(datafiles)
- result = execute_shell(cli, project, ['/bin/sh', '-c', 'echo ${ANIMAL}'], isolate=True, config={
- 'shell': {
- 'environment': {
- 'ANIMAL': animal
- }
- }
- })
+ result = execute_shell(
+ cli,
+ project,
+ ["/bin/sh", "-c", "echo ${ANIMAL}"],
+ isolate=True,
+ config={"shell": {"environment": {"ANIMAL": animal}}},
+ )
assert result.exit_code == 0
- assert result.output == '\n'
+ assert result.output == "\n"
# Test running an executable in a runtime with no shell (i.e., no
# /bin/sh)
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_no_shell(cli, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements')
- element_name = 'shell/no-shell.bst'
+ element_path = os.path.join(project, "elements")
+ element_name = "shell/no-shell.bst"
# Create an element that removes /bin/sh from the base runtime
element = {
- 'kind': 'script',
- 'depends': [{
- 'filename': 'base.bst',
- 'type': 'build'
- }],
- 'variables': {
- 'install-root': '/'
- },
- 'config': {
- 'commands': [
- 'rm /bin/sh'
- ]
- }
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "variables": {"install-root": "/"},
+ "config": {"commands": ["rm /bin/sh"]},
}
- os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
+ os.makedirs(
+ os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True
+ )
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- result = execute_shell(cli, project, ['/bin/echo', 'Pegasissies!'], element=element_name)
+ result = execute_shell(
+ cli, project, ["/bin/echo", "Pegasissies!"], element=element_name
+ )
assert result.exit_code == 0
assert result.output == "Pegasissies!\n"
@@ -176,99 +180,110 @@ def test_no_shell(cli, datafiles):
# Test that bind mounts defined in project.conf work
@pytest.mark.parametrize("path", [("/etc/pony.conf"), ("/usr/share/pony/pony.txt")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_host_files(cli, datafiles, path):
project = str(datafiles)
- ponyfile = os.path.join(project, 'files', 'shell-mount', 'pony.txt')
- result = execute_shell(cli, project, ['cat', path], config={
- 'shell': {
- 'host-files': [
- {
- 'host_path': ponyfile,
- 'path': path
- }
- ]
- }
- })
+ ponyfile = os.path.join(project, "files", "shell-mount", "pony.txt")
+ result = execute_shell(
+ cli,
+ project,
+ ["cat", path],
+ config={"shell": {"host-files": [{"host_path": ponyfile, "path": path}]}},
+ )
assert result.exit_code == 0
- assert result.output == 'pony\n'
+ assert result.output == "pony\n"
# Test that bind mounts defined in project.conf work
@pytest.mark.parametrize("path", [("/etc"), ("/usr/share/pony")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_host_files_expand_environ(cli, datafiles, path):
project = str(datafiles)
- hostpath = os.path.join(project, 'files', 'shell-mount')
- fullpath = os.path.join(path, 'pony.txt')
-
- os.environ['BASE_PONY'] = path
- os.environ['HOST_PONY_PATH'] = hostpath
-
- result = execute_shell(cli, project, ['cat', fullpath], config={
- 'shell': {
- 'host-files': [
- {
- 'host_path': '${HOST_PONY_PATH}/pony.txt',
- 'path': '${BASE_PONY}/pony.txt'
- }
- ]
- }
- })
+ hostpath = os.path.join(project, "files", "shell-mount")
+ fullpath = os.path.join(path, "pony.txt")
+
+ os.environ["BASE_PONY"] = path
+ os.environ["HOST_PONY_PATH"] = hostpath
+
+ result = execute_shell(
+ cli,
+ project,
+ ["cat", fullpath],
+ config={
+ "shell": {
+ "host-files": [
+ {
+ "host_path": "${HOST_PONY_PATH}/pony.txt",
+ "path": "${BASE_PONY}/pony.txt",
+ }
+ ]
+ }
+ },
+ )
assert result.exit_code == 0
- assert result.output == 'pony\n'
+ assert result.output == "pony\n"
 # Test that bind mounts defined in project.conf don't mount in isolation
@pytest.mark.parametrize("path", [("/etc/pony.conf"), ("/usr/share/pony/pony.txt")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_isolated_no_mount(cli, datafiles, path):
project = str(datafiles)
- ponyfile = os.path.join(project, 'files', 'shell-mount', 'pony.txt')
- result = execute_shell(cli, project, ['cat', path], isolate=True, config={
- 'shell': {
- 'host-files': [
- {
- 'host_path': ponyfile,
- 'path': path
- }
- ]
- }
- })
+ ponyfile = os.path.join(project, "files", "shell-mount", "pony.txt")
+ result = execute_shell(
+ cli,
+ project,
+ ["cat", path],
+ isolate=True,
+ config={"shell": {"host-files": [{"host_path": ponyfile, "path": path}]}},
+ )
assert result.exit_code != 0
assert path in result.stderr
- assert 'No such file or directory' in result.stderr
+ assert "No such file or directory" in result.stderr
# Test that we warn about non-existing files on the host if the mount is not
# declared as optional, and that there is no warning if it is optional
@pytest.mark.parametrize("optional", [("mandatory"), ("optional")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_host_files_missing(cli, datafiles, optional):
project = str(datafiles)
- ponyfile = os.path.join(project, 'files', 'shell-mount', 'horsy.txt')
+ ponyfile = os.path.join(project, "files", "shell-mount", "horsy.txt")
- option = (optional == "optional")
+ option = optional == "optional"
# Assert that we did successfully run something in the shell anyway
- result = execute_shell(cli, project, ['echo', 'Hello'], config={
- 'shell': {
- 'host-files': [
- {
- 'host_path': ponyfile,
- 'path': '/etc/pony.conf',
- 'optional': option
- }
- ]
- }
- })
+ result = execute_shell(
+ cli,
+ project,
+ ["echo", "Hello"],
+ config={
+ "shell": {
+ "host-files": [
+ {
+ "host_path": ponyfile,
+ "path": "/etc/pony.conf",
+ "optional": option,
+ }
+ ]
+ }
+ },
+ )
assert result.exit_code == 0
- assert result.output == 'Hello\n'
+ assert result.output == "Hello\n"
if option:
# Assert that there was no warning about the mount
@@ -281,88 +296,113 @@ def test_host_files_missing(cli, datafiles, optional):
# Test that bind mounts defined in project.conf work
@pytest.mark.parametrize("path", [("/etc/pony.conf"), ("/usr/share/pony/pony.txt")])
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_cli_mount(cli, datafiles, path):
project = str(datafiles)
- ponyfile = os.path.join(project, 'files', 'shell-mount', 'pony.txt')
+ ponyfile = os.path.join(project, "files", "shell-mount", "pony.txt")
- result = execute_shell(cli, project, ['cat', path], mount=(ponyfile, path))
+ result = execute_shell(cli, project, ["cat", path], mount=(ponyfile, path))
assert result.exit_code == 0
- assert result.output == 'pony\n'
+ assert result.output == "pony\n"
# Test that we can see the workspace files in a shell
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_workspace_visible(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
- element_name = 'workspace/workspace-mount-fail.bst'
+ workspace = os.path.join(cli.directory, "workspace")
+ element_name = "workspace/workspace-mount-fail.bst"
# Open a workspace on our build failing element
#
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
assert res.exit_code == 0
# Ensure the dependencies of our build failing element are built
- result = cli.run(project=project, args=['build', 'base.bst'])
+ result = cli.run(project=project, args=["build", "base.bst"])
assert result.exit_code == 0
# Obtain a copy of the hello.c content from the workspace
#
- workspace_hello_path = os.path.join(cli.directory, 'workspace', 'hello.c')
+ workspace_hello_path = os.path.join(cli.directory, "workspace", "hello.c")
assert os.path.exists(workspace_hello_path)
- with open(workspace_hello_path, 'r') as f:
+ with open(workspace_hello_path, "r") as f:
workspace_hello = f.read()
# Cat the hello.c file from a bst shell command, and assert
# that we got the same content here
#
- result = cli.run(project=project, args=[
- 'shell', '--build', element_name, '--', 'cat', 'hello.c'
- ])
+ result = cli.run(
+ project=project, args=["shell", "--build", element_name, "--", "cat", "hello.c"]
+ )
assert result.exit_code == 0
assert result.output == workspace_hello
# Test that '--sysroot' works
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_sysroot(cli, tmpdir, datafiles):
project = str(datafiles)
base_element = "base/base-alpine.bst"
# test element only needs to be something lightweight for this test
test_element = "script/script.bst"
- checkout_dir = os.path.join(str(tmpdir), 'alpine-sysroot')
- test_file = 'hello'
+ checkout_dir = os.path.join(str(tmpdir), "alpine-sysroot")
+ test_file = "hello"
# Build and check out a sysroot
- res = cli.run(project=project, args=['build', base_element])
+ res = cli.run(project=project, args=["build", base_element])
res.assert_success()
- res = cli.run(project=project, args=['artifact', 'checkout', base_element, '--directory', checkout_dir])
+ res = cli.run(
+ project=project,
+ args=["artifact", "checkout", base_element, "--directory", checkout_dir],
+ )
res.assert_success()
# Mutate the sysroot
test_path = os.path.join(checkout_dir, test_file)
- with open(test_path, 'w') as f:
- f.write('hello\n')
+ with open(test_path, "w") as f:
+ f.write("hello\n")
# Shell into the sysroot and check the test file exists
- res = cli.run(project=project, args=[
- 'shell', '--build', '--sysroot', checkout_dir, test_element, '--',
- 'grep', '-q', 'hello', '/' + test_file
- ])
+ res = cli.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ "--sysroot",
+ checkout_dir,
+ test_element,
+ "--",
+ "grep",
+ "-q",
+ "hello",
+ "/" + test_file,
+ ],
+ )
res.assert_success()
# Test system integration commands can access devices in /dev
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_integration_devices(cli, datafiles):
project = str(datafiles)
- element_name = 'integration.bst'
+ element_name = "integration.bst"
result = execute_shell(cli, project, ["true"], element=element_name)
assert result.exit_code == 0
@@ -372,79 +412,94 @@ def test_integration_devices(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("build_shell", [("build"), ("nobuild")])
@pytest.mark.parametrize("guess_element", [True, False], ids=["guess", "no-guess"])
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-def test_integration_external_workspace(cli, tmpdir_factory, datafiles, build_shell, guess_element):
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+def test_integration_external_workspace(
+ cli, tmpdir_factory, datafiles, build_shell, guess_element
+):
tmpdir = tmpdir_factory.mktemp("")
project = str(datafiles)
- element_name = 'autotools/amhello.bst'
- workspace_dir = os.path.join(str(tmpdir), 'workspace')
+ element_name = "autotools/amhello.bst"
+ workspace_dir = os.path.join(str(tmpdir), "workspace")
if guess_element:
# Mutate the project.conf to use a default shell command
- project_file = os.path.join(project, 'project.conf')
- config_text = "shell:\n"\
- " command: ['true']\n"
- with open(project_file, 'a') as f:
+ project_file = os.path.join(project, "project.conf")
+ config_text = "shell:\n" " command: ['true']\n"
+ with open(project_file, "a") as f:
f.write(config_text)
- result = cli.run(project=project, args=[
- 'workspace', 'open', '--directory', workspace_dir, element_name
- ])
+ result = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace_dir, element_name],
+ )
result.assert_success()
- result = cli.run(project=project, args=['-C', workspace_dir, 'build', element_name])
+ result = cli.run(project=project, args=["-C", workspace_dir, "build", element_name])
result.assert_success()
- command = ['-C', workspace_dir, 'shell']
- if build_shell == 'build':
- command.append('--build')
+ command = ["-C", workspace_dir, "shell"]
+ if build_shell == "build":
+ command.append("--build")
if not guess_element:
- command.extend([element_name, '--', 'true'])
+ command.extend([element_name, "--", "true"])
result = cli.run(project=project, cwd=workspace_dir, args=command)
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_integration_partial_artifact(cli, datafiles, tmpdir, integration_cache):
project = str(datafiles)
- element_name = 'autotools/amhello.bst'
+ element_name = "autotools/amhello.bst"
# push to an artifact server so we can pull from it later.
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- cli.configure({'artifacts': {
- 'url': share.repo,
- 'push': True
- }})
- result = cli.run(project=project, args=['build', element_name])
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
# If the build is cached then it might not push to the artifact cache
- result = cli.run(project=project, args=['artifact', 'push', element_name])
+ result = cli.run(project=project, args=["artifact", "push", element_name])
result.assert_success()
- result = cli.run(project=project, args=['shell', element_name])
+ result = cli.run(project=project, args=["shell", element_name])
result.assert_success()
# do a checkout and get the digest of the hello binary.
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', '--deps', 'none',
- '--directory', os.path.join(str(tmpdir), 'tmp'),
- 'autotools/amhello.bst'])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "--deps",
+ "none",
+ "--directory",
+ os.path.join(str(tmpdir), "tmp"),
+ "autotools/amhello.bst",
+ ],
+ )
result.assert_success()
- digest = utils.sha256sum(os.path.join(str(tmpdir), 'tmp', 'usr', 'bin', 'hello'))
+ digest = utils.sha256sum(
+ os.path.join(str(tmpdir), "tmp", "usr", "bin", "hello")
+ )
# Remove the binary from the CAS
- cachedir = cli.config['cachedir']
- objpath = os.path.join(cachedir, 'cas', 'objects', digest[:2], digest[2:])
+ cachedir = cli.config["cachedir"]
+ objpath = os.path.join(cachedir, "cas", "objects", digest[:2], digest[2:])
os.unlink(objpath)
# check shell doesn't work
- result = cli.run(project=project, args=['shell', element_name, '--', 'hello'])
+ result = cli.run(project=project, args=["shell", element_name, "--", "hello"])
result.assert_main_error(ErrorDomain.APP, None)
# check the artifact gets completed with '--pull' specified
- result = cli.run(project=project, args=['shell', '--pull', element_name, '--', 'hello'])
+ result = cli.run(
+ project=project, args=["shell", "--pull", element_name, "--", "hello"]
+ )
result.assert_success()
- assert 'autotools/amhello.bst' in result.get_pulled_elements()
+ assert "autotools/amhello.bst" in result.get_pulled_elements()
diff --git a/tests/integration/shellbuildtrees.py b/tests/integration/shellbuildtrees.py
index 146bc6062..6ed6770a4 100644
--- a/tests/integration/shellbuildtrees.py
+++ b/tests/integration/shellbuildtrees.py
@@ -16,321 +16,528 @@ from tests.testutils import create_artifact_share
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_staged(cli_integration, datafiles):
    # We can only test the non interactive case
# The non interactive case defaults to not using buildtrees
# for `bst shell --build`
project = str(datafiles)
- element_name = 'build-shell/buildtree.bst'
+ element_name = "build-shell/buildtree.bst"
- res = cli_integration.run(project=project, args=['--cache-buildtrees', 'always', 'build', element_name])
+ res = cli_integration.run(
+ project=project, args=["--cache-buildtrees", "always", "build", element_name]
+ )
res.assert_success()
- res = cli_integration.run(project=project, args=[
- 'shell', '--build', element_name, '--', 'cat', 'test'
- ])
+ res = cli_integration.run(
+ project=project, args=["shell", "--build", element_name, "--", "cat", "test"]
+ )
res.assert_shell_error()
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_staged_forced_true(cli_integration, datafiles):
# Test that if we ask for a build tree it is there.
project = str(datafiles)
- element_name = 'build-shell/buildtree.bst'
+ element_name = "build-shell/buildtree.bst"
- res = cli_integration.run(project=project, args=['--cache-buildtrees', 'always', 'build', element_name])
+ res = cli_integration.run(
+ project=project, args=["--cache-buildtrees", "always", "build", element_name]
+ )
res.assert_success()
- res = cli_integration.run(project=project, args=[
- 'shell', '--build', '--use-buildtree', 'always', element_name, '--', 'cat', 'test'
- ])
+ res = cli_integration.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ "--use-buildtree",
+ "always",
+ element_name,
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_success()
- assert 'Hi' in res.output
+ assert "Hi" in res.output
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_staged_warn_empty_cached(cli_integration, tmpdir, datafiles):
# Test that if we stage a cached and empty buildtree, we warn the user.
project = str(datafiles)
- element_name = 'build-shell/buildtree.bst'
+ element_name = "build-shell/buildtree.bst"
# Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
# without caching a buildtree which is the default bst behaviour
- cli_integration.configure({
- 'cachedir': str(tmpdir)
- })
+ cli_integration.configure({"cachedir": str(tmpdir)})
- res = cli_integration.run(project=project, args=['build', element_name])
+ res = cli_integration.run(project=project, args=["build", element_name])
res.assert_success()
- res = cli_integration.run(project=project, args=[
- 'shell', '--build', '--use-buildtree', 'always', element_name, '--', 'cat', 'test'
- ])
+ res = cli_integration.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ "--use-buildtree",
+ "always",
+ element_name,
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_main_error(ErrorDomain.APP, None)
- assert "Artifact was created without buildtree, unable to launch shell with it" in res.stderr
+ assert (
+ "Artifact was created without buildtree, unable to launch shell with it"
+ in res.stderr
+ )
    # Now attempt the same with the try option; this should not attempt to find a buildtree
    # and just launch the shell, however the cat should still fail.
- res = cli_integration.run(project=project, args=[
- 'shell', '--build', '--use-buildtree', 'try', element_name, '--', 'cat', 'test'
- ])
- assert "Artifact created without buildtree, shell will be loaded without it" in res.stderr
- assert 'Hi' not in res.output
+ res = cli_integration.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ "--use-buildtree",
+ "try",
+ element_name,
+ "--",
+ "cat",
+ "test",
+ ],
+ )
+ assert (
+ "Artifact created without buildtree, shell will be loaded without it"
+ in res.stderr
+ )
+ assert "Hi" not in res.output
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_staged_if_available(cli_integration, datafiles):
# Test that a build tree can be correctly detected.
project = str(datafiles)
- element_name = 'build-shell/buildtree.bst'
+ element_name = "build-shell/buildtree.bst"
- res = cli_integration.run(project=project, args=['--cache-buildtrees', 'always', 'build', element_name])
+ res = cli_integration.run(
+ project=project, args=["--cache-buildtrees", "always", "build", element_name]
+ )
res.assert_success()
- res = cli_integration.run(project=project, args=[
- 'shell', '--build', '--use-buildtree', 'try', element_name, '--', 'cat', 'test'
- ])
+ res = cli_integration.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ "--use-buildtree",
+ "try",
+ element_name,
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_success()
- assert 'Hi' in res.output
+ assert "Hi" in res.output
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_staged_forced_false(cli_integration, datafiles):
# Test that if we ask not to have a build tree it is not there
project = str(datafiles)
- element_name = 'build-shell/buildtree.bst'
+ element_name = "build-shell/buildtree.bst"
- res = cli_integration.run(project=project, args=['--cache-buildtrees', 'always', 'build', element_name])
+ res = cli_integration.run(
+ project=project, args=["--cache-buildtrees", "always", "build", element_name]
+ )
res.assert_success()
- res = cli_integration.run(project=project, args=[
- 'shell', '--build', '--use-buildtree', 'never', element_name, '--', 'cat', 'test'
- ])
+ res = cli_integration.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ "--use-buildtree",
+ "never",
+ element_name,
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_shell_error()
- assert 'Hi' not in res.output
+ assert "Hi" not in res.output
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_from_failure(cli_integration, datafiles):
# Test that we can use a build tree after a failure
project = str(datafiles)
- element_name = 'build-shell/buildtree-fail.bst'
+ element_name = "build-shell/buildtree-fail.bst"
- res = cli_integration.run(project=project, args=['build', element_name])
+ res = cli_integration.run(project=project, args=["build", element_name])
res.assert_main_error(ErrorDomain.STREAM, None)
# Assert that file has expected contents
- res = cli_integration.run(project=project, args=[
- 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
- ])
+ res = cli_integration.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "always",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_success()
assert "WARNING: using a buildtree from a failed build" in res.stderr
- assert 'Hi' in res.output
+ assert "Hi" in res.output
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_from_failure_option_never(cli_integration, tmpdir, datafiles):
project = str(datafiles)
- element_name = 'build-shell/buildtree-fail.bst'
+ element_name = "build-shell/buildtree-fail.bst"
# Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
# without caching a buildtree explicitly
- cli_integration.configure({
- 'cachedir': str(tmpdir)
- })
+ cli_integration.configure({"cachedir": str(tmpdir)})
- res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
+ res = cli_integration.run(
+ project=project, args=["--cache-buildtrees", "never", "build", element_name]
+ )
res.assert_main_error(ErrorDomain.STREAM, None)
- res = cli_integration.run(project=project, args=[
- 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
- ])
+ res = cli_integration.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "always",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_main_error(ErrorDomain.APP, None)
- assert "Artifact was created without buildtree, unable to launch shell with it" in res.stderr
+ assert (
+ "Artifact was created without buildtree, unable to launch shell with it"
+ in res.stderr
+ )
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_from_failure_option_always(cli_integration, tmpdir, datafiles):
project = str(datafiles)
- element_name = 'build-shell/buildtree-fail.bst'
+ element_name = "build-shell/buildtree-fail.bst"
# build with --cache-buildtrees set to 'always', behaviour should match
# default behaviour (which is always) as the buildtree will explicitly have been
# cached with content.
- cli_integration.configure({
- 'cachedir': str(tmpdir)
- })
+ cli_integration.configure({"cachedir": str(tmpdir)})
- res = cli_integration.run(project=project, args=['--cache-buildtrees', 'always', 'build', element_name])
+ res = cli_integration.run(
+ project=project, args=["--cache-buildtrees", "always", "build", element_name]
+ )
res.assert_main_error(ErrorDomain.STREAM, None)
- res = cli_integration.run(project=project, args=[
- 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
- ])
+ res = cli_integration.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "always",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_success()
assert "WARNING: using a buildtree from a failed build" in res.stderr
- assert 'Hi' in res.output
+ assert "Hi" in res.output
# Check that build shells work when pulled from a remote cache
# This is to roughly simulate remote execution
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_pulled(cli, tmpdir, datafiles):
project = str(datafiles)
- element_name = 'build-shell/buildtree.bst'
+ element_name = "build-shell/buildtree.bst"
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Build the element to push it to cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['--cache-buildtrees', 'always', 'build', element_name])
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(
+ project=project,
+ args=["--cache-buildtrees", "always", "build", element_name],
+ )
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
# Discard the cache
- shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
- shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts')))
- assert cli.get_element_state(project, element_name) != 'cached'
+ shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "cas")))
+ shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
+ assert cli.get_element_state(project, element_name) != "cached"
# Pull from cache, ensuring cli options is set to pull the buildtree
- result = cli.run(project=project,
- args=['--pull-buildtrees', 'artifact', 'pull', '--deps', 'all', element_name])
+ result = cli.run(
+ project=project,
+ args=[
+ "--pull-buildtrees",
+ "artifact",
+ "pull",
+ "--deps",
+ "all",
+ element_name,
+ ],
+ )
result.assert_success()
# Check it's using the cached build tree
- res = cli.run(project=project, args=[
- 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
- ])
+ res = cli.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "always",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_success()
# This test checks for correct behaviour if a buildtree is not present in the local cache.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_buildtree_options(cli, tmpdir, datafiles):
project = str(datafiles)
- element_name = 'build-shell/buildtree.bst'
+ element_name = "build-shell/buildtree.bst"
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Build the element to push it to cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['--cache-buildtrees', 'always', 'build', element_name])
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(
+ project=project,
+ args=["--cache-buildtrees", "always", "build", element_name],
+ )
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
- assert share.get_artifact(cli.get_artifact_name(project, 'test', element_name))
+ assert cli.get_element_state(project, element_name) == "cached"
+ assert share.get_artifact(cli.get_artifact_name(project, "test", element_name))
# Discard the cache
- shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
- shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts')))
- assert cli.get_element_state(project, element_name) != 'cached'
+ shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "cas")))
+ shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
+ assert cli.get_element_state(project, element_name) != "cached"
# Pull from cache, but do not include buildtrees.
- result = cli.run(project=project, args=['artifact', 'pull', '--deps', 'all', element_name])
+ result = cli.run(
+ project=project, args=["artifact", "pull", "--deps", "all", element_name]
+ )
result.assert_success()
# Check it's not using the cached build tree
- res = cli.run(project=project, args=[
- 'shell', '--build', element_name, '--use-buildtree', 'never', '--', 'cat', 'test'
- ])
+ res = cli.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "never",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_shell_error()
- assert 'Hi' not in res.output
+ assert "Hi" not in res.output
        # Check it's not using the cached build tree; the default is to ask, falling
        # back to not using it for non-interactive behavior
- res = cli.run(project=project, args=[
- 'shell', '--build', element_name, '--', 'cat', 'test'
- ])
+ res = cli.run(
+ project=project,
+ args=["shell", "--build", element_name, "--", "cat", "test"],
+ )
res.assert_shell_error()
- assert 'Hi' not in res.output
+ assert "Hi" not in res.output
# Check correctly handling the lack of buildtree, with 'try' not attempting to
# pull the buildtree as the user context is by default set to not pull them
- res = cli.run(project=project, args=[
- 'shell', '--build', element_name, '--use-buildtree', 'try', '--', 'cat', 'test'
- ])
- assert 'Hi' not in res.output
- assert 'Attempting to fetch missing artifact buildtrees' not in res.stderr
+ res = cli.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "try",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
+ assert "Hi" not in res.output
+ assert "Attempting to fetch missing artifact buildtrees" not in res.stderr
# Check correctly handling the lack of buildtree, with 'try' attempting and succeeding
        # to pull the buildtree as the user context allows the pulling of buildtrees and it is
# available in the remote
- res = cli.run(project=project, args=[
- '--pull-buildtrees', 'shell', '--build', element_name, '--use-buildtree', 'try', '--', 'cat', 'test'
- ])
- assert 'Attempting to fetch missing artifact buildtree' in res.stderr
- assert 'Hi' in res.output
- shutil.rmtree(os.path.join(os.path.join(str(tmpdir), 'cache', 'cas')))
- shutil.rmtree(os.path.join(os.path.join(str(tmpdir), 'cache', 'artifacts')))
- assert cli.get_element_state(project, element_name) != 'cached'
+ res = cli.run(
+ project=project,
+ args=[
+ "--pull-buildtrees",
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "try",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
+ assert "Attempting to fetch missing artifact buildtree" in res.stderr
+ assert "Hi" in res.output
+ shutil.rmtree(os.path.join(os.path.join(str(tmpdir), "cache", "cas")))
+ shutil.rmtree(os.path.join(os.path.join(str(tmpdir), "cache", "artifacts")))
+ assert cli.get_element_state(project, element_name) != "cached"
# Check it's not loading the shell at all with always set for the buildtree, when the
# user context does not allow for buildtree pulling
- result = cli.run(project=project, args=['artifact', 'pull', '--deps', 'all', element_name])
+ result = cli.run(
+ project=project, args=["artifact", "pull", "--deps", "all", element_name]
+ )
result.assert_success()
- res = cli.run(project=project, args=[
- 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
- ])
+ res = cli.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "always",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_main_error(ErrorDomain.APP, None)
- assert 'Buildtree is not cached locally or in available remotes' in res.stderr
- assert 'Hi' not in res.output
- assert 'Attempting to fetch missing artifact buildtree' not in res.stderr
+ assert "Buildtree is not cached locally or in available remotes" in res.stderr
+ assert "Hi" not in res.output
+ assert "Attempting to fetch missing artifact buildtree" not in res.stderr
# Check that when user context is set to pull buildtrees and a remote has the buildtree,
# 'always' will attempt and succeed at pulling the missing buildtree.
- res = cli.run(project=project, args=[
- '--pull-buildtrees', 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
- ])
- assert 'Hi' in res.output
- assert "buildtree is not cached locally, will attempt to pull from available remotes" in res.stderr
- assert 'Attempting to fetch missing artifact buildtree' in res.stderr
+ res = cli.run(
+ project=project,
+ args=[
+ "--pull-buildtrees",
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "always",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
+ assert "Hi" in res.output
+ assert (
+ "buildtree is not cached locally, will attempt to pull from available remotes"
+ in res.stderr
+ )
+ assert "Attempting to fetch missing artifact buildtree" in res.stderr
# Tests running pull and pull-buildtree options at the same time.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_pull_buildtree_pulled(cli, tmpdir, datafiles):
project = str(datafiles)
- element_name = 'build-shell/buildtree.bst'
+ element_name = "build-shell/buildtree.bst"
- with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
# Build the element to push it to cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['--cache-buildtrees', 'always', 'build', element_name])
+ cli.configure({"artifacts": {"url": share.repo, "push": True}})
+ result = cli.run(
+ project=project,
+ args=["--cache-buildtrees", "always", "build", element_name],
+ )
result.assert_success()
- assert cli.get_element_state(project, element_name) == 'cached'
+ assert cli.get_element_state(project, element_name) == "cached"
# Discard the cache
- shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
- shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts')))
- assert cli.get_element_state(project, element_name) != 'cached'
+ shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "cas")))
+ shutil.rmtree(str(os.path.join(str(tmpdir), "cache", "artifacts")))
+ assert cli.get_element_state(project, element_name) != "cached"
# Check it's using the cached build tree
- res = cli.run(project=project, args=[
- '--pull-buildtrees', 'shell', '--build', element_name, '--pull',
- '--use-buildtree', 'always', '--', 'cat', 'test'
- ])
+ res = cli.run(
+ project=project,
+ args=[
+ "--pull-buildtrees",
+ "shell",
+ "--build",
+ element_name,
+ "--pull",
+ "--use-buildtree",
+ "always",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_success()
diff --git a/tests/integration/sockets.py b/tests/integration/sockets.py
index 763238baf..6f0757ff4 100644
--- a/tests/integration/sockets.py
+++ b/tests/integration/sockets.py
@@ -10,27 +10,28 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_builddir_socket_ignored(cli, datafiles):
project = str(datafiles)
- element_name = 'sockets/make-builddir-socket.bst'
+ element_name = "sockets/make-builddir-socket.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_install_root_socket_ignored(cli, datafiles):
project = str(datafiles)
- element_name = 'sockets/make-install-root-socket.bst'
+ element_name = "sockets/make-install-root-socket.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
diff --git a/tests/integration/source-determinism.py b/tests/integration/source-determinism.py
index 70c4b79de..b3a4dd96b 100644
--- a/tests/integration/source-determinism.py
+++ b/tests/integration/source-determinism.py
@@ -9,79 +9,67 @@ from buildstream.testing import cli_integration as cli # pylint: disable=unused
from buildstream.testing._utils.site import HAVE_SANDBOX
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
-def create_test_file(*path, mode=0o644, content='content\n'):
+def create_test_file(*path, mode=0o644, content="content\n"):
path = os.path.join(*path)
os.makedirs(os.path.dirname(path), exist_ok=True)
- with open(path, 'w') as f:
+ with open(path, "w") as f:
f.write(content)
os.fchmod(f.fileno(), mode)
def create_test_directory(*path, mode=0o644):
- create_test_file(*path, '.keep', content='')
+ create_test_file(*path, ".keep", content="")
path = os.path.join(*path)
os.chmod(path, mode)
@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_deterministic_source_local(cli, tmpdir, datafiles):
"""Only user rights should be considered for local source.
"""
project = str(datafiles)
- element_name = 'test.bst'
- element_path = os.path.join(project, 'elements', element_name)
- sourcedir = os.path.join(project, 'source')
+ element_name = "test.bst"
+ element_path = os.path.join(project, "elements", element_name)
+ sourcedir = os.path.join(project, "source")
element = {
- 'kind': 'manual',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build'
- }
- ],
- 'sources': [
- {
- 'kind': 'local',
- 'path': 'source'
- }
- ],
- 'config': {
- 'install-commands': [
- 'ls -l >"%{install-root}/ls-l"'
- ]
- }
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "sources": [{"kind": "local", "path": "source"}],
+ "config": {"install-commands": ['ls -l >"%{install-root}/ls-l"']},
}
_yaml.roundtrip_dump(element, element_path)
def get_value_for_mask(mask):
- checkoutdir = os.path.join(str(tmpdir), 'checkout-{}'.format(mask))
-
- create_test_file(sourcedir, 'a.txt', mode=0o644 & mask)
- create_test_file(sourcedir, 'b.txt', mode=0o755 & mask)
- create_test_file(sourcedir, 'c.txt', mode=0o4755 & mask)
- create_test_file(sourcedir, 'd.txt', mode=0o2755 & mask)
- create_test_file(sourcedir, 'e.txt', mode=0o1755 & mask)
- create_test_directory(sourcedir, 'dir-a', mode=0o0755 & mask)
- create_test_directory(sourcedir, 'dir-b', mode=0o4755 & mask)
- create_test_directory(sourcedir, 'dir-c', mode=0o2755 & mask)
- create_test_directory(sourcedir, 'dir-d', mode=0o1755 & mask)
+ checkoutdir = os.path.join(str(tmpdir), "checkout-{}".format(mask))
+
+ create_test_file(sourcedir, "a.txt", mode=0o644 & mask)
+ create_test_file(sourcedir, "b.txt", mode=0o755 & mask)
+ create_test_file(sourcedir, "c.txt", mode=0o4755 & mask)
+ create_test_file(sourcedir, "d.txt", mode=0o2755 & mask)
+ create_test_file(sourcedir, "e.txt", mode=0o1755 & mask)
+ create_test_directory(sourcedir, "dir-a", mode=0o0755 & mask)
+ create_test_directory(sourcedir, "dir-b", mode=0o4755 & mask)
+ create_test_directory(sourcedir, "dir-c", mode=0o2755 & mask)
+ create_test_directory(sourcedir, "dir-d", mode=0o1755 & mask)
try:
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkoutdir],
+ )
result.assert_success()
- with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
+ with open(os.path.join(checkoutdir, "ls-l"), "r") as f:
return f.read()
finally:
cli.remove_artifact_from_cache(project, element_name)
diff --git a/tests/integration/stack.py b/tests/integration/stack.py
index 9d6b38345..d208a8ce1 100644
--- a/tests/integration/stack.py
+++ b/tests/integration/stack.py
@@ -11,29 +11,31 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_stack(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'stack/stack.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "stack/stack.bst"
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert res.exit_code == 0
- with open(os.path.join(checkout, 'hi')) as f:
+ with open(os.path.join(checkout, "hi")) as f:
hi = f.read()
- with open(os.path.join(checkout, 'another-hi')) as f:
+ with open(os.path.join(checkout, "another-hi")) as f:
another_hi = f.read()
assert hi == "Hi\n"
diff --git a/tests/integration/symlinks.py b/tests/integration/symlinks.py
index 85bbc53fd..bc9675ecf 100644
--- a/tests/integration/symlinks.py
+++ b/tests/integration/symlinks.py
@@ -11,80 +11,96 @@ from buildstream.testing._utils.site import HAVE_SANDBOX
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_absolute_symlinks(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'symlinks/dangling-symlink.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "symlinks/dangling-symlink.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == 0
- symlink = os.path.join(checkout, 'opt', 'orgname')
+ symlink = os.path.join(checkout, "opt", "orgname")
assert os.path.islink(symlink)
# The symlink is created to point to /usr/orgs/orgname and BuildStream
# should not mangle symlinks.
- assert os.readlink(symlink) == '/usr/orgs/orgname'
+ assert os.readlink(symlink) == "/usr/orgs/orgname"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_disallow_overlaps_inside_symlink_with_dangling_target(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'symlinks/dangling-symlink-overlap.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "symlinks/dangling-symlink-overlap.bst"
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == -1
- assert 'Destination is a symlink, not a directory: /opt/orgname' in result.stderr
+ assert "Destination is a symlink, not a directory: /opt/orgname" in result.stderr
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_detect_symlink_overlaps_pointing_outside_sandbox(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'symlinks/symlink-to-outside-sandbox-overlap.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "symlinks/symlink-to-outside-sandbox-overlap.bst"
# Building the two elements should succeed...
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
assert result.exit_code == 0
# ...but when we compose them together, the overlaps create paths that
# point outside the sandbox which BuildStream needs to detect before it
# tries to actually write there.
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
assert result.exit_code == -1
- assert 'Destination is a symlink, not a directory: /opt/escape-hatch' in result.stderr
+ assert (
+ "Destination is a symlink, not a directory: /opt/escape-hatch" in result.stderr
+ )
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_symlink_in_sandbox_path(cli, datafiles):
project = str(datafiles)
- element_name = 'symlinks/link-on-path-use.bst'
- base_element_name = 'symlinks/link-on-path.bst'
+ element_name = "symlinks/link-on-path-use.bst"
+ base_element_name = "symlinks/link-on-path.bst"
# This test is inspired by how freedesktop-SDK has /bin -> /usr/bin
    # Create an element that has sh in altbin and a link from bin to altbin
- result1 = cli.run(project=project, args=['build', base_element_name])
+ result1 = cli.run(project=project, args=["build", base_element_name])
result1.assert_success()
    # Build an element that uses the element that has sh in altbin.
- result2 = cli.run(project=project, args=['build', element_name])
+ result2 = cli.run(project=project, args=["build", element_name])
result2.assert_success()
# When this element is built it demonstrates that the virtual sandbox
    # can detect sh across links and that the sandbox can find sh across
diff --git a/tests/integration/workspace.py b/tests/integration/workspace.py
index 0d8a88007..127a9358d 100644
--- a/tests/integration/workspace.py
+++ b/tests/integration/workspace.py
@@ -13,113 +13,132 @@ from buildstream._exceptions import ErrorDomain
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_workspace_stages_once(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
- element_name = 'workspace/workspace-mount.bst'
+ workspace = os.path.join(cli.directory, "workspace")
+ element_name = "workspace/workspace-mount.bst"
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
assert res.exit_code == 0
- assert cli.get_element_key(project, element_name) != "{:?<64}".format('')
- res = cli.run(project=project, args=['build', element_name])
+ assert cli.get_element_key(project, element_name) != "{:?<64}".format("")
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_workspace_mount(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
- element_name = 'workspace/workspace-mount.bst'
+ workspace = os.path.join(cli.directory, "workspace")
+ element_name = "workspace/workspace-mount.bst"
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
assert res.exit_code == 0
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- assert os.path.exists(os.path.join(cli.directory, 'workspace'))
+ assert os.path.exists(os.path.join(cli.directory, "workspace"))
@pytest.mark.datafiles(DATA_DIR)
def test_workspace_mount_on_read_only_directory(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
+ workspace = os.path.join(cli.directory, "workspace")
os.makedirs(workspace)
- element_name = 'workspace/workspace-mount.bst'
+ element_name = "workspace/workspace-mount.bst"
# make directory RO
os.chmod(workspace, 0o555)
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
assert res.exit_code == 0
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
-@pytest.mark.xfail(reason="Incremental builds are currently incompatible with workspace source plugin.")
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
+@pytest.mark.xfail(
+ reason="Incremental builds are currently incompatible with workspace source plugin."
+)
def test_workspace_commanddir(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
- element_name = 'workspace/workspace-commanddir.bst'
+ workspace = os.path.join(cli.directory, "workspace")
+ element_name = "workspace/workspace-commanddir.bst"
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
assert res.exit_code == 0
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- assert os.path.exists(os.path.join(cli.directory, 'workspace'))
- assert os.path.exists(os.path.join(cli.directory, 'workspace', 'build'))
+ assert os.path.exists(os.path.join(cli.directory, "workspace"))
+ assert os.path.exists(os.path.join(cli.directory, "workspace", "build"))
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_workspace_updated_dependency(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
- element_path = os.path.join(project, 'elements')
- element_name = 'workspace/workspace-updated-dependency.bst'
- dep_name = 'workspace/dependency.bst'
+ workspace = os.path.join(cli.directory, "workspace")
+ element_path = os.path.join(project, "elements")
+ element_name = "workspace/workspace-updated-dependency.bst"
+ dep_name = "workspace/dependency.bst"
dependency = {
- 'kind': 'manual',
- 'depends': [{
- 'filename': 'base.bst',
- 'type': 'build'
- }],
- 'config': {
- 'build-commands': [
- 'mkdir -p %{install-root}/etc/test/',
- 'echo "Hello world!" > %{install-root}/etc/test/hello.txt'
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "config": {
+ "build-commands": [
+ "mkdir -p %{install-root}/etc/test/",
+ 'echo "Hello world!" > %{install-root}/etc/test/hello.txt',
]
- }
+ },
}
os.makedirs(os.path.dirname(os.path.join(element_path, dep_name)), exist_ok=True)
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
assert res.exit_code == 0
# We build the workspaced element, so that we have an artifact
# with specific built dependencies
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
# Now we update a dependency of our element.
- dependency['config']['build-commands'] = [
- 'mkdir -p %{install-root}/etc/test/',
- 'echo "Hello china!" > %{install-root}/etc/test/hello.txt'
+ dependency["config"]["build-commands"] = [
+ "mkdir -p %{install-root}/etc/test/",
+ 'echo "Hello china!" > %{install-root}/etc/test/hello.txt',
]
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
@@ -128,185 +147,186 @@ def test_workspace_updated_dependency(cli, datafiles):
# therefore ensure that we change the mtimes of any files touched
# since the last successful build of this element, otherwise this
# build will fail.
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(project=project, args=['shell', element_name, '/usr/bin/test.sh'])
+ res = cli.run(project=project, args=["shell", element_name, "/usr/bin/test.sh"])
assert res.exit_code == 0
- assert res.output == 'Hello china!\n\n'
+ assert res.output == "Hello china!\n\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_workspace_update_dependency_failed(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
- element_path = os.path.join(project, 'elements')
- element_name = 'workspace/workspace-updated-dependency-failed.bst'
- dep_name = 'workspace/dependency.bst'
+ workspace = os.path.join(cli.directory, "workspace")
+ element_path = os.path.join(project, "elements")
+ element_name = "workspace/workspace-updated-dependency-failed.bst"
+ dep_name = "workspace/dependency.bst"
dependency = {
- 'kind': 'manual',
- 'depends': [{
- 'filename': 'base.bst',
- 'type': 'build'
- }],
- 'config': {
- 'build-commands': [
- 'mkdir -p %{install-root}/etc/test/',
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "config": {
+ "build-commands": [
+ "mkdir -p %{install-root}/etc/test/",
'echo "Hello world!" > %{install-root}/etc/test/hello.txt',
- 'echo "Hello brazil!" > %{install-root}/etc/test/brazil.txt'
+ 'echo "Hello brazil!" > %{install-root}/etc/test/brazil.txt',
]
- }
+ },
}
os.makedirs(os.path.dirname(os.path.join(element_path, dep_name)), exist_ok=True)
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
assert res.exit_code == 0
# We build the workspaced element, so that we have an artifact
# with specific built dependencies
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
# Now we update a dependency of our element.
- dependency['config']['build-commands'] = [
- 'mkdir -p %{install-root}/etc/test/',
+ dependency["config"]["build-commands"] = [
+ "mkdir -p %{install-root}/etc/test/",
'echo "Hello china!" > %{install-root}/etc/test/hello.txt',
- 'echo "Hello brazil!" > %{install-root}/etc/test/brazil.txt'
+ 'echo "Hello brazil!" > %{install-root}/etc/test/brazil.txt',
]
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# And our build fails!
- with open(os.path.join(workspace, 'Makefile'), 'a') as f:
+ with open(os.path.join(workspace, "Makefile"), "a") as f:
f.write("\texit 1")
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code != 0
# We update our dependency again...
- dependency['config']['build-commands'] = [
- 'mkdir -p %{install-root}/etc/test/',
+ dependency["config"]["build-commands"] = [
+ "mkdir -p %{install-root}/etc/test/",
'echo "Hello world!" > %{install-root}/etc/test/hello.txt',
- 'echo "Hello spain!" > %{install-root}/etc/test/brazil.txt'
+ 'echo "Hello spain!" > %{install-root}/etc/test/brazil.txt',
]
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# And fix the source
- with open(os.path.join(workspace, 'Makefile'), 'r') as f:
+ with open(os.path.join(workspace, "Makefile"), "r") as f:
makefile = f.readlines()
- with open(os.path.join(workspace, 'Makefile'), 'w') as f:
+ with open(os.path.join(workspace, "Makefile"), "w") as f:
f.write("\n".join(makefile[:-1]))
# Since buildstream thinks hello.txt did not change, we could end
# up not rebuilding a file! We need to make sure that a case like
# this can't blind-side us.
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
- res = cli.run(project=project, args=['shell', element_name, '/usr/bin/test.sh'])
+ res = cli.run(project=project, args=["shell", element_name, "/usr/bin/test.sh"])
assert res.exit_code == 0
- assert res.output == 'Hello world!\nHello spain!\n\n'
+ assert res.output == "Hello world!\nHello spain!\n\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_updated_dependency_nested(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
- element_path = os.path.join(project, 'elements')
- element_name = 'workspace/workspace-updated-dependency-nested.bst'
- dep_name = 'workspace/dependency.bst'
+ workspace = os.path.join(cli.directory, "workspace")
+ element_path = os.path.join(project, "elements")
+ element_name = "workspace/workspace-updated-dependency-nested.bst"
+ dep_name = "workspace/dependency.bst"
dependency = {
- 'kind': 'manual',
- 'depends': [{
- 'filename': 'base.bst',
- 'type': 'build'
- }],
- 'config': {
- 'build-commands': [
- 'mkdir -p %{install-root}/etc/test/tests/',
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "config": {
+ "build-commands": [
+ "mkdir -p %{install-root}/etc/test/tests/",
'echo "Hello world!" > %{install-root}/etc/test/hello.txt',
- 'echo "Hello brazil!" > %{install-root}/etc/test/tests/brazil.txt'
+ 'echo "Hello brazil!" > %{install-root}/etc/test/tests/brazil.txt',
]
- }
+ },
}
os.makedirs(os.path.dirname(os.path.join(element_path, dep_name)), exist_ok=True)
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
assert res.exit_code == 0
# We build the workspaced element, so that we have an artifact
# with specific built dependencies
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
# Now we update a dependency of our element.
- dependency['config']['build-commands'] = [
- 'mkdir -p %{install-root}/etc/test/tests/',
+ dependency["config"]["build-commands"] = [
+ "mkdir -p %{install-root}/etc/test/tests/",
'echo "Hello world!" > %{install-root}/etc/test/hello.txt',
- 'echo "Hello test!" > %{install-root}/etc/test/tests/tests.txt'
+ 'echo "Hello test!" > %{install-root}/etc/test/tests/tests.txt',
]
_yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
    # Buildstream should pick up the newly added file, and notice
    # that the removed file is no longer present
- res = cli.run(project=project, args=['shell', element_name, '/usr/bin/test.sh'])
+ res = cli.run(project=project, args=["shell", element_name, "/usr/bin/test.sh"])
assert res.exit_code == 0
- assert res.output == 'Hello world!\nHello test!\n\n'
+ assert res.output == "Hello world!\nHello test!\n\n"
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
-@pytest.mark.xfail(reason="Incremental builds are currently incompatible with workspace source plugin.")
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
+@pytest.mark.xfail(
+ reason="Incremental builds are currently incompatible with workspace source plugin."
+)
def test_incremental_configure_commands_run_only_once(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
- element_path = os.path.join(project, 'elements')
- element_name = 'workspace/incremental.bst'
+ workspace = os.path.join(cli.directory, "workspace")
+ element_path = os.path.join(project, "elements")
+ element_name = "workspace/incremental.bst"
element = {
- 'kind': 'manual',
- 'depends': [{
- 'filename': 'base.bst',
- 'type': 'build'
- }],
- 'sources': [{
- 'kind': 'local',
- 'path': 'files/workspace-configure-only-once'
- }],
- 'config': {
- 'configure-commands': [
- '$SHELL configure'
- ]
- }
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "sources": [{"kind": "local", "path": "files/workspace-configure-only-once"}],
+ "config": {"configure-commands": ["$SHELL configure"]},
}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# We open a workspace on the above element
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
res.assert_success()
# Then we build, and check whether the configure step succeeded
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
res.assert_success()
- assert os.path.exists(os.path.join(workspace, 'prepared'))
+ assert os.path.exists(os.path.join(workspace, "prepared"))
# When we build again, the configure commands should not be
# called, and we should therefore exit cleanly (the configure
# commands are set to always fail after the first run)
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
res.assert_success()
- assert not os.path.exists(os.path.join(workspace, 'prepared-again'))
+ assert not os.path.exists(os.path.join(workspace, "prepared-again"))
# Test that rebuilding an already built workspaced element does
@@ -319,48 +339,58 @@ def test_incremental_configure_commands_run_only_once(cli, datafiles):
# part of a cleanup job.
#
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
def test_workspace_missing_last_successful(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'workspace')
- element_name = 'workspace/workspace-commanddir.bst'
+ workspace = os.path.join(cli.directory, "workspace")
+ element_name = "workspace/workspace-commanddir.bst"
# Open workspace
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
assert res.exit_code == 0
    # Build first; this will record the last successful build in local state
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
# Remove the artifact from the cache, invalidating the last successful build
- res = cli.run(project=project, args=['artifact', 'delete', element_name])
+ res = cli.run(project=project, args=["artifact", "delete", element_name])
assert res.exit_code == 0
    # Build again, ensure we don't crash just because the artifact went missing
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
assert res.exit_code == 0
# Check that we can still read failed workspace logs
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_workspace_failed_logs(cli, datafiles):
project = str(datafiles)
- workspace = os.path.join(cli.directory, 'failing_amhello')
- element_name = 'autotools/amhello-failure.bst'
+ workspace = os.path.join(cli.directory, "failing_amhello")
+ element_name = "autotools/amhello-failure.bst"
# Open workspace
- res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
+ res = cli.run(
+ project=project,
+ args=["workspace", "open", "--directory", workspace, element_name],
+ )
res.assert_success()
# Try to build and ensure the build fails
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
res.assert_main_error(ErrorDomain.STREAM, None)
- assert cli.get_element_state(project, element_name) == 'failed'
+ assert cli.get_element_state(project, element_name) == "failed"
- res = cli.run(project=project, args=['artifact', 'log', element_name])
+ res = cli.run(project=project, args=["artifact", "log", element_name])
res.assert_success()
log = res.output
diff --git a/tests/internals/cascache.py b/tests/internals/cascache.py
index 81273aeaf..a35d50040 100644
--- a/tests/internals/cascache.py
+++ b/tests/internals/cascache.py
@@ -47,7 +47,9 @@ def test_report_when_cascache_exist_not_cleanly(tmp_path, monkeypatch):
def test_report_when_cascache_is_forcefully_killed(tmp_path, monkeypatch):
dummy_buildbox_casd = tmp_path.joinpath("buildbox-casd")
- dummy_buildbox_casd.write_text("#!/bin/bash\ntrap 'echo hello' SIGTERM\nwhile :\ndo\nsleep 60\ndone")
+ dummy_buildbox_casd.write_text(
+ "#!/bin/bash\ntrap 'echo hello' SIGTERM\nwhile :\ndo\nsleep 60\ndone"
+ )
dummy_buildbox_casd.chmod(0o777)
monkeypatch.setenv("PATH", str(tmp_path), prepend=os.pathsep)
diff --git a/tests/internals/context.py b/tests/internals/context.py
index ddd558b6c..c219d5f5d 100644
--- a/tests/internals/context.py
+++ b/tests/internals/context.py
@@ -7,32 +7,26 @@ import pytest
from buildstream._context import Context
from buildstream._exceptions import LoadError, LoadErrorReason
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'context',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "context",)
# Simple fixture to create a Context object.
@pytest.fixture()
def context_fixture():
- if os.environ.get('XDG_CACHE_HOME'):
- cache_home = os.environ['XDG_CACHE_HOME']
+ if os.environ.get("XDG_CACHE_HOME"):
+ cache_home = os.environ["XDG_CACHE_HOME"]
else:
- cache_home = os.path.expanduser('~/.cache')
+ cache_home = os.path.expanduser("~/.cache")
with Context() as context:
- yield {
- 'xdg-cache': cache_home,
- 'context': context
- }
+ yield {"xdg-cache": cache_home, "context": context}
#######################################
# Test instantiation #
#######################################
def test_context_create(context_fixture):
- context = context_fixture['context']
+ context = context_fixture["context"]
assert isinstance(context, Context)
@@ -40,51 +34,51 @@ def test_context_create(context_fixture):
# Test configuration loading #
#######################################
def test_context_load(context_fixture):
- context = context_fixture['context']
- cache_home = os.path.normpath(context_fixture['xdg-cache'])
+ context = context_fixture["context"]
+ cache_home = os.path.normpath(context_fixture["xdg-cache"])
assert isinstance(context, Context)
context.load(config=os.devnull)
- assert context.sourcedir == os.path.join(cache_home, 'buildstream', 'sources')
- assert context.builddir == os.path.join(cache_home, 'buildstream', 'build')
- assert context.cachedir == os.path.join(cache_home, 'buildstream')
- assert context.logdir == os.path.join(cache_home, 'buildstream', 'logs')
+ assert context.sourcedir == os.path.join(cache_home, "buildstream", "sources")
+ assert context.builddir == os.path.join(cache_home, "buildstream", "build")
+ assert context.cachedir == os.path.join(cache_home, "buildstream")
+ assert context.logdir == os.path.join(cache_home, "buildstream", "logs")
# Assert that a changed XDG_CACHE_HOME doesn't cause issues
def test_context_load_envvar(context_fixture):
- os.environ['XDG_CACHE_HOME'] = '/some/path/'
+ os.environ["XDG_CACHE_HOME"] = "/some/path/"
- context = context_fixture['context']
+ context = context_fixture["context"]
assert isinstance(context, Context)
context.load(config=os.devnull)
- assert context.sourcedir == os.path.join('/', 'some', 'path', 'buildstream', 'sources')
- assert context.builddir == os.path.join('/', 'some', 'path', 'buildstream', 'build')
- assert context.cachedir == os.path.join('/', 'some', 'path', 'buildstream')
- assert context.logdir == os.path.join('/', 'some', 'path', 'buildstream', 'logs')
+ assert context.sourcedir == os.path.join(
+ "/", "some", "path", "buildstream", "sources"
+ )
+ assert context.builddir == os.path.join("/", "some", "path", "buildstream", "build")
+ assert context.cachedir == os.path.join("/", "some", "path", "buildstream")
+ assert context.logdir == os.path.join("/", "some", "path", "buildstream", "logs")
# Reset the environment variable
- del os.environ['XDG_CACHE_HOME']
+ del os.environ["XDG_CACHE_HOME"]
# Test that values in a user specified config file
# override the defaults
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_context_load_user_config(context_fixture, datafiles):
- context = context_fixture['context']
- cache_home = context_fixture['xdg-cache']
+ context = context_fixture["context"]
+ cache_home = context_fixture["xdg-cache"]
assert isinstance(context, Context)
- conf_file = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'userconf.yaml')
+ conf_file = os.path.join(datafiles.dirname, datafiles.basename, "userconf.yaml")
context.load(conf_file)
- assert context.sourcedir == os.path.expanduser('~/pony')
- assert context.builddir == os.path.join(cache_home, 'buildstream', 'build')
- assert context.cachedir == os.path.join(cache_home, 'buildstream')
- assert context.logdir == os.path.join(cache_home, 'buildstream', 'logs')
+ assert context.sourcedir == os.path.expanduser("~/pony")
+ assert context.builddir == os.path.join(cache_home, "buildstream", "build")
+ assert context.cachedir == os.path.join(cache_home, "buildstream")
+ assert context.logdir == os.path.join(cache_home, "buildstream", "logs")
#######################################
@@ -92,12 +86,10 @@ def test_context_load_user_config(context_fixture, datafiles):
#######################################
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_context_load_missing_config(context_fixture, datafiles):
- context = context_fixture['context']
+ context = context_fixture["context"]
assert isinstance(context, Context)
- conf_file = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'nonexistant.yaml')
+ conf_file = os.path.join(datafiles.dirname, datafiles.basename, "nonexistant.yaml")
with pytest.raises(LoadError) as exc:
context.load(conf_file)
@@ -107,12 +99,10 @@ def test_context_load_missing_config(context_fixture, datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_context_load_malformed_config(context_fixture, datafiles):
- context = context_fixture['context']
+ context = context_fixture["context"]
assert isinstance(context, Context)
- conf_file = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'malformed.yaml')
+ conf_file = os.path.join(datafiles.dirname, datafiles.basename, "malformed.yaml")
with pytest.raises(LoadError) as exc:
context.load(conf_file)
@@ -122,12 +112,10 @@ def test_context_load_malformed_config(context_fixture, datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_context_load_notdict_config(context_fixture, datafiles):
- context = context_fixture['context']
+ context = context_fixture["context"]
assert isinstance(context, Context)
- conf_file = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'notdict.yaml')
+ conf_file = os.path.join(datafiles.dirname, datafiles.basename, "notdict.yaml")
with pytest.raises(LoadError) as exc:
context.load(conf_file)
diff --git a/tests/internals/loader.py b/tests/internals/loader.py
index 39ef8ac99..781d144ae 100644
--- a/tests/internals/loader.py
+++ b/tests/internals/loader.py
@@ -10,10 +10,7 @@ from buildstream._loader.loader import _NO_PROGRESS
from tests.testutils import dummy_context
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'loader',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "loader",)
@contextmanager
@@ -26,52 +23,52 @@ def make_loader(basedir):
##############################################################
# Basics: Test behavior loading the simplest of projects #
##############################################################
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'onefile'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_one_file(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader:
- element = loader.load(['elements/onefile.bst'], _NO_PROGRESS)[0]
+ element = loader.load(["elements/onefile.bst"], _NO_PROGRESS)[0]
assert isinstance(element, MetaElement)
- assert element.kind == 'pony'
+ assert element.kind == "pony"
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'onefile'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_missing_file(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(['elements/missing.bst'], _NO_PROGRESS)
+ loader.load(["elements/missing.bst"], _NO_PROGRESS)
assert exc.value.reason == LoadErrorReason.MISSING_FILE
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'onefile'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_invalid_reference(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(['elements/badreference.bst'], _NO_PROGRESS)
+ loader.load(["elements/badreference.bst"], _NO_PROGRESS)
assert exc.value.reason == LoadErrorReason.INVALID_YAML
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'onefile'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_invalid_yaml(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(['elements/badfile.bst'], _NO_PROGRESS)
+ loader.load(["elements/badfile.bst"], _NO_PROGRESS)
assert exc.value.reason == LoadErrorReason.INVALID_YAML
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'onefile'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_fail_fullpath_target(datafiles):
basedir = str(datafiles)
- fullpath = os.path.join(basedir, 'elements', 'onefile.bst')
+ fullpath = os.path.join(basedir, "elements", "onefile.bst")
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
loader.load([fullpath], _NO_PROGRESS)
@@ -79,21 +76,21 @@ def test_fail_fullpath_target(datafiles):
assert exc.value.reason == LoadErrorReason.INVALID_DATA
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'onefile'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_invalid_key(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(['elements/invalidkey.bst'], _NO_PROGRESS)
+ loader.load(["elements/invalidkey.bst"], _NO_PROGRESS)
assert exc.value.reason == LoadErrorReason.INVALID_DATA
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'onefile'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "onefile"))
def test_invalid_directory_load(datafiles):
basedir = str(datafiles)
with make_loader(basedir) as loader, pytest.raises(LoadError) as exc:
- loader.load(['elements/'], _NO_PROGRESS)
+ loader.load(["elements/"], _NO_PROGRESS)
assert exc.value.reason == LoadErrorReason.LOADING_DIRECTORY
diff --git a/tests/internals/pluginfactory.py b/tests/internals/pluginfactory.py
index b3f77c8b1..13c204752 100644
--- a/tests/internals/pluginfactory.py
+++ b/tests/internals/pluginfactory.py
@@ -10,236 +10,267 @@ from buildstream._elementfactory import ElementFactory
from buildstream._sourcefactory import SourceFactory
from buildstream._exceptions import PluginError
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'pluginfactory',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "pluginfactory",)
# Simple fixture to create a PluginBase object that
# we use for loading plugins.
@pytest.fixture()
def plugin_fixture():
- return {
- 'base': PluginBase(package='buildstream.plugins')
- }
+ return {"base": PluginBase(package="buildstream.plugins")}
##############################################################
# Basics: test the fixture, test we can create the factories #
##############################################################
def test_fixture(plugin_fixture):
- assert isinstance(plugin_fixture['base'], PluginBase)
+ assert isinstance(plugin_fixture["base"], PluginBase)
def test_source_factory(plugin_fixture):
- factory = SourceFactory(plugin_fixture['base'])
+ factory = SourceFactory(plugin_fixture["base"])
assert isinstance(factory, SourceFactory)
def test_element_factory(plugin_fixture):
- factory = ElementFactory(plugin_fixture['base'])
+ factory = ElementFactory(plugin_fixture["base"])
assert isinstance(factory, ElementFactory)
##############################################################
# Check that we can load custom sources & elements #
##############################################################
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customsource'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "customsource"))
def test_custom_source(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
assert isinstance(factory, SourceFactory)
- foo_type, _ = factory.lookup('foo')
- assert foo_type.__name__ == 'FooSource'
+ foo_type, _ = factory.lookup("foo")
+ assert foo_type.__name__ == "FooSource"
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customelement'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "customelement"))
def test_custom_element(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
assert isinstance(factory, ElementFactory)
- foo_type, _ = factory.lookup('foo')
- assert foo_type.__name__ == 'FooElement'
+ foo_type, _ = factory.lookup("foo")
+ assert foo_type.__name__ == "FooElement"
##############################################################
# Check plugin loading failure modes #
##############################################################
def test_missing_source(plugin_fixture):
- factory = SourceFactory(plugin_fixture['base'])
+ factory = SourceFactory(plugin_fixture["base"])
assert isinstance(factory, SourceFactory)
# Test fails if PluginError is not raised
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
def test_missing_element(plugin_fixture):
- factory = ElementFactory(plugin_fixture['base'])
+ factory = ElementFactory(plugin_fixture["base"])
assert isinstance(factory, ElementFactory)
# Test fails if PluginError is not raised
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin that returns a value instead of a Source subclass
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'notatype'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "notatype"))
def test_source_notatype(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin that returns a value instead of an Element subclass
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'notatype'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "notatype"))
def test_element_notatype(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin that returns a type
# which is not a Source subclass
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'wrongtype'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "wrongtype"))
def test_source_wrongtype(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin that returns a type
# which is not an Element subclass
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'wrongtype'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "wrongtype"))
def test_element_wrongtype(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin which fails to provide a setup() function
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'nosetup'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "nosetup"))
def test_source_missing_setup(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin which fails to provide a setup() function
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'nosetup'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "nosetup"))
def test_element_missing_setup(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin which provides a setup symbol
# that is not a function
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badsetup'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "badsetup"))
def test_source_bad_setup(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin which provides a setup symbol
# that is not a function
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badsetup'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "badsetup"))
def test_element_bad_setup(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin which requires an absurdly
# high version of buildstream
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionsource'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "badversionsource"))
def test_source_badversion(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = SourceFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = SourceFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
# Load a factory with a plugin which requires an absurdly
# high version of buildstream
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionelement'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "badversionelement"))
def test_element_badversion(plugin_fixture, datafiles):
- plugins = [Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename),
- 'plugins': ['foo']
- })]
- factory = ElementFactory(plugin_fixture['base'], plugin_origins=plugins)
+ plugins = [
+ Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename),
+ "plugins": ["foo"],
+ }
+ )
+ ]
+ factory = ElementFactory(plugin_fixture["base"], plugin_origins=plugins)
with pytest.raises(PluginError):
- factory.lookup('foo')
+ factory.lookup("foo")
##############################################################
@@ -249,56 +280,62 @@ def test_element_badversion(plugin_fixture, datafiles):
# Load two factories, both of which define a different 'foo' plugin
@pytest.mark.datafiles(DATA_DIR)
def test_source_multicontext(plugin_fixture, datafiles):
- plugins1 = Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename,
- 'customsource'),
- 'plugins': ['foo']
- })
- plugins2 = Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename,
- 'anothersource'),
- 'plugins': ['foo']
- })
-
- factory1 = SourceFactory(plugin_fixture['base'], plugin_origins=[plugins1])
- factory2 = SourceFactory(plugin_fixture['base'], plugin_origins=[plugins2])
+ plugins1 = Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(datafiles.dirname, datafiles.basename, "customsource"),
+ "plugins": ["foo"],
+ }
+ )
+ plugins2 = Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(
+ datafiles.dirname, datafiles.basename, "anothersource"
+ ),
+ "plugins": ["foo"],
+ }
+ )
+
+ factory1 = SourceFactory(plugin_fixture["base"], plugin_origins=[plugins1])
+ factory2 = SourceFactory(plugin_fixture["base"], plugin_origins=[plugins2])
assert isinstance(factory1, SourceFactory)
assert isinstance(factory2, SourceFactory)
- foo_type1, _ = factory1.lookup('foo')
- foo_type2, _ = factory2.lookup('foo')
- assert foo_type1.__name__ == 'FooSource'
- assert foo_type2.__name__ == 'AnotherFooSource'
+ foo_type1, _ = factory1.lookup("foo")
+ foo_type2, _ = factory2.lookup("foo")
+ assert foo_type1.__name__ == "FooSource"
+ assert foo_type2.__name__ == "AnotherFooSource"
# Load two factories, both of which define a different 'foo' plugin
@pytest.mark.datafiles(DATA_DIR)
def test_element_multicontext(plugin_fixture, datafiles):
- plugins1 = Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename,
- 'customelement'),
- 'plugins': ['foo']
- })
- plugins2 = Node.from_dict({
- 'origin': 'local',
- 'path': os.path.join(datafiles.dirname,
- datafiles.basename,
- 'anotherelement'),
- 'plugins': ['foo']
- })
-
- factory1 = ElementFactory(plugin_fixture['base'], plugin_origins=[plugins1])
- factory2 = ElementFactory(plugin_fixture['base'], plugin_origins=[plugins2])
+ plugins1 = Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(
+ datafiles.dirname, datafiles.basename, "customelement"
+ ),
+ "plugins": ["foo"],
+ }
+ )
+ plugins2 = Node.from_dict(
+ {
+ "origin": "local",
+ "path": os.path.join(
+ datafiles.dirname, datafiles.basename, "anotherelement"
+ ),
+ "plugins": ["foo"],
+ }
+ )
+
+ factory1 = ElementFactory(plugin_fixture["base"], plugin_origins=[plugins1])
+ factory2 = ElementFactory(plugin_fixture["base"], plugin_origins=[plugins2])
assert isinstance(factory1, ElementFactory)
assert isinstance(factory2, ElementFactory)
- foo_type1, _ = factory1.lookup('foo')
- foo_type2, _ = factory2.lookup('foo')
- assert foo_type1.__name__ == 'FooElement'
- assert foo_type2.__name__ == 'AnotherFooElement'
+ foo_type1, _ = factory1.lookup("foo")
+ foo_type2, _ = factory2.lookup("foo")
+ assert foo_type1.__name__ == "FooElement"
+ assert foo_type2.__name__ == "AnotherFooElement"
diff --git a/tests/internals/pluginfactory/wrongtype/foo.py b/tests/internals/pluginfactory/wrongtype/foo.py
index 3fe9a1a62..37d9f6bfe 100644
--- a/tests/internals/pluginfactory/wrongtype/foo.py
+++ b/tests/internals/pluginfactory/wrongtype/foo.py
@@ -4,7 +4,7 @@
# This one fails the requirement
-class Foo():
+class Foo:
pass
diff --git a/tests/internals/pluginloading.py b/tests/internals/pluginloading.py
index 4b6baf229..5527bf5cc 100644
--- a/tests/internals/pluginloading.py
+++ b/tests/internals/pluginloading.py
@@ -8,55 +8,56 @@ from buildstream._pipeline import Pipeline
from tests.testutils import dummy_context
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'pluginloading',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "pluginloading",)
@contextmanager
def create_pipeline(tmpdir, basedir, target):
with dummy_context() as context:
- context.deploydir = os.path.join(str(tmpdir), 'deploy')
- context.casdir = os.path.join(str(tmpdir), 'cas')
+ context.deploydir = os.path.join(str(tmpdir), "deploy")
+ context.casdir = os.path.join(str(tmpdir), "cas")
project = Project(basedir, context)
pipeline = Pipeline(context, project, None)
- targets, = pipeline.load([(target,)])
+ (targets,) = pipeline.load([(target,)])
yield targets
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customsource'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "customsource"))
def test_customsource(datafiles, tmpdir):
basedir = str(datafiles)
- with create_pipeline(tmpdir, basedir, 'simple.bst') as targets:
+ with create_pipeline(tmpdir, basedir, "simple.bst") as targets:
assert targets[0].get_kind() == "autotools"
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'customelement'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "customelement"))
def test_customelement(datafiles, tmpdir):
basedir = str(datafiles)
- with create_pipeline(tmpdir, basedir, 'simple.bst') as targets:
+ with create_pipeline(tmpdir, basedir, "simple.bst") as targets:
assert targets[0].get_kind() == "foo"
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionsource'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "badversionsource"))
def test_badversionsource(datafiles, tmpdir):
basedir = str(datafiles)
- with pytest.raises(LoadError) as exc, create_pipeline(tmpdir, basedir, 'simple.bst'):
+ with pytest.raises(LoadError) as exc, create_pipeline(
+ tmpdir, basedir, "simple.bst"
+ ):
pass
assert exc.value.reason == LoadErrorReason.UNSUPPORTED_PLUGIN
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'badversionelement'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "badversionelement"))
def test_badversionelement(datafiles, tmpdir):
basedir = str(datafiles)
- with pytest.raises(LoadError) as exc, create_pipeline(tmpdir, basedir, 'simple.bst'):
+ with pytest.raises(LoadError) as exc, create_pipeline(
+ tmpdir, basedir, "simple.bst"
+ ):
pass
assert exc.value.reason == LoadErrorReason.UNSUPPORTED_PLUGIN
diff --git a/tests/internals/pluginloading/customelement/pluginelements/foo.py b/tests/internals/pluginloading/customelement/pluginelements/foo.py
index 823306ebc..c6a85a5b1 100644
--- a/tests/internals/pluginloading/customelement/pluginelements/foo.py
+++ b/tests/internals/pluginloading/customelement/pluginelements/foo.py
@@ -2,7 +2,6 @@ from buildstream import Element
class FooElement(Element):
-
def preflight(self):
pass
diff --git a/tests/internals/pluginloading/customsource/pluginsources/foo.py b/tests/internals/pluginloading/customsource/pluginsources/foo.py
index 8dd16801c..706c96f3b 100644
--- a/tests/internals/pluginloading/customsource/pluginsources/foo.py
+++ b/tests/internals/pluginloading/customsource/pluginsources/foo.py
@@ -2,7 +2,6 @@ from buildstream import Source, Consistency
class FooSource(Source):
-
def preflight(self):
pass
diff --git a/tests/internals/storage.py b/tests/internals/storage.py
index a26ca4858..27dd7e88b 100644
--- a/tests/internals/storage.py
+++ b/tests/internals/storage.py
@@ -7,10 +7,7 @@ from buildstream._cas import CASCache
from buildstream.storage._casbaseddirectory import CasBasedDirectory
from buildstream.storage._filebaseddirectory import FileBasedDirectory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "storage"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "storage")
@contextmanager
@@ -25,8 +22,7 @@ def setup_backend(backend_class, tmpdir):
cas_cache.release_resources()
-@pytest.mark.parametrize("backend", [
- FileBasedDirectory, CasBasedDirectory])
+@pytest.mark.parametrize("backend", [FileBasedDirectory, CasBasedDirectory])
@pytest.mark.datafiles(DATA_DIR)
def test_import(tmpdir, datafiles, backend):
original = os.path.join(str(datafiles), "original")
@@ -38,8 +34,7 @@ def test_import(tmpdir, datafiles, backend):
assert "bin/hello" in c.list_relative_paths()
-@pytest.mark.parametrize("backend", [
- FileBasedDirectory, CasBasedDirectory])
+@pytest.mark.parametrize("backend", [FileBasedDirectory, CasBasedDirectory])
@pytest.mark.datafiles(DATA_DIR)
def test_modified_file_list(tmpdir, datafiles, backend):
original = os.path.join(str(datafiles), "original")
@@ -52,7 +47,9 @@ def test_modified_file_list(tmpdir, datafiles, backend):
c.import_files(overlay)
- print("List of all paths in imported results: {}".format(c.list_relative_paths()))
+ print(
+ "List of all paths in imported results: {}".format(c.list_relative_paths())
+ )
assert "bin/bash" in c.list_relative_paths()
assert "bin/bash" in c.list_modified_paths()
assert "bin/hello" not in c.list_modified_paths()
diff --git a/tests/internals/storage_vdir_import.py b/tests/internals/storage_vdir_import.py
index 808e1be9a..225191b43 100644
--- a/tests/internals/storage_vdir_import.py
+++ b/tests/internals/storage_vdir_import.py
@@ -38,11 +38,11 @@ from buildstream.storage.directory import VirtualDirectoryError
# (directory) with content being the contents for a file or the
# destination for a symlink.
root_filesets = [
- [('a/b/c/textfile1', 'F', 'This is textfile 1\n')],
- [('a/b/c/textfile1', 'F', 'This is the replacement textfile 1\n')],
- [('a/b/f', 'S', '/a/b/c')],
- [('a/b/c', 'D', ''), ('a/b/f', 'S', '/a/b/c')],
- [('a/b/f', 'F', 'This is textfile 1\n')],
+ [("a/b/c/textfile1", "F", "This is textfile 1\n")],
+ [("a/b/c/textfile1", "F", "This is the replacement textfile 1\n")],
+ [("a/b/f", "S", "/a/b/c")],
+ [("a/b/c", "D", ""), ("a/b/f", "S", "/a/b/c")],
+ [("a/b/f", "F", "This is textfile 1\n")],
]
empty_hash_ref = sha256().hexdigest()
@@ -60,14 +60,14 @@ def generate_import_root(rootdir, filelist):
if os.path.exists(rootdir):
return
for (path, typesymbol, content) in filelist:
- if typesymbol == 'F':
+ if typesymbol == "F":
(dirnames, filename) = os.path.split(path)
os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
with open(os.path.join(rootdir, dirnames, filename), "wt") as f:
f.write(content)
- elif typesymbol == 'D':
+ elif typesymbol == "D":
os.makedirs(os.path.join(rootdir, path), exist_ok=True)
- elif typesymbol == 'S':
+ elif typesymbol == "S":
(dirnames, filename) = os.path.split(path)
os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
os.symlink(content, os.path.join(rootdir, path))
@@ -83,26 +83,26 @@ def generate_random_root(rootno, directory):
if os.path.exists(rootdir):
return
things = []
- locations = ['.']
+ locations = ["."]
os.makedirs(rootdir)
for i in range(0, 100):
location = random.choice(locations)
thingname = "node{}".format(i)
- thing = random.choice(['dir', 'link', 'file'])
- if thing == 'dir':
+ thing = random.choice(["dir", "link", "file"])
+ if thing == "dir":
thingname = "dir" + thingname
target = os.path.join(rootdir, location, thingname)
- if thing == 'dir':
+ if thing == "dir":
os.makedirs(target)
locations.append(os.path.join(location, thingname))
- elif thing == 'file':
+ elif thing == "file":
with open(target, "wt") as f:
f.write("This is node {}\n".format(i))
- elif thing == 'link':
- symlink_type = random.choice(['absolute', 'relative', 'broken'])
- if symlink_type == 'broken' or not things:
+ elif thing == "link":
+ symlink_type = random.choice(["absolute", "relative", "broken"])
+ if symlink_type == "broken" or not things:
os.symlink("/broken", target)
- elif symlink_type == 'absolute':
+ elif symlink_type == "absolute":
symlink_destination = random.choice(things)
os.symlink(symlink_destination, target)
else:
@@ -159,7 +159,7 @@ def resolve_symlinks(path, root):
if os.path.islink(location):
# Resolve the link, add on all the remaining components
target = os.path.join(os.readlink(location))
- tail = os.path.sep.join(components[i + 1:])
+ tail = os.path.sep.join(components[i + 1 :])
if target.startswith(os.path.sep):
# Absolute link - relative to root
@@ -194,7 +194,9 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
d2 = create_new_casdir(overlay, cas_cache, tmpdir)
d.import_files(d2)
export_dir = os.path.join(tmpdir, "output-{}-{}".format(original, overlay))
- roundtrip_dir = os.path.join(tmpdir, "roundtrip-{}-{}".format(original, overlay))
+ roundtrip_dir = os.path.join(
+ tmpdir, "roundtrip-{}-{}".format(original, overlay)
+ )
d2.export_files(roundtrip_dir)
d.export_files(export_dir)
@@ -202,22 +204,25 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
for item in root_filesets[overlay - 1]:
(path, typename, content) = item
realpath = resolve_symlinks(path, export_dir)
- if typename == 'F':
+ if typename == "F":
if os.path.isdir(realpath) and directory_not_empty(realpath):
# The file should not have overwritten the directory in this case.
pass
else:
- assert os.path.isfile(realpath), \
- "{} did not exist in the combined virtual directory".format(path)
+ assert os.path.isfile(
+ realpath
+ ), "{} did not exist in the combined virtual directory".format(
+ path
+ )
assert file_contents_are(realpath, content)
- elif typename == 'S':
+ elif typename == "S":
if os.path.isdir(realpath) and directory_not_empty(realpath):
# The symlink should not have overwritten the directory in this case.
pass
else:
assert os.path.islink(realpath)
assert os.readlink(realpath) == content
- elif typename == 'D':
+ elif typename == "D":
# We can't do any more tests than this because it
# depends on things present in the original. Blank
# directories here will be ignored and the original
@@ -236,13 +241,17 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
@pytest.mark.parametrize("original", range(1, len(root_filesets) + 1))
@pytest.mark.parametrize("overlay", range(1, len(root_filesets) + 1))
def test_fixed_cas_import(tmpdir, original, overlay):
- _import_test(str(tmpdir), original, overlay, generate_import_roots, verify_contents=True)
+ _import_test(
+ str(tmpdir), original, overlay, generate_import_roots, verify_contents=True
+ )
@pytest.mark.parametrize("original", range(1, NUM_RANDOM_TESTS + 1))
@pytest.mark.parametrize("overlay", range(1, NUM_RANDOM_TESTS + 1))
def test_random_cas_import(tmpdir, original, overlay):
- _import_test(str(tmpdir), original, overlay, generate_random_root, verify_contents=False)
+ _import_test(
+ str(tmpdir), original, overlay, generate_random_root, verify_contents=False
+ )
def _listing_test(tmpdir, root, generator_function):
@@ -274,22 +283,22 @@ def test_fixed_directory_listing(tmpdir, root):
# Check that the vdir can be descended into and read
def test_descend(tmpdir):
- cas_dir = os.path.join(str(tmpdir), 'cas')
+ cas_dir = os.path.join(str(tmpdir), "cas")
cas_cache = CASCache(cas_dir)
try:
d = CasBasedDirectory(cas_cache)
- Content_to_check = 'You got me'
- test_dir = os.path.join(str(tmpdir), 'importfrom')
+ Content_to_check = "You got me"
+ test_dir = os.path.join(str(tmpdir), "importfrom")
filesys_discription = [
- ('a', 'D', ''),
- ('a/l', 'D', ''),
- ('a/l/g', 'F', Content_to_check)
+ ("a", "D", ""),
+ ("a/l", "D", ""),
+ ("a/l/g", "F", Content_to_check),
]
generate_import_root(test_dir, filesys_discription)
d.import_files(test_dir)
- digest = d.descend('a', 'l').index['g'].get_digest()
+ digest = d.descend("a", "l").index["g"].get_digest()
assert Content_to_check == open(cas_cache.objpath(digest)).read()
finally:
@@ -300,31 +309,31 @@ def test_descend(tmpdir):
# Make sure the correct errors are raised when trying
# to descend into files or links to files
def test_bad_symlinks(tmpdir):
- cas_dir = os.path.join(str(tmpdir), 'cas')
+ cas_dir = os.path.join(str(tmpdir), "cas")
cas_cache = CASCache(cas_dir)
try:
d = CasBasedDirectory(cas_cache)
- test_dir = os.path.join(str(tmpdir), 'importfrom')
+ test_dir = os.path.join(str(tmpdir), "importfrom")
filesys_discription = [
- ('a', 'D', ''),
- ('a/l', 'S', '../target'),
- ('target', 'F', 'You got me')
+ ("a", "D", ""),
+ ("a/l", "S", "../target"),
+ ("target", "F", "You got me"),
]
generate_import_root(test_dir, filesys_discription)
d.import_files(test_dir)
exp_reason = "not-a-directory"
with pytest.raises(VirtualDirectoryError) as error:
- d.descend('a', 'l', follow_symlinks=True)
+ d.descend("a", "l", follow_symlinks=True)
assert error.reason == exp_reason
with pytest.raises(VirtualDirectoryError) as error:
- d.descend('a', 'l')
+ d.descend("a", "l")
assert error.reason == exp_reason
with pytest.raises(VirtualDirectoryError) as error:
- d.descend('a', 'f')
+ d.descend("a", "f")
assert error.reason == exp_reason
finally:
cas_cache.release_resources()
@@ -333,23 +342,23 @@ def test_bad_symlinks(tmpdir):
# Check symlink logic for edge cases
# Check descend across a relative link
def test_relative_symlink(tmpdir):
- cas_dir = os.path.join(str(tmpdir), 'cas')
+ cas_dir = os.path.join(str(tmpdir), "cas")
cas_cache = CASCache(cas_dir)
try:
d = CasBasedDirectory(cas_cache)
- Content_to_check = 'You got me'
- test_dir = os.path.join(str(tmpdir), 'importfrom')
+ Content_to_check = "You got me"
+ test_dir = os.path.join(str(tmpdir), "importfrom")
filesys_discription = [
- ('a', 'D', ''),
- ('a/l', 'S', '../target'),
- ('target', 'D', ''),
- ('target/file', 'F', Content_to_check)
+ ("a", "D", ""),
+ ("a/l", "S", "../target"),
+ ("target", "D", ""),
+ ("target/file", "F", Content_to_check),
]
generate_import_root(test_dir, filesys_discription)
d.import_files(test_dir)
- digest = d.descend('a', 'l', follow_symlinks=True).index['file'].get_digest()
+ digest = d.descend("a", "l", follow_symlinks=True).index["file"].get_digest()
assert Content_to_check == open(cas_cache.objpath(digest)).read()
finally:
cas_cache.release_resources()
@@ -358,23 +367,23 @@ def test_relative_symlink(tmpdir):
# Check symlink logic for edge cases
# Check descend across an absolute link
def test_abs_symlink(tmpdir):
- cas_dir = os.path.join(str(tmpdir), 'cas')
+ cas_dir = os.path.join(str(tmpdir), "cas")
cas_cache = CASCache(cas_dir)
try:
d = CasBasedDirectory(cas_cache)
- Content_to_check = 'two step file'
- test_dir = os.path.join(str(tmpdir), 'importfrom')
+ Content_to_check = "two step file"
+ test_dir = os.path.join(str(tmpdir), "importfrom")
filesys_discription = [
- ('a', 'D', ''),
- ('a/l', 'S', '/target'),
- ('target', 'D', ''),
- ('target/file', 'F', Content_to_check)
+ ("a", "D", ""),
+ ("a/l", "S", "/target"),
+ ("target", "D", ""),
+ ("target/file", "F", Content_to_check),
]
generate_import_root(test_dir, filesys_discription)
d.import_files(test_dir)
- digest = d.descend('a', 'l', follow_symlinks=True).index['file'].get_digest()
+ digest = d.descend("a", "l", follow_symlinks=True).index["file"].get_digest()
assert Content_to_check == open(cas_cache.objpath(digest)).read()
finally:
@@ -384,24 +393,24 @@ def test_abs_symlink(tmpdir):
# Check symlink logic for edge cases
# Check that a symlink cannot escape the root
def test_bad_sym_escape(tmpdir):
- cas_dir = os.path.join(str(tmpdir), 'cas')
+ cas_dir = os.path.join(str(tmpdir), "cas")
cas_cache = CASCache(cas_dir)
try:
d = CasBasedDirectory(cas_cache)
- test_dir = os.path.join(str(tmpdir), 'importfrom')
+ test_dir = os.path.join(str(tmpdir), "importfrom")
filesys_discription = [
- ('jail', 'D', ''),
- ('jail/a', 'D', ''),
- ('jail/a/l', 'S', '../../target'),
- ('target', 'D', ''),
- ('target/file', 'F', 'two step file')
+ ("jail", "D", ""),
+ ("jail/a", "D", ""),
+ ("jail/a/l", "S", "../../target"),
+ ("target", "D", ""),
+ ("target/file", "F", "two step file"),
]
generate_import_root(test_dir, filesys_discription)
- d.import_files(os.path.join(test_dir, 'jail'))
+ d.import_files(os.path.join(test_dir, "jail"))
with pytest.raises(VirtualDirectoryError) as error:
- d.descend('a', 'l', follow_symlinks=True)
+ d.descend("a", "l", follow_symlinks=True)
assert error.reason == "directory-not-found"
finally:
cas_cache.release_resources()
diff --git a/tests/internals/utils_save_atomic.py b/tests/internals/utils_save_atomic.py
index 0731f7bea..898286076 100644
--- a/tests/internals/utils_save_atomic.py
+++ b/tests/internals/utils_save_atomic.py
@@ -5,58 +5,58 @@ from buildstream.utils import save_file_atomic
def test_save_new_file(tmpdir):
- filename = os.path.join(str(tmpdir), 'savefile-success.test')
- with save_file_atomic(filename, 'w') as f:
- f.write('foo\n')
+ filename = os.path.join(str(tmpdir), "savefile-success.test")
+ with save_file_atomic(filename, "w") as f:
+ f.write("foo\n")
- assert os.listdir(str(tmpdir)) == ['savefile-success.test']
+ assert os.listdir(str(tmpdir)) == ["savefile-success.test"]
with open(filename) as f:
- assert f.read() == 'foo\n'
+ assert f.read() == "foo\n"
def test_save_over_existing_file(tmpdir):
- filename = os.path.join(str(tmpdir), 'savefile-overwrite.test')
+ filename = os.path.join(str(tmpdir), "savefile-overwrite.test")
- with open(filename, 'w') as f:
- f.write('existing contents\n')
+ with open(filename, "w") as f:
+ f.write("existing contents\n")
- with save_file_atomic(filename, 'w') as f:
- f.write('overwritten contents\n')
+ with save_file_atomic(filename, "w") as f:
+ f.write("overwritten contents\n")
- assert os.listdir(str(tmpdir)) == ['savefile-overwrite.test']
+ assert os.listdir(str(tmpdir)) == ["savefile-overwrite.test"]
with open(filename) as f:
- assert f.read() == 'overwritten contents\n'
+ assert f.read() == "overwritten contents\n"
def test_exception_new_file(tmpdir):
- filename = os.path.join(str(tmpdir), 'savefile-exception.test')
+ filename = os.path.join(str(tmpdir), "savefile-exception.test")
with pytest.raises(RuntimeError):
- with save_file_atomic(filename, 'w') as f:
- f.write('Some junk\n')
+ with save_file_atomic(filename, "w") as f:
+ f.write("Some junk\n")
raise RuntimeError("Something goes wrong")
assert os.listdir(str(tmpdir)) == []
def test_exception_existing_file(tmpdir):
- filename = os.path.join(str(tmpdir), 'savefile-existing.test')
+ filename = os.path.join(str(tmpdir), "savefile-existing.test")
- with open(filename, 'w') as f:
- f.write('existing contents\n')
+ with open(filename, "w") as f:
+ f.write("existing contents\n")
with pytest.raises(RuntimeError):
- with save_file_atomic(filename, 'w') as f:
- f.write('Some junk\n')
+ with save_file_atomic(filename, "w") as f:
+ f.write("Some junk\n")
raise RuntimeError("Something goes wrong")
- assert os.listdir(str(tmpdir)) == ['savefile-existing.test']
+ assert os.listdir(str(tmpdir)) == ["savefile-existing.test"]
with open(filename) as f:
- assert f.read() == 'existing contents\n'
+ assert f.read() == "existing contents\n"
def test_attributes(tmpdir):
- filename = os.path.join(str(tmpdir), 'savefile-attributes.test')
- with save_file_atomic(filename, 'w') as f:
+ filename = os.path.join(str(tmpdir), "savefile-attributes.test")
+ with save_file_atomic(filename, "w") as f:
assert f.real_filename == filename
assert f.name != filename
diff --git a/tests/internals/yaml.py b/tests/internals/yaml.py
index 93619dc4c..e25cf70b3 100644
--- a/tests/internals/yaml.py
+++ b/tests/internals/yaml.py
@@ -7,21 +7,16 @@ from buildstream import _yaml, Node, ProvenanceInformation, SequenceNode
from buildstream._exceptions import LoadError, LoadErrorReason
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'yaml',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "yaml",)
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_load_yaml(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
loaded = _yaml.load(filename)
- assert loaded.get_str('kind') == 'pony'
+ assert loaded.get_str("kind") == "pony"
def assert_provenance(filename, line, col, node):
@@ -37,12 +32,10 @@ def assert_provenance(filename, line, col, node):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_basic_provenance(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
loaded = _yaml.load(filename)
- assert loaded.get_str('kind') == 'pony'
+ assert loaded.get_str("kind") == "pony"
assert_provenance(filename, 1, 0, loaded)
@@ -50,45 +43,37 @@ def test_basic_provenance(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_member_provenance(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
loaded = _yaml.load(filename)
- assert loaded.get_str('kind') == 'pony'
- assert_provenance(filename, 2, 13, loaded.get_scalar('description'))
+ assert loaded.get_str("kind") == "pony"
+ assert_provenance(filename, 2, 13, loaded.get_scalar("description"))
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_element_provenance(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
loaded = _yaml.load(filename)
- assert loaded.get_str('kind') == 'pony'
- assert_provenance(filename, 5, 2, loaded.get_sequence('moods').scalar_at(1))
+ assert loaded.get_str("kind") == "pony"
+ assert_provenance(filename, 5, 2, loaded.get_sequence("moods").scalar_at(1))
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_mapping_validate_keys(datafiles):
- valid = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
- invalid = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'invalid.yaml')
+ valid = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
+ invalid = os.path.join(datafiles.dirname, datafiles.basename, "invalid.yaml")
base = _yaml.load(valid)
- base.validate_keys(['kind', 'description', 'moods', 'children', 'extra'])
+ base.validate_keys(["kind", "description", "moods", "children", "extra"])
base = _yaml.load(invalid)
with pytest.raises(LoadError) as exc:
- base.validate_keys(['kind', 'description', 'moods', 'children', 'extra'])
+ base.validate_keys(["kind", "description", "moods", "children", "extra"])
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -96,23 +81,21 @@ def test_mapping_validate_keys(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_get(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
base = _yaml.load(filename)
- assert base.get_str('kind') == 'pony'
+ assert base.get_str("kind") == "pony"
- children = base.get_sequence('children')
+ children = base.get_sequence("children")
assert isinstance(children, SequenceNode)
assert len(children) == 7
- child = base.get_sequence('children').mapping_at(6)
- assert_provenance(filename, 20, 8, child.get_scalar('mood'))
+ child = base.get_sequence("children").mapping_at(6)
+ assert_provenance(filename, 20, 8, child.get_scalar("mood"))
- extra = base.get_mapping('extra')
+ extra = base.get_mapping("extra")
with pytest.raises(LoadError) as exc:
- extra.get_mapping('old')
+ extra.get_mapping("old")
assert exc.value.reason == LoadErrorReason.INVALID_DATA
@@ -120,50 +103,44 @@ def test_node_get(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_set(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
base = _yaml.load(filename)
- assert 'mother' not in base
- base['mother'] = 'snow white'
- assert base.get_str('mother') == 'snow white'
+ assert "mother" not in base
+ base["mother"] = "snow white"
+ assert base.get_str("mother") == "snow white"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_set_overwrite(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
base = _yaml.load(filename)
# Overwrite a string
- assert base.get_str('kind') == 'pony'
- base['kind'] = 'cow'
- assert base.get_str('kind') == 'cow'
+ assert base.get_str("kind") == "pony"
+ base["kind"] = "cow"
+ assert base.get_str("kind") == "cow"
# Overwrite a list as a string
- assert base.get_str_list('moods') == ['happy', 'sad']
- base['moods'] = 'unemotional'
- assert base.get_str('moods') == 'unemotional'
+ assert base.get_str_list("moods") == ["happy", "sad"]
+ base["moods"] = "unemotional"
+ assert base.get_str("moods") == "unemotional"
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_set_list_element(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
base = _yaml.load(filename)
- assert base.get_str_list('moods') == ['happy', 'sad']
- base.get_sequence('moods')[0] = 'confused'
+ assert base.get_str_list("moods") == ["happy", "sad"]
+ base.get_sequence("moods")[0] = "confused"
- assert base.get_str_list('moods') == ['confused', 'sad']
+ assert base.get_str_list("moods") == ["confused", "sad"]
# Really this is testing _yaml.node_copy(), we want to
@@ -173,26 +150,22 @@ def test_node_set_list_element(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_composite_preserve_originals(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'basics.yaml')
- overlayfile = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'composite.yaml')
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
+ overlayfile = os.path.join(datafiles.dirname, datafiles.basename, "composite.yaml")
base = _yaml.load(filename)
overlay = _yaml.load(overlayfile)
base_copy = base.clone()
overlay._composite(base_copy)
- copy_extra = base_copy.get_mapping('extra')
- orig_extra = base.get_mapping('extra')
+ copy_extra = base_copy.get_mapping("extra")
+ orig_extra = base.get_mapping("extra")
# Test that the node copy has the overridden value...
- assert copy_extra.get_str('old') == 'override'
+ assert copy_extra.get_str("old") == "override"
# But the original node is not affected by the override.
- assert orig_extra.get_str('old') == 'new'
+ assert orig_extra.get_str("old") == "new"
# Tests for list composition
@@ -210,66 +183,98 @@ def test_composite_preserve_originals(datafiles):
# prov_col: The expected provenance column of "mood"
#
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.parametrize("filename,index,length,mood,prov_file,prov_line,prov_col", [
-
- # Test results of compositing with the (<) prepend directive
- ('listprepend.yaml', 0, 9, 'prepended1', 'listprepend.yaml', 5, 10),
- ('listprepend.yaml', 1, 9, 'prepended2', 'listprepend.yaml', 7, 10),
- ('listprepend.yaml', 2, 9, 'silly', 'basics.yaml', 8, 8),
- ('listprepend.yaml', 8, 9, 'sleepy', 'basics.yaml', 20, 8),
-
- # Test results of compositing with the (>) append directive
- ('listappend.yaml', 7, 9, 'appended1', 'listappend.yaml', 5, 10),
- ('listappend.yaml', 8, 9, 'appended2', 'listappend.yaml', 7, 10),
- ('listappend.yaml', 0, 9, 'silly', 'basics.yaml', 8, 8),
- ('listappend.yaml', 6, 9, 'sleepy', 'basics.yaml', 20, 8),
-
- # Test results of compositing with both (<) and (>) directives
- ('listappendprepend.yaml', 0, 11, 'prepended1', 'listappendprepend.yaml', 5, 10),
- ('listappendprepend.yaml', 1, 11, 'prepended2', 'listappendprepend.yaml', 7, 10),
- ('listappendprepend.yaml', 2, 11, 'silly', 'basics.yaml', 8, 8),
- ('listappendprepend.yaml', 8, 11, 'sleepy', 'basics.yaml', 20, 8),
- ('listappendprepend.yaml', 9, 11, 'appended1', 'listappendprepend.yaml', 10, 10),
- ('listappendprepend.yaml', 10, 11, 'appended2', 'listappendprepend.yaml', 12, 10),
-
- # Test results of compositing with the (=) overwrite directive
- ('listoverwrite.yaml', 0, 2, 'overwrite1', 'listoverwrite.yaml', 5, 10),
- ('listoverwrite.yaml', 1, 2, 'overwrite2', 'listoverwrite.yaml', 7, 10),
-
- # Test results of compositing without any directive, implicitly overwriting
- ('implicitoverwrite.yaml', 0, 2, 'overwrite1', 'implicitoverwrite.yaml', 4, 8),
- ('implicitoverwrite.yaml', 1, 2, 'overwrite2', 'implicitoverwrite.yaml', 6, 8),
-])
-def test_list_composition(datafiles, filename, tmpdir,
- index, length, mood,
- prov_file, prov_line, prov_col):
- base_file = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
+@pytest.mark.parametrize(
+ "filename,index,length,mood,prov_file,prov_line,prov_col",
+ [
+ # Test results of compositing with the (<) prepend directive
+ ("listprepend.yaml", 0, 9, "prepended1", "listprepend.yaml", 5, 10),
+ ("listprepend.yaml", 1, 9, "prepended2", "listprepend.yaml", 7, 10),
+ ("listprepend.yaml", 2, 9, "silly", "basics.yaml", 8, 8),
+ ("listprepend.yaml", 8, 9, "sleepy", "basics.yaml", 20, 8),
+ # Test results of compositing with the (>) append directive
+ ("listappend.yaml", 7, 9, "appended1", "listappend.yaml", 5, 10),
+ ("listappend.yaml", 8, 9, "appended2", "listappend.yaml", 7, 10),
+ ("listappend.yaml", 0, 9, "silly", "basics.yaml", 8, 8),
+ ("listappend.yaml", 6, 9, "sleepy", "basics.yaml", 20, 8),
+ # Test results of compositing with both (<) and (>) directives
+ (
+ "listappendprepend.yaml",
+ 0,
+ 11,
+ "prepended1",
+ "listappendprepend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listappendprepend.yaml",
+ 1,
+ 11,
+ "prepended2",
+ "listappendprepend.yaml",
+ 7,
+ 10,
+ ),
+ ("listappendprepend.yaml", 2, 11, "silly", "basics.yaml", 8, 8),
+ ("listappendprepend.yaml", 8, 11, "sleepy", "basics.yaml", 20, 8),
+ (
+ "listappendprepend.yaml",
+ 9,
+ 11,
+ "appended1",
+ "listappendprepend.yaml",
+ 10,
+ 10,
+ ),
+ (
+ "listappendprepend.yaml",
+ 10,
+ 11,
+ "appended2",
+ "listappendprepend.yaml",
+ 12,
+ 10,
+ ),
+ # Test results of compositing with the (=) overwrite directive
+ ("listoverwrite.yaml", 0, 2, "overwrite1", "listoverwrite.yaml", 5, 10),
+ ("listoverwrite.yaml", 1, 2, "overwrite2", "listoverwrite.yaml", 7, 10),
+ # Test results of compositing without any directive, implicitly overwriting
+ ("implicitoverwrite.yaml", 0, 2, "overwrite1", "implicitoverwrite.yaml", 4, 8),
+ ("implicitoverwrite.yaml", 1, 2, "overwrite2", "implicitoverwrite.yaml", 6, 8),
+ ],
+)
+def test_list_composition(
+ datafiles, filename, tmpdir, index, length, mood, prov_file, prov_line, prov_col
+):
+ base_file = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
overlay_file = os.path.join(datafiles.dirname, datafiles.basename, filename)
- base = _yaml.load(base_file, 'basics.yaml')
+ base = _yaml.load(base_file, "basics.yaml")
overlay = _yaml.load(overlay_file, shortname=filename)
overlay._composite(base)
- children = base.get_sequence('children')
+ children = base.get_sequence("children")
assert len(children) == length
child = children.mapping_at(index)
- assert child.get_str('mood') == mood
- assert_provenance(prov_file, prov_line, prov_col, child.get_node('mood'))
+ assert child.get_str("mood") == mood
+ assert_provenance(prov_file, prov_line, prov_col, child.get_node("mood"))
# Test that overwriting a list with an empty list works as expected.
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_list_deletion(datafiles):
- base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
- overlay = os.path.join(datafiles.dirname, datafiles.basename, 'listoverwriteempty.yaml')
+ base = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
+ overlay = os.path.join(
+ datafiles.dirname, datafiles.basename, "listoverwriteempty.yaml"
+ )
- base = _yaml.load(base, shortname='basics.yaml')
- overlay = _yaml.load(overlay, shortname='listoverwriteempty.yaml')
+ base = _yaml.load(base, shortname="basics.yaml")
+ overlay = _yaml.load(overlay, shortname="listoverwriteempty.yaml")
overlay._composite(base)
- children = base.get_sequence('children')
+ children = base.get_sequence("children")
assert not children
@@ -300,109 +305,411 @@ def test_list_deletion(datafiles):
# prov_col: The expected provenance column of "mood"
#
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.parametrize("filename1,filename2,index,length,mood,prov_file,prov_line,prov_col", [
-
- # Test results of compositing literal list with (>) and then (<)
- ('listprepend.yaml', 'listappend.yaml', 0, 11, 'prepended1', 'listprepend.yaml', 5, 10),
- ('listprepend.yaml', 'listappend.yaml', 1, 11, 'prepended2', 'listprepend.yaml', 7, 10),
- ('listprepend.yaml', 'listappend.yaml', 2, 11, 'silly', 'basics.yaml', 8, 8),
- ('listprepend.yaml', 'listappend.yaml', 8, 11, 'sleepy', 'basics.yaml', 20, 8),
- ('listprepend.yaml', 'listappend.yaml', 9, 11, 'appended1', 'listappend.yaml', 5, 10),
- ('listprepend.yaml', 'listappend.yaml', 10, 11, 'appended2', 'listappend.yaml', 7, 10),
-
- # Test results of compositing literal list with (<) and then (>)
- ('listappend.yaml', 'listprepend.yaml', 0, 11, 'prepended1', 'listprepend.yaml', 5, 10),
- ('listappend.yaml', 'listprepend.yaml', 1, 11, 'prepended2', 'listprepend.yaml', 7, 10),
- ('listappend.yaml', 'listprepend.yaml', 2, 11, 'silly', 'basics.yaml', 8, 8),
- ('listappend.yaml', 'listprepend.yaml', 8, 11, 'sleepy', 'basics.yaml', 20, 8),
- ('listappend.yaml', 'listprepend.yaml', 9, 11, 'appended1', 'listappend.yaml', 5, 10),
- ('listappend.yaml', 'listprepend.yaml', 10, 11, 'appended2', 'listappend.yaml', 7, 10),
-
- # Test results of compositing literal list with (>) and then (>)
- ('listappend.yaml', 'secondappend.yaml', 0, 11, 'silly', 'basics.yaml', 8, 8),
- ('listappend.yaml', 'secondappend.yaml', 6, 11, 'sleepy', 'basics.yaml', 20, 8),
- ('listappend.yaml', 'secondappend.yaml', 7, 11, 'appended1', 'listappend.yaml', 5, 10),
- ('listappend.yaml', 'secondappend.yaml', 8, 11, 'appended2', 'listappend.yaml', 7, 10),
- ('listappend.yaml', 'secondappend.yaml', 9, 11, 'secondappend1', 'secondappend.yaml', 5, 10),
- ('listappend.yaml', 'secondappend.yaml', 10, 11, 'secondappend2', 'secondappend.yaml', 7, 10),
-
-    # Test results of compositing literal list with (<) and then (<)
- ('listprepend.yaml', 'secondprepend.yaml', 0, 11, 'secondprepend1', 'secondprepend.yaml', 5, 10),
- ('listprepend.yaml', 'secondprepend.yaml', 1, 11, 'secondprepend2', 'secondprepend.yaml', 7, 10),
- ('listprepend.yaml', 'secondprepend.yaml', 2, 11, 'prepended1', 'listprepend.yaml', 5, 10),
- ('listprepend.yaml', 'secondprepend.yaml', 3, 11, 'prepended2', 'listprepend.yaml', 7, 10),
- ('listprepend.yaml', 'secondprepend.yaml', 4, 11, 'silly', 'basics.yaml', 8, 8),
- ('listprepend.yaml', 'secondprepend.yaml', 10, 11, 'sleepy', 'basics.yaml', 20, 8),
-
- # Test results of compositing literal list with (>) or (<) and then another literal list
- ('listappend.yaml', 'implicitoverwrite.yaml', 0, 2, 'overwrite1', 'implicitoverwrite.yaml', 4, 8),
- ('listappend.yaml', 'implicitoverwrite.yaml', 1, 2, 'overwrite2', 'implicitoverwrite.yaml', 6, 8),
- ('listprepend.yaml', 'implicitoverwrite.yaml', 0, 2, 'overwrite1', 'implicitoverwrite.yaml', 4, 8),
- ('listprepend.yaml', 'implicitoverwrite.yaml', 1, 2, 'overwrite2', 'implicitoverwrite.yaml', 6, 8),
-
- # Test results of compositing literal list with (>) or (<) and then an explicit (=) overwrite
- ('listappend.yaml', 'listoverwrite.yaml', 0, 2, 'overwrite1', 'listoverwrite.yaml', 5, 10),
- ('listappend.yaml', 'listoverwrite.yaml', 1, 2, 'overwrite2', 'listoverwrite.yaml', 7, 10),
- ('listprepend.yaml', 'listoverwrite.yaml', 0, 2, 'overwrite1', 'listoverwrite.yaml', 5, 10),
- ('listprepend.yaml', 'listoverwrite.yaml', 1, 2, 'overwrite2', 'listoverwrite.yaml', 7, 10),
-
- # Test results of compositing literal list an explicit overwrite (=) and then with (>) or (<)
- ('listoverwrite.yaml', 'listappend.yaml', 0, 4, 'overwrite1', 'listoverwrite.yaml', 5, 10),
- ('listoverwrite.yaml', 'listappend.yaml', 1, 4, 'overwrite2', 'listoverwrite.yaml', 7, 10),
- ('listoverwrite.yaml', 'listappend.yaml', 2, 4, 'appended1', 'listappend.yaml', 5, 10),
- ('listoverwrite.yaml', 'listappend.yaml', 3, 4, 'appended2', 'listappend.yaml', 7, 10),
- ('listoverwrite.yaml', 'listprepend.yaml', 0, 4, 'prepended1', 'listprepend.yaml', 5, 10),
- ('listoverwrite.yaml', 'listprepend.yaml', 1, 4, 'prepended2', 'listprepend.yaml', 7, 10),
- ('listoverwrite.yaml', 'listprepend.yaml', 2, 4, 'overwrite1', 'listoverwrite.yaml', 5, 10),
- ('listoverwrite.yaml', 'listprepend.yaml', 3, 4, 'overwrite2', 'listoverwrite.yaml', 7, 10),
-])
-def test_list_composition_twice(datafiles, tmpdir, filename1, filename2,
- index, length, mood,
- prov_file, prov_line, prov_col):
- file_base = os.path.join(datafiles.dirname, datafiles.basename, 'basics.yaml')
+@pytest.mark.parametrize(
+ "filename1,filename2,index,length,mood,prov_file,prov_line,prov_col",
+ [
+ # Test results of compositing literal list with (>) and then (<)
+ (
+ "listprepend.yaml",
+ "listappend.yaml",
+ 0,
+ 11,
+ "prepended1",
+ "listprepend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listprepend.yaml",
+ "listappend.yaml",
+ 1,
+ 11,
+ "prepended2",
+ "listprepend.yaml",
+ 7,
+ 10,
+ ),
+ ("listprepend.yaml", "listappend.yaml", 2, 11, "silly", "basics.yaml", 8, 8),
+ ("listprepend.yaml", "listappend.yaml", 8, 11, "sleepy", "basics.yaml", 20, 8),
+ (
+ "listprepend.yaml",
+ "listappend.yaml",
+ 9,
+ 11,
+ "appended1",
+ "listappend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listprepend.yaml",
+ "listappend.yaml",
+ 10,
+ 11,
+ "appended2",
+ "listappend.yaml",
+ 7,
+ 10,
+ ),
+ # Test results of compositing literal list with (<) and then (>)
+ (
+ "listappend.yaml",
+ "listprepend.yaml",
+ 0,
+ 11,
+ "prepended1",
+ "listprepend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listappend.yaml",
+ "listprepend.yaml",
+ 1,
+ 11,
+ "prepended2",
+ "listprepend.yaml",
+ 7,
+ 10,
+ ),
+ ("listappend.yaml", "listprepend.yaml", 2, 11, "silly", "basics.yaml", 8, 8),
+ ("listappend.yaml", "listprepend.yaml", 8, 11, "sleepy", "basics.yaml", 20, 8),
+ (
+ "listappend.yaml",
+ "listprepend.yaml",
+ 9,
+ 11,
+ "appended1",
+ "listappend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listappend.yaml",
+ "listprepend.yaml",
+ 10,
+ 11,
+ "appended2",
+ "listappend.yaml",
+ 7,
+ 10,
+ ),
+ # Test results of compositing literal list with (>) and then (>)
+ ("listappend.yaml", "secondappend.yaml", 0, 11, "silly", "basics.yaml", 8, 8),
+ ("listappend.yaml", "secondappend.yaml", 6, 11, "sleepy", "basics.yaml", 20, 8),
+ (
+ "listappend.yaml",
+ "secondappend.yaml",
+ 7,
+ 11,
+ "appended1",
+ "listappend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listappend.yaml",
+ "secondappend.yaml",
+ 8,
+ 11,
+ "appended2",
+ "listappend.yaml",
+ 7,
+ 10,
+ ),
+ (
+ "listappend.yaml",
+ "secondappend.yaml",
+ 9,
+ 11,
+ "secondappend1",
+ "secondappend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listappend.yaml",
+ "secondappend.yaml",
+ 10,
+ 11,
+ "secondappend2",
+ "secondappend.yaml",
+ 7,
+ 10,
+ ),
+        # Test results of compositing literal list with (<) and then (<)
+ (
+ "listprepend.yaml",
+ "secondprepend.yaml",
+ 0,
+ 11,
+ "secondprepend1",
+ "secondprepend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listprepend.yaml",
+ "secondprepend.yaml",
+ 1,
+ 11,
+ "secondprepend2",
+ "secondprepend.yaml",
+ 7,
+ 10,
+ ),
+ (
+ "listprepend.yaml",
+ "secondprepend.yaml",
+ 2,
+ 11,
+ "prepended1",
+ "listprepend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listprepend.yaml",
+ "secondprepend.yaml",
+ 3,
+ 11,
+ "prepended2",
+ "listprepend.yaml",
+ 7,
+ 10,
+ ),
+ ("listprepend.yaml", "secondprepend.yaml", 4, 11, "silly", "basics.yaml", 8, 8),
+ (
+ "listprepend.yaml",
+ "secondprepend.yaml",
+ 10,
+ 11,
+ "sleepy",
+ "basics.yaml",
+ 20,
+ 8,
+ ),
+ # Test results of compositing literal list with (>) or (<) and then another literal list
+ (
+ "listappend.yaml",
+ "implicitoverwrite.yaml",
+ 0,
+ 2,
+ "overwrite1",
+ "implicitoverwrite.yaml",
+ 4,
+ 8,
+ ),
+ (
+ "listappend.yaml",
+ "implicitoverwrite.yaml",
+ 1,
+ 2,
+ "overwrite2",
+ "implicitoverwrite.yaml",
+ 6,
+ 8,
+ ),
+ (
+ "listprepend.yaml",
+ "implicitoverwrite.yaml",
+ 0,
+ 2,
+ "overwrite1",
+ "implicitoverwrite.yaml",
+ 4,
+ 8,
+ ),
+ (
+ "listprepend.yaml",
+ "implicitoverwrite.yaml",
+ 1,
+ 2,
+ "overwrite2",
+ "implicitoverwrite.yaml",
+ 6,
+ 8,
+ ),
+ # Test results of compositing literal list with (>) or (<) and then an explicit (=) overwrite
+ (
+ "listappend.yaml",
+ "listoverwrite.yaml",
+ 0,
+ 2,
+ "overwrite1",
+ "listoverwrite.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listappend.yaml",
+ "listoverwrite.yaml",
+ 1,
+ 2,
+ "overwrite2",
+ "listoverwrite.yaml",
+ 7,
+ 10,
+ ),
+ (
+ "listprepend.yaml",
+ "listoverwrite.yaml",
+ 0,
+ 2,
+ "overwrite1",
+ "listoverwrite.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listprepend.yaml",
+ "listoverwrite.yaml",
+ 1,
+ 2,
+ "overwrite2",
+ "listoverwrite.yaml",
+ 7,
+ 10,
+ ),
+ # Test results of compositing literal list an explicit overwrite (=) and then with (>) or (<)
+ (
+ "listoverwrite.yaml",
+ "listappend.yaml",
+ 0,
+ 4,
+ "overwrite1",
+ "listoverwrite.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listoverwrite.yaml",
+ "listappend.yaml",
+ 1,
+ 4,
+ "overwrite2",
+ "listoverwrite.yaml",
+ 7,
+ 10,
+ ),
+ (
+ "listoverwrite.yaml",
+ "listappend.yaml",
+ 2,
+ 4,
+ "appended1",
+ "listappend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listoverwrite.yaml",
+ "listappend.yaml",
+ 3,
+ 4,
+ "appended2",
+ "listappend.yaml",
+ 7,
+ 10,
+ ),
+ (
+ "listoverwrite.yaml",
+ "listprepend.yaml",
+ 0,
+ 4,
+ "prepended1",
+ "listprepend.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listoverwrite.yaml",
+ "listprepend.yaml",
+ 1,
+ 4,
+ "prepended2",
+ "listprepend.yaml",
+ 7,
+ 10,
+ ),
+ (
+ "listoverwrite.yaml",
+ "listprepend.yaml",
+ 2,
+ 4,
+ "overwrite1",
+ "listoverwrite.yaml",
+ 5,
+ 10,
+ ),
+ (
+ "listoverwrite.yaml",
+ "listprepend.yaml",
+ 3,
+ 4,
+ "overwrite2",
+ "listoverwrite.yaml",
+ 7,
+ 10,
+ ),
+ ],
+)
+def test_list_composition_twice(
+ datafiles,
+ tmpdir,
+ filename1,
+ filename2,
+ index,
+ length,
+ mood,
+ prov_file,
+ prov_line,
+ prov_col,
+):
+ file_base = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
file1 = os.path.join(datafiles.dirname, datafiles.basename, filename1)
file2 = os.path.join(datafiles.dirname, datafiles.basename, filename2)
#####################
# Round 1 - Fight !
#####################
- base = _yaml.load(file_base, shortname='basics.yaml')
+ base = _yaml.load(file_base, shortname="basics.yaml")
overlay1 = _yaml.load(file1, shortname=filename1)
overlay2 = _yaml.load(file2, shortname=filename2)
overlay1._composite(base)
overlay2._composite(base)
- children = base.get_sequence('children')
+ children = base.get_sequence("children")
assert len(children) == length
child = children.mapping_at(index)
- assert child.get_str('mood') == mood
- assert_provenance(prov_file, prov_line, prov_col, child.get_node('mood'))
+ assert child.get_str("mood") == mood
+ assert_provenance(prov_file, prov_line, prov_col, child.get_node("mood"))
#####################
# Round 2 - Fight !
#####################
- base = _yaml.load(file_base, shortname='basics.yaml')
+ base = _yaml.load(file_base, shortname="basics.yaml")
overlay1 = _yaml.load(file1, shortname=filename1)
overlay2 = _yaml.load(file2, shortname=filename2)
overlay2._composite(overlay1)
overlay1._composite(base)
- children = base.get_sequence('children')
+ children = base.get_sequence("children")
assert len(children) == length
child = children.mapping_at(index)
- assert child.get_str('mood') == mood
- assert_provenance(prov_file, prov_line, prov_col, child.get_node('mood'))
+ assert child.get_str("mood") == mood
+ assert_provenance(prov_file, prov_line, prov_col, child.get_node("mood"))
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_convert_value_to_string(datafiles):
- conf_file = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'convert_value_to_str.yaml')
+ conf_file = os.path.join(
+ datafiles.dirname, datafiles.basename, "convert_value_to_str.yaml"
+ )
# Run file through yaml to convert it
test_dict = _yaml.load(conf_file)
@@ -426,9 +733,9 @@ def test_convert_value_to_string(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_value_doesnt_match_expected(datafiles):
- conf_file = os.path.join(datafiles.dirname,
- datafiles.basename,
- 'convert_value_to_str.yaml')
+ conf_file = os.path.join(
+ datafiles.dirname, datafiles.basename, "convert_value_to_str.yaml"
+ )
# Run file through yaml to convert it
test_dict = _yaml.load(conf_file)
@@ -439,11 +746,11 @@ def test_value_doesnt_match_expected(datafiles):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.parametrize('fromdisk', [(True), (False)])
+@pytest.mark.parametrize("fromdisk", [(True), (False)])
def test_roundtrip_dump(datafiles, fromdisk):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- "roundtrip-test.yaml")
+ filename = os.path.join(
+ datafiles.dirname, datafiles.basename, "roundtrip-test.yaml"
+ )
with open(filename, "r") as fh:
rt_raw = fh.read()
if fromdisk:
@@ -480,16 +787,11 @@ def test_roundtrip_dump(datafiles, fromdisk):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
-@pytest.mark.parametrize('case', [
- ['a', 'b', 'c'],
- ['foo', 1],
- ['stuff', 0, 'colour'],
- ['bird', 0, 1],
-])
+@pytest.mark.parametrize(
+ "case", [["a", "b", "c"], ["foo", 1], ["stuff", 0, "colour"], ["bird", 0, 1],]
+)
def test_node_find_target(datafiles, case):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- "traversal.yaml")
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "traversal.yaml")
# We set copy_tree in order to ensure that the nodes in `loaded`
# are not the same nodes as in `prov.toplevel`
loaded = _yaml.load(filename, copy_tree=True)
@@ -523,9 +825,7 @@ def test_node_find_target(datafiles, case):
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_node_find_target_fails(datafiles):
- filename = os.path.join(datafiles.dirname,
- datafiles.basename,
- "traversal.yaml")
+ filename = os.path.join(datafiles.dirname, datafiles.basename, "traversal.yaml")
loaded = _yaml.load(filename, copy_tree=True)
brand_new = Node.from_dict({})
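For readers skimming the yaml test changes above, the typed Node accessors they exercise follow this shape; the sketch below only uses calls that appear in the hunk and loads the same basics.yaml fixture:

from buildstream import _yaml

loaded = _yaml.load("basics.yaml")                      # returns a mapping Node
assert loaded.get_str("kind") == "pony"                 # typed accessor
assert loaded.get_str_list("moods") == ["happy", "sad"]
child = loaded.get_sequence("children").mapping_at(6)   # nested access
mood = child.get_scalar("mood")                         # scalar nodes carry file/line/column provenance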
diff --git a/tests/plugins/deprecationwarnings/deprecationwarnings.py b/tests/plugins/deprecationwarnings/deprecationwarnings.py
index 4d2d22c05..6f20eae8c 100644
--- a/tests/plugins/deprecationwarnings/deprecationwarnings.py
+++ b/tests/plugins/deprecationwarnings/deprecationwarnings.py
@@ -8,19 +8,18 @@ import pytest
from buildstream.testing import cli # pylint: disable=unused-import
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
_DEPRECATION_MESSAGE = "Here is some detail."
-_DEPRECATION_WARNING = "Using deprecated plugin deprecated_plugin: {}".format(_DEPRECATION_MESSAGE)
+_DEPRECATION_WARNING = "Using deprecated plugin deprecated_plugin: {}".format(
+ _DEPRECATION_MESSAGE
+)
@pytest.mark.datafiles(DATA_DIR)
def test_deprecation_warning_present(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['show', 'deprecated.bst'])
+ result = cli.run(project=project, args=["show", "deprecated.bst"])
result.assert_success()
assert _DEPRECATION_WARNING in result.stderr
@@ -28,16 +27,18 @@ def test_deprecation_warning_present(cli, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_suppress_deprecation_warning(cli, datafiles):
project = str(datafiles)
- cli.run(project=project, args=['show', 'manual.bst'])
+ cli.run(project=project, args=["show", "manual.bst"])
- element_overrides = "elements:\n" \
- " deprecated_plugin:\n" \
- " suppress-deprecation-warnings : True\n"
+ element_overrides = (
+ "elements:\n"
+ " deprecated_plugin:\n"
+ " suppress-deprecation-warnings : True\n"
+ )
- project_conf = os.path.join(project, 'project.conf')
- with open(project_conf, 'a') as f:
+ project_conf = os.path.join(project, "project.conf")
+ with open(project_conf, "a") as f:
f.write(element_overrides)
- result = cli.run(project=project, args=['show', 'deprecated.bst'])
+ result = cli.run(project=project, args=["show", "deprecated.bst"])
result.assert_success()
assert _DEPRECATION_WARNING not in result.stderr
diff --git a/tests/remoteexecution/buildfail.py b/tests/remoteexecution/buildfail.py
index 0fb4cdb95..bd1c81891 100644
--- a/tests/remoteexecution/buildfail.py
+++ b/tests/remoteexecution/buildfail.py
@@ -23,53 +23,43 @@ import pytest
from buildstream._exceptions import ErrorDomain
from buildstream import _yaml
-from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
+from buildstream.testing import (
+ cli_remote_execution as cli,
+) # pylint: disable=unused-import
pytestmark = pytest.mark.remoteexecution
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
@pytest.mark.datafiles(DATA_DIR)
def test_build_remote_failure(cli, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element.bst')
- checkout_path = os.path.join(cli.directory, 'checkout')
+ element_path = os.path.join(project, "elements", "element.bst")
+ checkout_path = os.path.join(cli.directory, "checkout")
# Write out our test target
element = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'touch %{install-root}/foo',
- 'false',
- ],
- },
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {"commands": ["touch %{install-root}/foo", "false",],},
}
_yaml.roundtrip_dump(element, element_path)
services = cli.ensure_services()
- assert set(services) == set(['action-cache', 'execution', 'storage'])
+ assert set(services) == set(["action-cache", "execution", "storage"])
# Try to build it, this should result in a failure that contains the content
- result = cli.run(project=project, args=['build', 'element.bst'])
+ result = cli.run(project=project, args=["build", "element.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'element.bst', '--directory', checkout_path
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "element.bst", "--directory", checkout_path],
+ )
result.assert_success()
# check that the file created before the failure exists
- filename = os.path.join(checkout_path, 'foo')
+ filename = os.path.join(checkout_path, "foo")
assert os.path.isfile(filename)
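Several of the remote-execution and sandbox tests in this diff generate throwaway elements by dumping a plain dict with _yaml.roundtrip_dump; after reformatting, mappings that fit on one line are collapsed. A condensed sketch of that pattern (paths here are placeholders):

import os
from buildstream import _yaml

element = {
    "kind": "script",
    "depends": [{"filename": "base.bst", "type": "build"}],
    "config": {"commands": ["touch %{install-root}/foo", "false"]},
}
# Write the element into the project's element path before running `bst build`
_yaml.roundtrip_dump(element, os.path.join("elements", "element.bst"))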
diff --git a/tests/remoteexecution/buildtree.py b/tests/remoteexecution/buildtree.py
index a64b8716c..86efb29f1 100644
--- a/tests/remoteexecution/buildtree.py
+++ b/tests/remoteexecution/buildtree.py
@@ -21,61 +21,85 @@ import os
import shutil
import pytest
-from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
+from buildstream.testing import (
+ cli_remote_execution as cli,
+) # pylint: disable=unused-import
from tests.testutils import create_artifact_share
pytestmark = pytest.mark.remoteexecution
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
@pytest.mark.datafiles(DATA_DIR)
def test_buildtree_remote(cli, tmpdir, datafiles):
project = str(datafiles)
- element_name = 'build-shell/buildtree.bst'
- share_path = os.path.join(str(tmpdir), 'share')
+ element_name = "build-shell/buildtree.bst"
+ share_path = os.path.join(str(tmpdir), "share")
services = cli.ensure_services()
- assert set(services) == set(['action-cache', 'execution', 'storage'])
+ assert set(services) == set(["action-cache", "execution", "storage"])
with create_artifact_share(share_path) as share:
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- 'cache': {'pull-buildtrees': False}
- })
-
- res = cli.run(project=project, args=[
- '--cache-buildtrees', 'always', 'build', element_name])
+ cli.configure(
+ {
+ "artifacts": {"url": share.repo, "push": True},
+ "cache": {"pull-buildtrees": False},
+ }
+ )
+
+ res = cli.run(
+ project=project,
+ args=["--cache-buildtrees", "always", "build", element_name],
+ )
res.assert_success()
# remove local cache
- shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
- shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'artifacts'))
+ shutil.rmtree(os.path.join(str(tmpdir), "cache", "cas"))
+ shutil.rmtree(os.path.join(str(tmpdir), "cache", "artifacts"))
# pull without buildtree
- res = cli.run(project=project, args=[
- 'artifact', 'pull', '--deps', 'all', element_name])
+ res = cli.run(
+ project=project, args=["artifact", "pull", "--deps", "all", element_name]
+ )
res.assert_success()
# check shell doesn't work
- res = cli.run(project=project, args=[
- 'shell', '--build', element_name, '--', 'cat', 'test'
- ])
+ res = cli.run(
+ project=project,
+ args=["shell", "--build", element_name, "--", "cat", "test"],
+ )
res.assert_shell_error()
# pull with buildtree
- res = cli.run(project=project, args=[
- '--pull-buildtrees', 'artifact', 'pull', '--deps', 'all', element_name])
+ res = cli.run(
+ project=project,
+ args=[
+ "--pull-buildtrees",
+ "artifact",
+ "pull",
+ "--deps",
+ "all",
+ element_name,
+ ],
+ )
res.assert_success()
# check it works this time
- res = cli.run(project=project, args=[
- 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
- ])
+ res = cli.run(
+ project=project,
+ args=[
+ "shell",
+ "--build",
+ element_name,
+ "--use-buildtree",
+ "always",
+ "--",
+ "cat",
+ "test",
+ ],
+ )
res.assert_success()
assert "Hi" in res.output
diff --git a/tests/remoteexecution/junction.py b/tests/remoteexecution/junction.py
index db087fd90..dd8d4d4e0 100644
--- a/tests/remoteexecution/junction.py
+++ b/tests/remoteexecution/junction.py
@@ -20,7 +20,9 @@
import os
import pytest
-from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
+from buildstream.testing import (
+ cli_remote_execution as cli,
+) # pylint: disable=unused-import
from buildstream.testing import create_repo
from buildstream import _yaml
from tests.testutils import generate_junction
@@ -28,25 +30,20 @@ from tests.testutils import generate_junction
pytestmark = pytest.mark.remoteexecution
# Project directory
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project",
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project",)
def configure_project(path, config):
- config['name'] = 'test'
- config['element-path'] = 'elements'
- _yaml.roundtrip_dump(config, os.path.join(path, 'project.conf'))
+ config["name"] = "test"
+ config["element-path"] = "elements"
+ _yaml.roundtrip_dump(config, os.path.join(path, "project.conf"))
def create_element(repo, name, path, dependencies, ref=None):
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ],
- 'depends': dependencies
+ "kind": "import",
+ "sources": [repo.source_config(ref=ref)],
+ "depends": dependencies,
}
_yaml.roundtrip_dump(element, os.path.join(path, name))
@@ -54,66 +51,69 @@ def create_element(repo, name, path, dependencies, ref=None):
@pytest.mark.datafiles(DATA_DIR)
def test_junction_build_remote(cli, tmpdir, datafiles):
project = str(datafiles)
- subproject_path = os.path.join(project, 'files', 'sub-project')
- subproject_element_path = os.path.join(subproject_path, 'elements')
- amhello_files_path = os.path.join(subproject_path, 'files')
- element_path = os.path.join(project, 'elements')
- junction_path = os.path.join(element_path, 'junction.bst')
+ subproject_path = os.path.join(project, "files", "sub-project")
+ subproject_element_path = os.path.join(subproject_path, "elements")
+ amhello_files_path = os.path.join(subproject_path, "files")
+ element_path = os.path.join(project, "elements")
+ junction_path = os.path.join(element_path, "junction.bst")
# We need a repo for real trackable elements
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
ref = repo.create(amhello_files_path)
# ensure that the correct project directory is also listed in the junction
- subproject_conf = os.path.join(subproject_path, 'project.conf')
+ subproject_conf = os.path.join(subproject_path, "project.conf")
with open(subproject_conf) as f:
config = f.read()
config = config.format(project_dir=subproject_path)
- with open(subproject_conf, 'w') as f:
+ with open(subproject_conf, "w") as f:
f.write(config)
# Create a trackable element to depend on the cross junction element,
# this one has its ref resolved already
- create_element(repo, 'sub-target.bst', subproject_element_path, ['autotools/amhello.bst'], ref=ref)
+ create_element(
+ repo,
+ "sub-target.bst",
+ subproject_element_path,
+ ["autotools/amhello.bst"],
+ ref=ref,
+ )
# Create a trackable element to depend on the cross junction element
- create_element(repo, 'target.bst', element_path, [
- {
- 'junction': 'junction.bst',
- 'filename': 'sub-target.bst'
- }
- ])
+ create_element(
+ repo,
+ "target.bst",
+ element_path,
+ [{"junction": "junction.bst", "filename": "sub-target.bst"}],
+ )
# Create a repo to hold the subproject and generate a junction element for it
generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)
# Now create a compose element at the top level
element = {
- 'kind': 'compose',
- 'depends': [
- {
- 'filename': 'target.bst',
- 'type': 'build'
- }
- ]
+ "kind": "compose",
+ "depends": [{"filename": "target.bst", "type": "build"}],
}
- _yaml.roundtrip_dump(element, os.path.join(element_path, 'composed.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(element_path, "composed.bst"))
# We're doing remote execution so ensure services are available
services = cli.ensure_services()
- assert set(services) == set(['action-cache', 'execution', 'storage'])
+ assert set(services) == set(["action-cache", "execution", "storage"])
# track the junction first to ensure we have refs
- result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
+ result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
# track target to ensure we have refs
- result = cli.run(project=project, args=['source', 'track', '--deps', 'all', 'composed.bst'])
+ result = cli.run(
+ project=project, args=["source", "track", "--deps", "all", "composed.bst"]
+ )
result.assert_success()
# build
- result = cli.run(project=project, silent=True, args=['build', 'composed.bst'])
+ result = cli.run(project=project, silent=True, args=["build", "composed.bst"])
result.assert_success()
# Assert that the main target is cached as a result
- assert cli.get_element_state(project, 'composed.bst') == 'cached'
+ assert cli.get_element_state(project, "composed.bst") == "cached"
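For orientation, the junction test above boils down to this sequence, using the same cli.run calls as the hunk with the project laid out as shown there:

# Track the junction and the composed target so both have resolved refs
result = cli.run(project=project, args=["source", "track", "junction.bst"])
result.assert_success()
result = cli.run(project=project, args=["source", "track", "--deps", "all", "composed.bst"])
result.assert_success()
# Build remotely and confirm the top-level target ends up cached
result = cli.run(project=project, silent=True, args=["build", "composed.bst"])
result.assert_success()
assert cli.get_element_state(project, "composed.bst") == "cached"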
diff --git a/tests/remoteexecution/partial.py b/tests/remoteexecution/partial.py
index a640f27d5..13b6ff853 100644
--- a/tests/remoteexecution/partial.py
+++ b/tests/remoteexecution/partial.py
@@ -5,7 +5,9 @@ import os
import pytest
from buildstream._exceptions import ErrorDomain
-from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
+from buildstream.testing import (
+ cli_remote_execution as cli,
+) # pylint: disable=unused-import
from buildstream.testing.integration import assert_contains
from tests.testutils.artifactshare import create_artifact_share
@@ -14,75 +16,80 @@ from tests.testutils.artifactshare import create_artifact_share
pytestmark = pytest.mark.remoteexecution
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# Test that `bst build` does not download file blobs of a build-only dependency
# to the local cache.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize('pull_artifact_files', [True, False])
-@pytest.mark.parametrize('build_all', [True, False])
-def test_build_dependency_partial_local_cas(cli, datafiles, pull_artifact_files, build_all):
+@pytest.mark.parametrize("pull_artifact_files", [True, False])
+@pytest.mark.parametrize("build_all", [True, False])
+def test_build_dependency_partial_local_cas(
+ cli, datafiles, pull_artifact_files, build_all
+):
project = str(datafiles)
- element_name = 'no-runtime-deps.bst'
- builddep_element_name = 'autotools/amhello.bst'
- checkout = os.path.join(cli.directory, 'checkout')
- builddep_checkout = os.path.join(cli.directory, 'builddep-checkout')
+ element_name = "no-runtime-deps.bst"
+ builddep_element_name = "autotools/amhello.bst"
+ checkout = os.path.join(cli.directory, "checkout")
+ builddep_checkout = os.path.join(cli.directory, "builddep-checkout")
services = cli.ensure_services()
- assert set(services) == set(['action-cache', 'execution', 'storage'])
+ assert set(services) == set(["action-cache", "execution", "storage"])
# configure pull blobs
if build_all:
- cli.configure({
- 'build': {
- 'dependencies': 'all'
- }
- })
- cli.config['remote-execution']['pull-artifact-files'] = pull_artifact_files
-
- result = cli.run(project=project, args=['build', element_name])
+ cli.configure({"build": {"dependencies": "all"}})
+ cli.config["remote-execution"]["pull-artifact-files"] = pull_artifact_files
+
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
# Verify that the artifact's files are only pulled when pull-artifact-files is set
- result = cli.run(project=project, args=['artifact', 'checkout', element_name,
- '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
if pull_artifact_files:
result.assert_success()
- assert_contains(checkout, ['/test'])
+ assert_contains(checkout, ["/test"])
else:
- result.assert_main_error(ErrorDomain.STREAM, 'uncached-checkout-attempt')
+ result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
# Verify build dependencies are pulled for ALL and BUILD
- result = cli.run(project=project, args=['artifact', 'checkout', builddep_element_name,
- '--directory', builddep_checkout])
+ result = cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ builddep_element_name,
+ "--directory",
+ builddep_checkout,
+ ],
+ )
if build_all and pull_artifact_files:
result.assert_success()
else:
- result.assert_main_error(ErrorDomain.STREAM, 'uncached-checkout-attempt')
+ result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
@pytest.mark.datafiles(DATA_DIR)
def test_build_partial_push(cli, tmpdir, datafiles):
project = str(datafiles)
share_dir = os.path.join(str(tmpdir), "artifactshare")
- element_name = 'no-runtime-deps.bst'
- builddep_element_name = 'autotools/amhello.bst'
+ element_name = "no-runtime-deps.bst"
+ builddep_element_name = "autotools/amhello.bst"
with create_artifact_share(share_dir) as share:
services = cli.ensure_services()
- assert set(services) == set(['action-cache', 'execution', 'storage'])
+ assert set(services) == set(["action-cache", "execution", "storage"])
- cli.config['artifacts'] = {
- 'url': share.repo,
- 'push': True,
+ cli.config["artifacts"] = {
+ "url": share.repo,
+ "push": True,
}
- res = cli.run(project=project, args=['build', element_name])
+ res = cli.run(project=project, args=["build", element_name])
res.assert_success()
assert builddep_element_name in res.get_pushed_elements()
diff --git a/tests/remoteexecution/simple.py b/tests/remoteexecution/simple.py
index 1b7f7818a..a0625038e 100644
--- a/tests/remoteexecution/simple.py
+++ b/tests/remoteexecution/simple.py
@@ -4,56 +4,66 @@
import os
import pytest
-from buildstream.testing import cli_remote_execution as cli # pylint: disable=unused-import
+from buildstream.testing import (
+ cli_remote_execution as cli,
+) # pylint: disable=unused-import
from buildstream.testing.integration import assert_contains
pytestmark = pytest.mark.remoteexecution
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
# Test building an executable with remote-execution:
@pytest.mark.datafiles(DATA_DIR)
def test_remote_autotools_build(cli, datafiles):
project = str(datafiles)
- checkout = os.path.join(cli.directory, 'checkout')
- element_name = 'autotools/amhello.bst'
+ checkout = os.path.join(cli.directory, "checkout")
+ element_name = "autotools/amhello.bst"
services = cli.ensure_services()
- assert set(services) == set(['action-cache', 'execution', 'storage'])
+ assert set(services) == set(["action-cache", "execution", "storage"])
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', element_name, '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", element_name, "--directory", checkout],
+ )
result.assert_success()
- assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
- '/usr/share',
- '/usr/bin/hello', '/usr/share/doc',
- '/usr/share/doc/amhello',
- '/usr/share/doc/amhello/README'])
+ assert_contains(
+ checkout,
+ [
+ "/usr",
+ "/usr/lib",
+ "/usr/bin",
+ "/usr/share",
+ "/usr/bin/hello",
+ "/usr/share/doc",
+ "/usr/share/doc/amhello",
+ "/usr/share/doc/amhello/README",
+ ],
+ )
# Test running an executable built with remote-execution:
@pytest.mark.datafiles(DATA_DIR)
def test_remote_autotools_run(cli, datafiles):
project = str(datafiles)
- element_name = 'autotools/amhello.bst'
+ element_name = "autotools/amhello.bst"
services = cli.ensure_services()
- assert set(services) == set(['action-cache', 'execution', 'storage'])
+ assert set(services) == set(["action-cache", "execution", "storage"])
services = cli.ensure_services()
- result = cli.run(project=project, args=['build', element_name])
+ result = cli.run(project=project, args=["build", element_name])
result.assert_success()
- result = cli.run(project=project, args=['shell', element_name, '/usr/bin/hello'])
+ result = cli.run(project=project, args=["shell", element_name, "/usr/bin/hello"])
result.assert_success()
- assert result.output == 'Hello World!\nThis is amhello 1.0.\n'
+ assert result.output == "Hello World!\nThis is amhello 1.0.\n"
diff --git a/tests/sandboxes/fallback.py b/tests/sandboxes/fallback.py
index f2f585e70..948e3a6de 100644
--- a/tests/sandboxes/fallback.py
+++ b/tests/sandboxes/fallback.py
@@ -26,51 +26,43 @@ from buildstream.testing import cli # pylint: disable=unused-import
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
def test_fallback_platform_fails(cli, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element.bst')
+ element_path = os.path.join(project, "elements", "element.bst")
# Write out our test target
element = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'true',
- ],
- },
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {"commands": ["true",],},
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(project=project, args=['build', 'element.bst'],
- env={'BST_FORCE_BACKEND': 'fallback',
- 'BST_FORCE_SANDBOX': None})
+ result = cli.run(
+ project=project,
+ args=["build", "element.bst"],
+ env={"BST_FORCE_BACKEND": "fallback", "BST_FORCE_SANDBOX": None},
+ )
result.assert_main_error(ErrorDomain.STREAM, None)
assert "FallBack platform only implements dummy sandbox" in result.stderr
# The dummy sandbox cannot build the element, but it can get the element ready.
# Therefore the element should be `buildable` rather than `waiting`
- assert cli.get_element_state(project, 'element.bst') == 'buildable'
+ assert cli.get_element_state(project, "element.bst") == "buildable"
@pytest.mark.datafiles(DATA_DIR)
def test_fallback_platform_can_use_dummy(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'import-file1.bst'],
- env={'BST_FORCE_BACKEND': 'fallback',
- 'BST_FORCE_SANDBOX': None})
+ result = cli.run(
+ project=project,
+ args=["build", "import-file1.bst"],
+ env={"BST_FORCE_BACKEND": "fallback", "BST_FORCE_SANDBOX": None},
+ )
result.assert_success()
# The fallback platform can still provide a dummy sandbox that allows simple elements that do not need
# a full sandbox to still be built on new platforms.
diff --git a/tests/sandboxes/missing-command.py b/tests/sandboxes/missing-command.py
index 171e855f7..87e668966 100644
--- a/tests/sandboxes/missing-command.py
+++ b/tests/sandboxes/missing-command.py
@@ -9,15 +9,12 @@ from buildstream._exceptions import ErrorDomain
from buildstream.testing import cli # pylint: disable=unused-import
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "missing-command"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "missing-command")
@pytest.mark.datafiles(DATA_DIR)
def test_missing_command(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['build', 'no-runtime.bst'])
- result.assert_task_error(ErrorDomain.SANDBOX, 'missing-command')
- assert cli.get_element_state(project, 'no-runtime.bst') == 'failed'
+ result = cli.run(project=project, args=["build", "no-runtime.bst"])
+ result.assert_task_error(ErrorDomain.SANDBOX, "missing-command")
+ assert cli.get_element_state(project, "no-runtime.bst") == "failed"
diff --git a/tests/sandboxes/missing_dependencies.py b/tests/sandboxes/missing_dependencies.py
index a5bf31e76..16754747e 100644
--- a/tests/sandboxes/missing_dependencies.py
+++ b/tests/sandboxes/missing_dependencies.py
@@ -13,8 +13,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
# Project directory
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "missing-dependencies",
+ os.path.dirname(os.path.realpath(__file__)), "missing-dependencies",
)
@@ -25,85 +24,69 @@ def _symlink_host_tools_to_dir(host_tools, dir_):
os.symlink(utils.get_host_tool(tool), str(target_path))
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on Linux')
+@pytest.mark.skipif(not IS_LINUX, reason="Only available on Linux")
@pytest.mark.datafiles(DATA_DIR)
def test_missing_bwrap_has_nice_error_message(cli, datafiles, tmp_path):
# Create symlink to buildbox-casd and git to work with custom PATH
bin_dir = tmp_path / "bin"
- _symlink_host_tools_to_dir(['buildbox-casd', 'git'], bin_dir)
+ _symlink_host_tools_to_dir(["buildbox-casd", "git"], bin_dir)
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element.bst')
+ element_path = os.path.join(project, "elements", "element.bst")
# Write out our test target
element = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'false',
- ],
- },
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {"commands": ["false",],},
}
_yaml.roundtrip_dump(element, element_path)
# Build without access to host tools, this should fail with a nice error
result = cli.run(
project=project,
- args=['build', 'element.bst'],
- env={'PATH': str(bin_dir),
- 'BST_FORCE_SANDBOX': None})
- result.assert_task_error(ErrorDomain.SANDBOX, 'unavailable-local-sandbox')
+ args=["build", "element.bst"],
+ env={"PATH": str(bin_dir), "BST_FORCE_SANDBOX": None},
+ )
+ result.assert_task_error(ErrorDomain.SANDBOX, "unavailable-local-sandbox")
assert "not found" in result.stderr
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on Linux')
+@pytest.mark.skipif(not IS_LINUX, reason="Only available on Linux")
@pytest.mark.datafiles(DATA_DIR)
def test_old_brwap_has_nice_error_message(cli, datafiles, tmp_path):
- bwrap = tmp_path.joinpath('bin/bwrap')
+ bwrap = tmp_path.joinpath("bin/bwrap")
bwrap.parent.mkdir()
- with bwrap.open('w') as fp:
- fp.write('''
+ with bwrap.open("w") as fp:
+ fp.write(
+ """
#!/bin/sh
echo bubblewrap 0.0.1
- '''.strip())
+ """.strip()
+ )
bwrap.chmod(0o755)
# Create symlink to buildbox-casd and git to work with custom PATH
bin_dir = tmp_path / "bin"
- _symlink_host_tools_to_dir(['buildbox-casd', 'git'], bin_dir)
+ _symlink_host_tools_to_dir(["buildbox-casd", "git"], bin_dir)
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element3.bst')
+ element_path = os.path.join(project, "elements", "element3.bst")
# Write out our test target
element = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'false',
- ],
- },
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {"commands": ["false",],},
}
_yaml.roundtrip_dump(element, element_path)
# Build without access to host tools, this should fail with a nice error
result = cli.run(
project=project,
- args=['--debug', '--verbose', 'build', 'element3.bst'],
- env={'PATH': str(bin_dir),
- 'BST_FORCE_SANDBOX': None})
- result.assert_task_error(ErrorDomain.SANDBOX, 'unavailable-local-sandbox')
+ args=["--debug", "--verbose", "build", "element3.bst"],
+ env={"PATH": str(bin_dir), "BST_FORCE_SANDBOX": None},
+ )
+ result.assert_task_error(ErrorDomain.SANDBOX, "unavailable-local-sandbox")
assert "too old" in result.stderr
diff --git a/tests/sandboxes/mounting/mount_simple.py b/tests/sandboxes/mounting/mount_simple.py
index 65aaf209d..0e78a5603 100644
--- a/tests/sandboxes/mounting/mount_simple.py
+++ b/tests/sandboxes/mounting/mount_simple.py
@@ -13,35 +13,35 @@ def test_bind_mount():
src = stack.enter_context(tempfile.TemporaryDirectory())
target = stack.enter_context(tempfile.TemporaryDirectory())
- with open(os.path.join(src, 'test'), 'a') as test:
- test.write('Test')
+ with open(os.path.join(src, "test"), "a") as test:
+ test.write("Test")
with Mounter.bind_mount(target, src) as dest:
# Ensure we get the correct path back
assert dest == target
# Ensure we can access files from src from target
- with open(os.path.join(target, 'test'), 'r') as test:
- assert test.read() == 'Test'
+ with open(os.path.join(target, "test"), "r") as test:
+ assert test.read() == "Test"
# Ensure the files from src are gone from target
with pytest.raises(FileNotFoundError):
- with open(os.path.join(target, 'test'), 'r'):
+ with open(os.path.join(target, "test"), "r"):
# Actual contents don't matter
pass
# Ensure the files in src are still in src
- with open(os.path.join(src, 'test'), 'r') as test:
- assert test.read() == 'Test'
+ with open(os.path.join(src, "test"), "r") as test:
+ assert test.read() == "Test"
@pytest.mark.skipif(not os.geteuid() == 0, reason="requires root permissions")
def test_mount_proc():
with ExitStack() as stack:
- src = '/proc'
+ src = "/proc"
target = stack.enter_context(tempfile.TemporaryDirectory())
- with Mounter.mount(target, src, mount_type='proc', ro=True) as dest:
+ with Mounter.mount(target, src, mount_type="proc", ro=True) as dest:
# Ensure we get the correct path back
assert dest == target
diff --git a/tests/sandboxes/remote-exec-config.py b/tests/sandboxes/remote-exec-config.py
index a6aeeb7ab..889448954 100644
--- a/tests/sandboxes/remote-exec-config.py
+++ b/tests/sandboxes/remote-exec-config.py
@@ -10,8 +10,7 @@ from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream.testing.runcli import cli # pylint: disable=unused-import
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "remote-exec-config"
+ os.path.dirname(os.path.realpath(__file__)), "remote-exec-config"
)
# Tests that we get a useful error message when supplying invalid
@@ -22,81 +21,74 @@ DATA_DIR = os.path.join(
# are used at once, a LoadError results.
@pytest.mark.datafiles(DATA_DIR)
def test_old_and_new_configs(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "missing-certs")
project_conf = {
- 'name': 'test',
-
- 'remote-execution': {
- 'url': 'https://cache.example.com:12345',
- 'execution-service': {
- 'url': 'http://localhost:8088'
- },
- 'storage-service': {
- 'url': 'http://charactron:11001',
- }
- }
+ "name": "test",
+ "remote-execution": {
+ "url": "https://cache.example.com:12345",
+ "execution-service": {"url": "http://localhost:8088"},
+ "storage-service": {"url": "http://charactron:11001",},
+ },
}
- project_conf_file = os.path.join(project, 'project.conf')
+ project_conf_file = os.path.join(project, "project.conf")
_yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
# This does not happen for a simple `bst show`.
- result = cli.run(project=project, args=['artifact', 'pull', 'element.bst'])
- result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one")
+ result = cli.run(project=project, args=["artifact", "pull", "element.bst"])
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one"
+ )
# Assert that if either the client key or client cert is specified
# without specifying its counterpart, we get a comprehensive LoadError
# instead of an unhandled exception.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize('config_key, config_value', [
- ('client-cert', 'client.crt'),
- ('client-key', 'client.key')
-])
+@pytest.mark.parametrize(
+ "config_key, config_value",
+ [("client-cert", "client.crt"), ("client-key", "client.key")],
+)
def test_missing_certs(cli, datafiles, config_key, config_value):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "missing-certs")
project_conf = {
- 'name': 'test',
-
- 'remote-execution': {
- 'execution-service': {
- 'url': 'http://localhost:8088'
- },
- 'storage-service': {
- 'url': 'http://charactron:11001',
+ "name": "test",
+ "remote-execution": {
+ "execution-service": {"url": "http://localhost:8088"},
+ "storage-service": {
+ "url": "http://charactron:11001",
config_key: config_value,
- }
- }
+ },
+ },
}
- project_conf_file = os.path.join(project, 'project.conf')
+ project_conf_file = os.path.join(project, "project.conf")
_yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
# This does not happen for a simple `bst show`.
- result = cli.run(project=project, args=['show', 'element.bst'])
- result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "Your config is missing")
+ result = cli.run(project=project, args=["show", "element.bst"])
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "Your config is missing"
+ )
# Assert that if incomplete information is supplied we get a sensible error message.
@pytest.mark.datafiles(DATA_DIR)
def test_empty_config(cli, datafiles):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
-
- project_conf = {
- 'name': 'test',
+ project = os.path.join(datafiles.dirname, datafiles.basename, "missing-certs")
- 'remote-execution': {
- }
- }
- project_conf_file = os.path.join(project, 'project.conf')
+ project_conf = {"name": "test", "remote-execution": {}}
+ project_conf_file = os.path.join(project, "project.conf")
_yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
# This does not happen for a simple `bst show`.
- result = cli.run(project=project, args=['artifact', 'pull', 'element.bst'])
- result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one")
+ result = cli.run(project=project, args=["artifact", "pull", "element.bst"])
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one"
+ )
diff --git a/tests/sandboxes/selection.py b/tests/sandboxes/selection.py
index b4bbb1b00..70fbdac70 100644
--- a/tests/sandboxes/selection.py
+++ b/tests/sandboxes/selection.py
@@ -26,79 +26,62 @@ from buildstream.testing import cli # pylint: disable=unused-import
pytestmark = pytest.mark.integration
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- "project"
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
@pytest.mark.datafiles(DATA_DIR)
def test_force_sandbox(cli, datafiles):
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element.bst')
+ element_path = os.path.join(project, "elements", "element.bst")
# Write out our test target
element = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'true',
- ],
- },
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {"commands": ["true",],},
}
_yaml.roundtrip_dump(element, element_path)
# Build without access to host tools, this will fail
- result = cli.run(project=project, args=['build', 'element.bst'], env={'PATH': '', 'BST_FORCE_SANDBOX': 'bwrap'})
+ result = cli.run(
+ project=project,
+ args=["build", "element.bst"],
+ env={"PATH": "", "BST_FORCE_SANDBOX": "bwrap"},
+ )
result.assert_main_error(ErrorDomain.PLATFORM, None)
assert "Bubblewrap not found" in result.stderr
# we have asked for a specific sandbox, but it is not available so
# bst should fail early and the element should be waiting
- assert cli.get_element_state(project, 'element.bst') == 'waiting'
+ assert cli.get_element_state(project, "element.bst") == "waiting"
@pytest.mark.datafiles(DATA_DIR)
def test_dummy_sandbox_fallback(cli, datafiles, tmp_path):
# Create symlink to buildbox-casd to work with custom PATH
- buildbox_casd = tmp_path.joinpath('bin/buildbox-casd')
+ buildbox_casd = tmp_path.joinpath("bin/buildbox-casd")
buildbox_casd.parent.mkdir()
- os.symlink(utils.get_host_tool('buildbox-casd'), str(buildbox_casd))
+ os.symlink(utils.get_host_tool("buildbox-casd"), str(buildbox_casd))
project = str(datafiles)
- element_path = os.path.join(project, 'elements', 'element.bst')
+ element_path = os.path.join(project, "elements", "element.bst")
# Write out our test target
element = {
- 'kind': 'script',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build',
- },
- ],
- 'config': {
- 'commands': [
- 'true',
- ],
- },
+ "kind": "script",
+ "depends": [{"filename": "base.bst", "type": "build",},],
+ "config": {"commands": ["true",],},
}
_yaml.roundtrip_dump(element, element_path)
# Build without access to host tools, this will fail
result = cli.run(
project=project,
- args=['build', 'element.bst'],
- env={'PATH': str(tmp_path.joinpath('bin')),
- 'BST_FORCE_SANDBOX': None})
+ args=["build", "element.bst"],
+ env={"PATH": str(tmp_path.joinpath("bin")), "BST_FORCE_SANDBOX": None},
+ )
# But if we don't specify a sandbox then we fall back to dummy, we still
# fail early but only once we know we need a fancy sandbox and that
# dummy is not enough, therefore the element gets fetched and so is buildable
- result.assert_task_error(ErrorDomain.SANDBOX, 'unavailable-local-sandbox')
- assert cli.get_element_state(project, 'element.bst') == 'buildable'
+ result.assert_task_error(ErrorDomain.SANDBOX, "unavailable-local-sandbox")
+ assert cli.get_element_state(project, "element.bst") == "buildable"
diff --git a/tests/sourcecache/cache.py b/tests/sourcecache/cache.py
index 9aa2c67ac..a4878e73d 100644
--- a/tests/sourcecache/cache.py
+++ b/tests/sourcecache/cache.py
@@ -39,10 +39,10 @@ def test_patch_sources_cached_1(cli, datafiles):
# as we have a local, patch, local config, the first local and patch should
# be cached together, and the last local on its own
- source_protos = os.path.join(project_dir, 'cache', 'source_protos')
+ source_protos = os.path.join(project_dir, "cache", "source_protos")
- assert len(os.listdir(os.path.join(source_protos, 'patch'))) == 1
- assert len(os.listdir(os.path.join(source_protos, 'local'))) == 2
+ assert len(os.listdir(os.path.join(source_protos, "patch"))) == 1
+ assert len(os.listdir(os.path.join(source_protos, "local"))) == 2
@pytest.mark.datafiles(DATA_DIR)
@@ -53,9 +53,9 @@ def test_patch_sources_cached_2(cli, datafiles):
res.assert_success()
# As everything is before the patch it should all be cached together
- source_protos = os.path.join(project_dir, 'cache', 'source_protos')
+ source_protos = os.path.join(project_dir, "cache", "source_protos")
- assert len(os.listdir(os.path.join(source_protos, 'patch'))) == 1
+ assert len(os.listdir(os.path.join(source_protos, "patch"))) == 1
@pytest.mark.datafiles(DATA_DIR)
@@ -66,35 +66,34 @@ def test_sources_without_patch(cli, datafiles):
res.assert_success()
# No patches so everything should be cached separately
- source_protos = os.path.join(project_dir, 'cache', 'source_protos')
+ source_protos = os.path.join(project_dir, "cache", "source_protos")
- assert len(os.listdir(os.path.join(source_protos, 'local'))) == 3
+ assert len(os.listdir(os.path.join(source_protos, "local"))) == 3
@pytest.mark.datafiles(DATA_DIR)
def test_source_cache_key(cli, datafiles):
project_dir = str(datafiles)
- file_path = os.path.join(project_dir, 'files')
- file_url = 'file://' + file_path
- element_path = os.path.join(project_dir, 'elements')
- element_name = 'key_check.bst'
+ file_path = os.path.join(project_dir, "files")
+ file_url = "file://" + file_path
+ element_path = os.path.join(project_dir, "elements")
+ element_name = "key_check.bst"
element = {
- 'kind': 'import',
- 'sources': [
+ "kind": "import",
+ "sources": [
{
- 'kind': 'remote',
- 'url': os.path.join(file_url, 'bin-files', 'usr', 'bin', 'hello'),
- 'directory': 'usr/bin'
- }, {
- 'kind': 'remote',
- 'url': os.path.join(file_url, 'dev-files', 'usr', 'include', 'pony.h'),
- 'directory': 'usr/include'
- }, {
- 'kind': 'patch',
- 'path': 'files/hello-patch.diff'
- }
- ]
+ "kind": "remote",
+ "url": os.path.join(file_url, "bin-files", "usr", "bin", "hello"),
+ "directory": "usr/bin",
+ },
+ {
+ "kind": "remote",
+ "url": os.path.join(file_url, "dev-files", "usr", "include", "pony.h"),
+ "directory": "usr/include",
+ },
+ {"kind": "patch", "path": "files/hello-patch.diff"},
+ ],
}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
@@ -105,11 +104,13 @@ def test_source_cache_key(cli, datafiles):
res.assert_success()
# Should have one source ref
- patch_protos = os.path.join(project_dir, 'cache', 'source_protos', 'patch')
+ patch_protos = os.path.join(project_dir, "cache", "source_protos", "patch")
assert len(os.listdir(patch_protos)) == 1
# modify hello-patch file and check tracking updates refs
- with open(os.path.join(file_path, 'dev-files', 'usr', 'include', 'pony.h'), 'a') as f:
+ with open(
+ os.path.join(file_path, "dev-files", "usr", "include", "pony.h"), "a"
+ ) as f:
f.write("\nappending nonsense")
res = cli.run(project=project_dir, args=["source", "track", element_name])
diff --git a/tests/sourcecache/config.py b/tests/sourcecache/config.py
index 2ab11e9f9..aaf46459e 100644
--- a/tests/sourcecache/config.py
+++ b/tests/sourcecache/config.py
@@ -36,27 +36,26 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
# without specifying its counterpart, we get a comprehensive LoadError
# instead of an unhandled exception.
@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize('config_key, config_value', [
- ('client-cert', 'client.crt'),
- ('client-key', 'client.key')
-])
+@pytest.mark.parametrize(
+ "config_key, config_value",
+ [("client-cert", "client.crt"), ("client-key", "client.key")],
+)
def test_missing_certs(cli, datafiles, config_key, config_value):
- project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
+ project = os.path.join(datafiles.dirname, datafiles.basename, "missing-certs")
project_conf = {
- 'name': 'test',
-
- 'source-caches': {
- 'url': 'https://cache.example.com:12345',
- 'push': 'true',
- config_key: config_value
- }
+ "name": "test",
+ "source-caches": {
+ "url": "https://cache.example.com:12345",
+ "push": "true",
+ config_key: config_value,
+ },
}
- project_conf_file = os.path.join(project, 'project.conf')
+ project_conf_file = os.path.join(project, "project.conf")
_yaml.roundtrip_dump(project_conf, project_conf_file)
# Use `pull` here to ensure we try to initialize the remotes, triggering the error
#
# This does not happen for a simple `bst show`.
- result = cli.run(project=project, args=['source', 'fetch', 'element.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "element.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index a5863b867..bc3f32e66 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -36,22 +36,19 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")
def move_local_cas_to_remote_source_share(local, remote):
- shutil.rmtree(os.path.join(remote, 'repo', 'cas'))
- shutil.move(os.path.join(local, 'source_protos'), os.path.join(remote, 'repo'))
- shutil.move(os.path.join(local, 'cas'), os.path.join(remote, 'repo'))
- shutil.rmtree(os.path.join(local, 'sources'))
- shutil.rmtree(os.path.join(local, 'artifacts'))
+ shutil.rmtree(os.path.join(remote, "repo", "cas"))
+ shutil.move(os.path.join(local, "source_protos"), os.path.join(remote, "repo"))
+ shutil.move(os.path.join(local, "cas"), os.path.join(remote, "repo"))
+ shutil.rmtree(os.path.join(local, "sources"))
+ shutil.rmtree(os.path.join(local, "artifacts"))
def create_test_element(tmpdir, project_dir):
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project_dir, 'files'))
- element_path = os.path.join(project_dir, 'elements')
- element_name = 'fetch.bst'
- element = {
- 'kind': 'import',
- 'sources': [repo.source_config(ref=ref)]
- }
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project_dir, "files"))
+ element_path = os.path.join(project_dir, "elements")
+ element_name = "fetch.bst"
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
return element_name, repo, ref
@@ -59,15 +56,11 @@ def create_test_element(tmpdir, project_dir):
@contextmanager
def context_with_source_cache(cli, cache, share, tmpdir):
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'source-caches': {
- 'url': share.repo,
- },
- 'cachedir': cache,
+ "scheduler": {"pushers": 1},
+ "source-caches": {"url": share.repo,},
+ "cachedir": cache,
}
_yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
@@ -80,10 +73,10 @@ def context_with_source_cache(cli, cache, share, tmpdir):
def test_source_fetch(cli, tmpdir, datafiles):
project_dir = str(datafiles)
element_name, _repo, _ref = create_test_element(tmpdir, project_dir)
- cache_dir = os.path.join(str(tmpdir), 'cache')
+ cache_dir = os.path.join(str(tmpdir), "cache")
# use artifact cache for sources for now, they should work the same
- with create_artifact_share(os.path.join(str(tmpdir), 'sourceshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
with context_with_source_cache(cli, cache_dir, share, tmpdir) as context:
project = Project(project_dir, context)
project.ensure_fully_loaded()
@@ -96,10 +89,12 @@ def test_source_fetch(cli, tmpdir, datafiles):
assert not cas.contains(source._get_source_name())
# Just check that we sensibly fetch and build the element
- res = cli.run(project=project_dir, args=['build', element_name])
+ res = cli.run(project=project_dir, args=["build", element_name])
res.assert_success()
- assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) != []
+ assert (
+ os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) != []
+ )
# get root digest of source
sourcecache = context.sourcecache
@@ -111,26 +106,28 @@ def test_source_fetch(cli, tmpdir, datafiles):
assert share.has_object(digest)
state = cli.get_element_state(project_dir, element_name)
- assert state == 'fetch needed'
+ assert state == "fetch needed"
# Now fetch the source and check
- res = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ res = cli.run(project=project_dir, args=["source", "fetch", element_name])
res.assert_success()
assert "Pulled source" in res.stderr
# check that we have the source in the cas now and it's not fetched
assert element._source_cached()
- assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) == []
+ assert (
+ os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) == []
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_fetch_fallback(cli, tmpdir, datafiles):
project_dir = str(datafiles)
element_name, repo, ref = create_test_element(tmpdir, project_dir)
- cache_dir = os.path.join(str(tmpdir), 'cache')
+ cache_dir = os.path.join(str(tmpdir), "cache")
# use artifact cache for sources for now, they should work the same
- with create_artifact_share(os.path.join(str(tmpdir), 'sourceshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
with context_with_source_cache(cli, cache_dir, share, tmpdir) as context:
project = Project(project_dir, context)
project.ensure_fully_loaded()
@@ -141,16 +138,20 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
cas = context.get_cascache()
assert not cas.contains(source._get_source_name())
- assert not os.path.exists(os.path.join(cache_dir, 'sources'))
+ assert not os.path.exists(os.path.join(cache_dir, "sources"))
# Now check if it falls back to the source fetch method.
- res = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ res = cli.run(project=project_dir, args=["source", "fetch", element_name])
res.assert_success()
brief_key = source._get_brief_display_key()
- assert ("Remote source service ({}) does not have source {} cached"
- .format(share.repo, brief_key)) in res.stderr
- assert ("SUCCESS Fetching from {}"
- .format(repo.source_config(ref=ref)['url'])) in res.stderr
+ assert (
+ "Remote source service ({}) does not have source {} cached".format(
+ share.repo, brief_key
+ )
+ ) in res.stderr
+ assert (
+ "SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])
+ ) in res.stderr
# Check that the source is both in the source dir and the local CAS
assert element._source_cached()
@@ -160,9 +161,9 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
def test_pull_fail(cli, tmpdir, datafiles):
project_dir = str(datafiles)
element_name, repo, _ref = create_test_element(tmpdir, project_dir)
- cache_dir = os.path.join(str(tmpdir), 'cache')
+ cache_dir = os.path.join(str(tmpdir), "cache")
- with create_artifact_share(os.path.join(str(tmpdir), 'sourceshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
with context_with_source_cache(cli, cache_dir, share, tmpdir) as context:
project = Project(project_dir, context)
project.ensure_fully_loaded()
@@ -175,21 +176,25 @@ def test_pull_fail(cli, tmpdir, datafiles):
shutil.rmtree(repo.repo)
# Should fail in stream, with a plugin task causing the error
- res = cli.run(project=project_dir, args=['build', element_name])
+ res = cli.run(project=project_dir, args=["build", element_name])
res.assert_main_error(ErrorDomain.STREAM, None)
res.assert_task_error(ErrorDomain.PLUGIN, None)
- assert "Remote source service ({}) does not have source {} cached".format(
- share.repo, source._get_brief_display_key()) in res.stderr
+ assert (
+ "Remote source service ({}) does not have source {} cached".format(
+ share.repo, source._get_brief_display_key()
+ )
+ in res.stderr
+ )
@pytest.mark.datafiles(DATA_DIR)
def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
project_dir = str(datafiles)
element_name, repo, ref = create_test_element(tmpdir, project_dir)
- cache_dir = os.path.join(str(tmpdir), 'cache')
+ cache_dir = os.path.join(str(tmpdir), "cache")
# use artifact cache for sources for now, they should work the same
- with create_artifact_share(os.path.join(str(tmpdir), 'sourceshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
with context_with_source_cache(cli, cache_dir, share, tmpdir) as context:
project = Project(project_dir, context)
project.ensure_fully_loaded()
@@ -202,10 +207,12 @@ def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
assert not cas.contains(source._get_source_name())
# Just check that we sensibly fetch and build the element
- res = cli.run(project=project_dir, args=['build', element_name])
+ res = cli.run(project=project_dir, args=["build", element_name])
res.assert_success()
- assert os.listdir(os.path.join(str(tmpdir), 'cache', 'sources', 'git')) != []
+ assert (
+ os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) != []
+ )
# get root digest of source
sourcecache = context.sourcecache
@@ -214,16 +221,19 @@ def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
move_local_cas_to_remote_source_share(str(cache_dir), share.directory)
# Remove the cas content, only keep the proto and such around
- shutil.rmtree(os.path.join(str(tmpdir), "sourceshare", "repo", "cas", "objects"))
+ shutil.rmtree(
+ os.path.join(str(tmpdir), "sourceshare", "repo", "cas", "objects")
+ )
# check the share doesn't have the object
assert not share.has_object(digest)
state = cli.get_element_state(project_dir, element_name)
- assert state == 'fetch needed'
+ assert state == "fetch needed"
# Now fetch the source and check
- res = cli.run(project=project_dir, args=['source', 'fetch', element_name])
+ res = cli.run(project=project_dir, args=["source", "fetch", element_name])
res.assert_success()
- assert ("SUCCESS Fetching from {}"
- .format(repo.source_config(ref=ref)['url'])) in res.stderr
+ assert (
+ "SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])
+ ) in res.stderr
diff --git a/tests/sourcecache/project/plugins/elements/always_fail.py b/tests/sourcecache/project/plugins/elements/always_fail.py
index 99ef0d7de..43dba5626 100644
--- a/tests/sourcecache/project/plugins/elements/always_fail.py
+++ b/tests/sourcecache/project/plugins/elements/always_fail.py
@@ -23,7 +23,6 @@ from buildstream.buildelement import BuildElement
class AlwaysFail(BuildElement):
-
def assemble(self, sandbox):
raise ElementError("Always fails")
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index 406aeba9f..719860425 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -47,6 +47,7 @@ def message_handler(message, is_silenced):
@contextmanager
def _configure_caches(tmpdir, *directories):
with ExitStack() as stack:
+
def create_share(directory):
return create_artifact_share(os.path.join(str(tmpdir), directory))
@@ -55,37 +56,27 @@ def _configure_caches(tmpdir, *directories):
@pytest.mark.datafiles(DATA_DIR)
def test_source_push_split(cli, tmpdir, datafiles):
- cache_dir = os.path.join(str(tmpdir), 'cache')
+ cache_dir = os.path.join(str(tmpdir), "cache")
project_dir = str(datafiles)
- with _configure_caches(tmpdir, 'indexshare', 'storageshare') as (index, storage):
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ with _configure_caches(tmpdir, "indexshare", "storageshare") as (index, storage):
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'source-caches': [{
- 'url': index.repo,
- 'push': True,
- 'type': 'index'
- }, {
- 'url': storage.repo,
- 'push': True,
- 'type': 'storage'
- }],
- 'cachedir': cache_dir
+ "scheduler": {"pushers": 1},
+ "source-caches": [
+ {"url": index.repo, "push": True, "type": "index"},
+ {"url": storage.repo, "push": True, "type": "storage"},
+ ],
+ "cachedir": cache_dir,
}
_yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project_dir, 'files'))
- element_path = os.path.join(project_dir, 'elements')
- element_name = 'push.bst'
- element = {
- 'kind': 'import',
- 'sources': [repo.source_config(ref=ref)]
- }
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project_dir, "files"))
+ element_path = os.path.join(project_dir, "elements")
+ element_name = "push.bst"
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# get the source object
@@ -93,7 +84,7 @@ def test_source_push_split(cli, tmpdir, datafiles):
project = Project(project_dir, context)
project.ensure_fully_loaded()
- element = project.load_elements(['push.bst'])[0]
+ element = project.load_elements(["push.bst"])[0]
assert not element._source_cached()
source = list(element.sources())[0]
@@ -103,7 +94,7 @@ def test_source_push_split(cli, tmpdir, datafiles):
# build the element, this should fetch and then push the source to the
# remote
- res = cli.run(project=project_dir, args=['build', 'push.bst'])
+ res = cli.run(project=project_dir, args=["build", "push.bst"])
res.assert_success()
assert "Pushed source" in res.stderr
@@ -118,32 +109,24 @@ def test_source_push_split(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_source_push(cli, tmpdir, datafiles):
- cache_dir = os.path.join(str(tmpdir), 'cache')
+ cache_dir = os.path.join(str(tmpdir), "cache")
project_dir = str(datafiles)
- with create_artifact_share(os.path.join(str(tmpdir), 'sourceshare')) as share:
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'source-caches': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': cache_dir,
+ "scheduler": {"pushers": 1},
+ "source-caches": {"url": share.repo, "push": True,},
+ "cachedir": cache_dir,
}
_yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project_dir, 'files'))
- element_path = os.path.join(project_dir, 'elements')
- element_name = 'push.bst'
- element = {
- 'kind': 'import',
- 'sources': [repo.source_config(ref=ref)]
- }
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project_dir, "files"))
+ element_path = os.path.join(project_dir, "elements")
+ element_name = "push.bst"
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# get the source object
@@ -151,7 +134,7 @@ def test_source_push(cli, tmpdir, datafiles):
project = Project(project_dir, context)
project.ensure_fully_loaded()
- element = project.load_elements(['push.bst'])[0]
+ element = project.load_elements(["push.bst"])[0]
assert not element._source_cached()
source = list(element.sources())[0]
@@ -161,7 +144,7 @@ def test_source_push(cli, tmpdir, datafiles):
# build the element, this should fetch and then push the source to the
# remote
- res = cli.run(project=project_dir, args=['build', 'push.bst'])
+ res = cli.run(project=project_dir, args=["build", "push.bst"])
res.assert_success()
assert "Pushed source" in res.stderr
@@ -177,35 +160,27 @@ def test_source_push(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull(cli, datafiles, tmpdir):
project_dir = str(datafiles)
- cache_dir = os.path.join(str(tmpdir), 'cache')
+ cache_dir = os.path.join(str(tmpdir), "cache")
- with create_artifact_share(os.path.join(str(tmpdir), 'sourceshare')) as share:
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'source-caches': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': cache_dir,
+ "scheduler": {"pushers": 1},
+ "source-caches": {"url": share.repo, "push": True,},
+ "cachedir": cache_dir,
}
_yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
# create repo to pull from
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project_dir, 'files'))
- element_path = os.path.join(project_dir, 'elements')
- element_name = 'push.bst'
- element = {
- 'kind': 'import',
- 'sources': [repo.source_config(ref=ref)]
- }
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project_dir, "files"))
+ element_path = os.path.join(project_dir, "elements")
+ element_name = "push.bst"
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- res = cli.run(project=project_dir, args=['build', 'push.bst'])
+ res = cli.run(project=project_dir, args=["build", "push.bst"])
res.assert_success()
# remove local cache dir, and repo files and check it all works
@@ -214,45 +189,37 @@ def test_push_pull(cli, datafiles, tmpdir):
shutil.rmtree(repo.repo)
# check it pulls from the share
- res = cli.run(project=project_dir, args=['build', 'push.bst'])
+ res = cli.run(project=project_dir, args=["build", "push.bst"])
res.assert_success()
@pytest.mark.datafiles(DATA_DIR)
def test_push_fail(cli, tmpdir, datafiles):
project_dir = str(datafiles)
- cache_dir = os.path.join(str(tmpdir), 'cache')
+ cache_dir = os.path.join(str(tmpdir), "cache")
# set up config with remote that we'll take down
- with create_artifact_share(os.path.join(str(tmpdir), 'sourceshare')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share:
remote = share.repo
- user_config_file = str(tmpdir.join('buildstream.conf'))
+ user_config_file = str(tmpdir.join("buildstream.conf"))
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'source-caches': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': cache_dir,
+ "scheduler": {"pushers": 1},
+ "source-caches": {"url": share.repo, "push": True,},
+ "cachedir": cache_dir,
}
_yaml.roundtrip_dump(user_config, file=user_config_file)
cli.configure(user_config)
# create repo to pull from
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project_dir, 'files'))
- element_path = os.path.join(project_dir, 'elements')
- element_name = 'push.bst'
- element = {
- 'kind': 'import',
- 'sources': [repo.source_config(ref=ref)]
- }
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project_dir, "files"))
+ element_path = os.path.join(project_dir, "elements")
+ element_name = "push.bst"
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
# build and check that it fails to set up the remote
- res = cli.run(project=project_dir, args=['build', 'push.bst'])
+ res = cli.run(project=project_dir, args=["build", "push.bst"])
res.assert_success()
assert "Failed to initialize remote {}".format(remote) in res.stderr
@@ -260,37 +227,29 @@ def test_push_fail(cli, tmpdir, datafiles):
assert "Pushed" not in res.stderr
-@pytest.mark.xfail(HAVE_SANDBOX == 'buildbox', reason='Not working with BuildBox')
+@pytest.mark.xfail(HAVE_SANDBOX == "buildbox", reason="Not working with BuildBox")
@pytest.mark.datafiles(DATA_DIR)
def test_source_push_build_fail(cli, tmpdir, datafiles):
project_dir = str(datafiles)
- cache_dir = os.path.join(str(tmpdir), 'cache')
+ cache_dir = os.path.join(str(tmpdir), "cache")
- with create_artifact_share(os.path.join(str(tmpdir), 'share')) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
user_config = {
- 'scheduler': {
- 'pushers': 1
- },
- 'source-caches': {
- 'url': share.repo,
- 'push': True,
- },
- 'cachedir': cache_dir,
+ "scheduler": {"pushers": 1},
+ "source-caches": {"url": share.repo, "push": True,},
+ "cachedir": cache_dir,
}
cli.configure(user_config)
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project_dir, 'files'))
- element_path = os.path.join(project_dir, 'elements')
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project_dir, "files"))
+ element_path = os.path.join(project_dir, "elements")
- element_name = 'always-fail.bst'
- element = {
- 'kind': 'always_fail',
- 'sources': [repo.source_config(ref=ref)]
- }
+ element_name = "always-fail.bst"
+ element = {"kind": "always_fail", "sources": [repo.source_config(ref=ref)]}
_yaml.roundtrip_dump(element, os.path.join(element_path, element_name))
- res = cli.run(project=project_dir, args=['build', 'always-fail.bst'])
+ res = cli.run(project=project_dir, args=["build", "always-fail.bst"])
res.assert_main_error(ErrorDomain.STREAM, None)
res.assert_task_error(ErrorDomain.ELEMENT, None)
diff --git a/tests/sourcecache/source-checkout.py b/tests/sourcecache/source-checkout.py
index 4e3391c12..2d2e71565 100644
--- a/tests/sourcecache/source-checkout.py
+++ b/tests/sourcecache/source-checkout.py
@@ -36,20 +36,23 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
@pytest.mark.datafiles(DATA_DIR)
def test_source_checkout(tmpdir, datafiles, cli):
- project_dir = os.path.join(str(tmpdir), 'project')
- element_path = 'elements'
- cache_dir = os.path.join(str(tmpdir), 'cache')
- source_dir = os.path.join(cache_dir, 'sources')
+ project_dir = os.path.join(str(tmpdir), "project")
+ element_path = "elements"
+ cache_dir = os.path.join(str(tmpdir), "cache")
+ source_dir = os.path.join(cache_dir, "sources")
- cli.configure({
- 'cachedir': cache_dir,
- })
- target_dir = os.path.join(str(tmpdir), 'target')
+ cli.configure(
+ {"cachedir": cache_dir,}
+ )
+ target_dir = os.path.join(str(tmpdir), "target")
- repo = create_element_size('target.bst', project_dir, element_path, [], 100000)
+ repo = create_element_size("target.bst", project_dir, element_path, [], 100000)
# check implicit fetching
- res = cli.run(project=project_dir, args=['source', 'checkout', '--directory', target_dir, 'target.bst'])
+ res = cli.run(
+ project=project_dir,
+ args=["source", "checkout", "--directory", target_dir, "target.bst"],
+ )
res.assert_success()
assert "Fetching from" in res.stderr
@@ -59,15 +62,19 @@ def test_source_checkout(tmpdir, datafiles, cli):
shutil.rmtree(target_dir)
shutil.rmtree(source_dir)
- res = cli.run(project=project_dir,
- args=['source', 'checkout', '--directory', target_dir, 'target.bst'])
+ res = cli.run(
+ project=project_dir,
+ args=["source", "checkout", "--directory", target_dir, "target.bst"],
+ )
res.assert_success()
assert "Fetching from" not in res.stderr
# remove the CAS and check it doesn't work again
shutil.rmtree(target_dir)
- shutil.rmtree(os.path.join(cache_dir, 'cas'))
+ shutil.rmtree(os.path.join(cache_dir, "cas"))
- res = cli.run(project=project_dir,
- args=['source', 'checkout', '--directory', target_dir, 'target.bst'])
+ res = cli.run(
+ project=project_dir,
+ args=["source", "checkout", "--directory", target_dir, "target.bst"],
+ )
res.assert_task_error(ErrorDomain.PLUGIN, None)
diff --git a/tests/sourcecache/staging.py b/tests/sourcecache/staging.py
index b0cc03119..246c4863f 100644
--- a/tests/sourcecache/staging.py
+++ b/tests/sourcecache/staging.py
@@ -45,12 +45,10 @@ def relative_walk(rootdir):
@pytest.mark.datafiles(DATA_DIR)
def test_source_staged(tmpdir, cli, datafiles):
- project_dir = os.path.join(datafiles.dirname, datafiles.basename, 'project')
- cachedir = os.path.join(str(tmpdir), 'cache')
+ project_dir = os.path.join(datafiles.dirname, datafiles.basename, "project")
+ cachedir = os.path.join(str(tmpdir), "cache")
- cli.configure({
- 'cachedir': cachedir
- })
+ cli.configure({"cachedir": cachedir})
res = cli.run(project=project_dir, args=["build", "import-bin.bst"])
res.assert_success()
@@ -83,12 +81,10 @@ def test_source_staged(tmpdir, cli, datafiles):
# Check sources are staged during a fetch
@pytest.mark.datafiles(DATA_DIR)
def test_source_fetch(tmpdir, cli, datafiles):
- project_dir = os.path.join(datafiles.dirname, datafiles.basename, 'project')
- cachedir = os.path.join(str(tmpdir), 'cache')
+ project_dir = os.path.join(datafiles.dirname, datafiles.basename, "project")
+ cachedir = os.path.join(str(tmpdir), "cache")
- cli.configure({
- 'cachedir': cachedir
- })
+ cli.configure({"cachedir": cachedir})
res = cli.run(project=project_dir, args=["source", "fetch", "import-dev.bst"])
res.assert_success()
@@ -118,17 +114,15 @@ def test_source_fetch(tmpdir, cli, datafiles):
# Check that with sources only in the CAS build successfully completes
@pytest.mark.datafiles(DATA_DIR)
def test_staged_source_build(tmpdir, datafiles, cli):
- project_dir = os.path.join(datafiles.dirname, datafiles.basename, 'project')
- cachedir = os.path.join(str(tmpdir), 'cache')
- element_path = 'elements'
- source_protos = os.path.join(str(tmpdir), 'cache', 'source_protos')
- source_dir = os.path.join(str(tmpdir), 'cache', 'sources')
+ project_dir = os.path.join(datafiles.dirname, datafiles.basename, "project")
+ cachedir = os.path.join(str(tmpdir), "cache")
+ element_path = "elements"
+ source_protos = os.path.join(str(tmpdir), "cache", "source_protos")
+ source_dir = os.path.join(str(tmpdir), "cache", "sources")
- cli.configure({
- 'cachedir': cachedir
- })
+ cli.configure({"cachedir": cachedir})
- create_element_size('target.bst', project_dir, element_path, [], 10000)
+ create_element_size("target.bst", project_dir, element_path, [], 10000)
with dummy_context() as context:
context.cachedir = cachedir
@@ -141,23 +135,23 @@ def test_staged_source_build(tmpdir, datafiles, cli):
# local sources set BST_KEY_REQUIRES_STAGE so this is cached
assert element._source_cached()
- res = cli.run(project=project_dir, args=['build', 'target.bst'])
+ res = cli.run(project=project_dir, args=["build", "target.bst"])
res.assert_success()
# delete artifacts check state is buildable
- cli.remove_artifact_from_cache(project_dir, 'target.bst')
- states = cli.get_element_states(project_dir, ['target.bst'])
- assert states['target.bst'] == 'buildable'
+ cli.remove_artifact_from_cache(project_dir, "target.bst")
+ states = cli.get_element_states(project_dir, ["target.bst"])
+ assert states["target.bst"] == "buildable"
# delete source dir and check that state is still buildable
shutil.rmtree(source_dir)
- states = cli.get_element_states(project_dir, ['target.bst'])
- assert states['target.bst'] == 'buildable'
+ states = cli.get_element_states(project_dir, ["target.bst"])
+ assert states["target.bst"] == "buildable"
# build and check that no fetching was done.
- res = cli.run(project=project_dir, args=['build', 'target.bst'])
+ res = cli.run(project=project_dir, args=["build", "target.bst"])
res.assert_success()
- assert 'Fetching from' not in res.stderr
+ assert "Fetching from" not in res.stderr
# assert the source directory is still empty (though there may be
# directories from staging etc.)
@@ -168,11 +162,11 @@ def test_staged_source_build(tmpdir, datafiles, cli):
# Now remove the source refs and check the state
shutil.rmtree(source_protos)
- cli.remove_artifact_from_cache(project_dir, 'target.bst')
- states = cli.get_element_states(project_dir, ['target.bst'])
- assert states['target.bst'] == 'fetch needed'
+ cli.remove_artifact_from_cache(project_dir, "target.bst")
+ states = cli.get_element_states(project_dir, ["target.bst"])
+ assert states["target.bst"] == "fetch needed"
# Check that it now fetches when building the target
- res = cli.run(project=project_dir, args=['build', 'target.bst'])
+ res = cli.run(project=project_dir, args=["build", "target.bst"])
res.assert_success()
- assert 'Fetching from' in res.stderr
+ assert "Fetching from" in res.stderr
diff --git a/tests/sourcecache/workspace.py b/tests/sourcecache/workspace.py
index 22316c4fe..3b6e265e9 100644
--- a/tests/sourcecache/workspace.py
+++ b/tests/sourcecache/workspace.py
@@ -39,66 +39,65 @@ DATA_DIR = os.path.dirname(os.path.realpath(__file__))
# for opening a workspace
@pytest.mark.datafiles(DATA_DIR)
def test_workspace_source_fetch(tmpdir, datafiles, cli):
- project_dir = os.path.join(str(tmpdir), 'project')
- element_path = 'elements'
- source_dir = os.path.join(str(tmpdir), 'cache', 'sources')
- workspace = os.path.join(cli.directory, 'workspace')
+ project_dir = os.path.join(str(tmpdir), "project")
+ element_path = "elements"
+ source_dir = os.path.join(str(tmpdir), "cache", "sources")
+ workspace = os.path.join(cli.directory, "workspace")
- cli.configure({
- 'cachedir': os.path.join(str(tmpdir), 'cache')
- })
+ cli.configure({"cachedir": os.path.join(str(tmpdir), "cache")})
- create_element_size('target.bst', project_dir, element_path, [], 10000)
- res = cli.run(project=project_dir, args=['build', 'target.bst'])
+ create_element_size("target.bst", project_dir, element_path, [], 10000)
+ res = cli.run(project=project_dir, args=["build", "target.bst"])
res.assert_success()
- assert 'Fetching from' in res.stderr
+ assert "Fetching from" in res.stderr
# remove the original sources
shutil.rmtree(source_dir)
# Open a workspace and check that fetches the original sources
- res = cli.run(project=project_dir,
- args=['workspace', 'open', 'target.bst', '--directory', workspace])
+ res = cli.run(
+ project=project_dir,
+ args=["workspace", "open", "target.bst", "--directory", workspace],
+ )
res.assert_success()
- assert 'Fetching from' in res.stderr
+ assert "Fetching from" in res.stderr
assert os.listdir(workspace) != []
@pytest.mark.datafiles(DATA_DIR)
def test_workspace_open_no_source_push(tmpdir, datafiles, cli):
- project_dir = os.path.join(str(tmpdir), 'project')
- element_path = 'elements'
- cache_dir = os.path.join(str(tmpdir), 'cache')
- share_dir = os.path.join(str(tmpdir), 'share')
- workspace = os.path.join(cli.directory, 'workspace')
+ project_dir = os.path.join(str(tmpdir), "project")
+ element_path = "elements"
+ cache_dir = os.path.join(str(tmpdir), "cache")
+ share_dir = os.path.join(str(tmpdir), "share")
+ workspace = os.path.join(cli.directory, "workspace")
with create_artifact_share(share_dir) as share:
- cli.configure({
- 'cachedir': cache_dir,
- 'scheduler': {
- 'pushers': 1
- },
- 'source-caches': {
- 'url': share.repo,
- 'push': True,
- },
- })
+ cli.configure(
+ {
+ "cachedir": cache_dir,
+ "scheduler": {"pushers": 1},
+ "source-caches": {"url": share.repo, "push": True,},
+ }
+ )
# Fetch as in previous test and check it pushes the source
- create_element_size('target.bst', project_dir, element_path, [], 10000)
- res = cli.run(project=project_dir, args=['build', 'target.bst'])
+ create_element_size("target.bst", project_dir, element_path, [], 10000)
+ res = cli.run(project=project_dir, args=["build", "target.bst"])
res.assert_success()
- assert 'Fetching from' in res.stderr
- assert 'Pushed source' in res.stderr
+ assert "Fetching from" in res.stderr
+ assert "Pushed source" in res.stderr
# clear the cas and open a workspace
- shutil.rmtree(os.path.join(cache_dir, 'cas'))
- res = cli.run(project=project_dir,
- args=['workspace', 'open', 'target.bst', '--directory', workspace])
+ shutil.rmtree(os.path.join(cache_dir, "cas"))
+ res = cli.run(
+ project=project_dir,
+ args=["workspace", "open", "target.bst", "--directory", workspace],
+ )
res.assert_success()
# Check that this time it does not push the sources
- res = cli.run(project=project_dir, args=['build', 'target.bst'])
+ res = cli.run(project=project_dir, args=["build", "target.bst"])
res.assert_success()
assert "Pushed source" not in res.stderr
diff --git a/tests/sources/bzr.py b/tests/sources/bzr.py
index c6e78f8c1..7df4d7471 100644
--- a/tests/sources/bzr.py
+++ b/tests/sources/bzr.py
@@ -10,40 +10,35 @@ from buildstream.testing import cli # pylint: disable=unused-import
from buildstream.testing import create_repo
from buildstream.testing._utils.site import HAVE_BZR
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'bzr'
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "bzr")
@pytest.mark.skipif(HAVE_BZR is False, reason="bzr is not available")
@pytest.mark.datafiles(os.path.join(DATA_DIR))
def test_fetch_checkout(cli, tmpdir, datafiles):
project = str(datafiles)
- checkoutdir = os.path.join(str(tmpdir), 'checkout')
+ checkoutdir = os.path.join(str(tmpdir), "checkout")
- repo = create_repo('bzr', str(tmpdir))
- ref = repo.create(os.path.join(project, 'basic'))
+ repo = create_repo("bzr", str(tmpdir))
+ ref = repo.create(os.path.join(project, "basic"))
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch, build, checkout
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
assert result.exit_code == 0
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
assert result.exit_code == 0
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
assert result.exit_code == 0
# Assert we checked out the file as it was committed
- with open(os.path.join(checkoutdir, 'test')) as f:
+ with open(os.path.join(checkoutdir, "test")) as f:
text = f.read()
- assert text == 'test\n'
+ assert text == "test\n"
diff --git a/tests/sources/deb.py b/tests/sources/deb.py
index e536e522a..5d1fdcceb 100644
--- a/tests/sources/deb.py
+++ b/tests/sources/deb.py
@@ -12,22 +12,16 @@ from buildstream.testing import cli # pylint: disable=unused-import
from buildstream.testing._utils.site import HAVE_ARPY
from . import list_dir_contents
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'deb',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "deb",)
deb_name = "a_deb.deb"
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump({
- 'name': 'foo',
- 'aliases': {
- 'tmpdir': "file:///" + str(tmpdir)
- }
- }, project_file)
+ _yaml.roundtrip_dump(
+ {"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file
+ )
def _copy_deb(start_location, tmpdir):
@@ -38,31 +32,29 @@ def _copy_deb(start_location, tmpdir):
# Test that without ref, consistency is set appropriately.
@pytest.mark.skipif(HAVE_ARPY is False, reason="arpy is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-ref'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "no-ref"))
def test_no_ref(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
- assert cli.get_element_state(project, 'target.bst') == 'no reference'
+ assert cli.get_element_state(project, "target.bst") == "no reference"
# Test that when I fetch a nonexistent URL, errors are handled gracefully and a retry is performed.
@pytest.mark.skipif(HAVE_ARPY is False, reason="arpy is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_fetch_bad_url(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
assert "FAILURE Try #" in result.stderr
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
@pytest.mark.skipif(HAVE_ARPY is False, reason="arpy is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_fetch_bad_ref(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -71,16 +63,14 @@ def test_fetch_bad_ref(cli, tmpdir, datafiles):
_copy_deb(DATA_DIR, tmpdir)
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
# Test that when tracking with a ref set, there is a warning
@pytest.mark.skipif(HAVE_ARPY is False, reason="arpy is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_track_warning(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -89,16 +79,14 @@ def test_track_warning(cli, tmpdir, datafiles):
_copy_deb(DATA_DIR, tmpdir)
# Track it
- result = cli.run(project=project, args=[
- 'source', 'track', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
assert "Potential man-in-the-middle attack!" in result.stderr
# Test that a staged checkout matches what was tarred up, with the default first subdir
@pytest.mark.skipif(HAVE_ARPY is False, reason="arpy is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_stage_default_basedir(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -108,13 +96,16 @@ def test_stage_default_basedir(cli, tmpdir, datafiles):
_copy_deb(DATA_DIR, tmpdir)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '')
@@ -126,7 +117,7 @@ def test_stage_default_basedir(cli, tmpdir, datafiles):
# Test that a staged checkout matches what was tarred up, with an empty base-dir
@pytest.mark.skipif(HAVE_ARPY is False, reason="arpy is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-basedir'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "no-basedir"))
def test_stage_no_basedir(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -136,13 +127,16 @@ def test_stage_no_basedir(cli, tmpdir, datafiles):
_copy_deb(DATA_DIR, tmpdir)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the full content of the tarball is checked out (base-dir: '')
@@ -154,7 +148,7 @@ def test_stage_no_basedir(cli, tmpdir, datafiles):
# Test that a staged checkout matches what was tarred up, with an explicit basedir
@pytest.mark.skipif(HAVE_ARPY is False, reason="arpy is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'explicit-basedir'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "explicit-basedir"))
def test_stage_explicit_basedir(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -164,13 +158,16 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles):
_copy_deb(DATA_DIR, tmpdir)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '')
diff --git a/tests/sources/git.py b/tests/sources/git.py
index 245f90131..6a6f95364 100644
--- a/tests/sources/git.py
+++ b/tests/sources/git.py
@@ -36,354 +36,342 @@ from buildstream.testing import cli # pylint: disable=unused-import
from buildstream.testing import create_repo
from buildstream.testing._utils.site import HAVE_GIT, HAVE_OLD_GIT
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'git',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "git",)
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_fetch_bad_ref(cli, tmpdir, datafiles):
project = str(datafiles)
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Write out our test target with a bad ref
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref='5')
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [repo.source_config(ref="5")]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Assert that fetch raises an error here
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_submodule_fetch_checkout(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created
- ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
+ ref = repo.add_submodule("subdir", "file://" + subrepo.repo)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch, build, checkout
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Assert we checked out both files at their expected location
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'subdir', 'ponyfile.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "subdir", "ponyfile.txt"))
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_submodule_fetch_source_enable_explicit(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created
- ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
+ ref = repo.add_submodule("subdir", "file://" + subrepo.repo)
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config_extra(ref=ref, checkout_submodules=True)
- ]
+ "kind": "import",
+ "sources": [repo.source_config_extra(ref=ref, checkout_submodules=True)],
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch, build, checkout
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Assert we checked out both files at their expected location
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'subdir', 'ponyfile.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "subdir", "ponyfile.txt"))
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_submodule_fetch_source_disable(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created
- ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
+ ref = repo.add_submodule("subdir", "file://" + subrepo.repo)
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config_extra(ref=ref, checkout_submodules=False)
- ]
+ "kind": "import",
+ "sources": [repo.source_config_extra(ref=ref, checkout_submodules=False)],
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch, build, checkout
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Assert we checked out the repo file at its expected location, but not the submodule file
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
- assert not os.path.exists(os.path.join(checkoutdir, 'subdir', 'ponyfile.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
+ assert not os.path.exists(os.path.join(checkoutdir, "subdir", "ponyfile.txt"))
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_submodule_fetch_submodule_does_override(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created
- ref = repo.add_submodule('subdir', 'file://' + subrepo.repo, checkout=True)
+ ref = repo.add_submodule("subdir", "file://" + subrepo.repo, checkout=True)
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config_extra(ref=ref, checkout_submodules=False)
- ]
+ "kind": "import",
+ "sources": [repo.source_config_extra(ref=ref, checkout_submodules=False)],
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch, build, checkout
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Assert we checked out both files at their expected location
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'subdir', 'ponyfile.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "subdir", "ponyfile.txt"))
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_submodule_fetch_submodule_individual_checkout(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create another submodule from the 'othersubrepofiles' subdir
- other_subrepo = create_repo('git', str(tmpdir), 'othersubrepo')
- other_subrepo.create(os.path.join(project, 'othersubrepofiles'))
+ other_subrepo = create_repo("git", str(tmpdir), "othersubrepo")
+ other_subrepo.create(os.path.join(project, "othersubrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created
- repo.add_submodule('subdir', 'file://' + subrepo.repo, checkout=False)
- ref = repo.add_submodule('othersubdir', 'file://' + other_subrepo.repo)
+ repo.add_submodule("subdir", "file://" + subrepo.repo, checkout=False)
+ ref = repo.add_submodule("othersubdir", "file://" + other_subrepo.repo)
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config_extra(ref=ref, checkout_submodules=True)
- ]
+ "kind": "import",
+ "sources": [repo.source_config_extra(ref=ref, checkout_submodules=True)],
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch, build, checkout
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Assert we checked out files at their expected location
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
- assert not os.path.exists(os.path.join(checkoutdir, 'subdir', 'ponyfile.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'othersubdir', 'unicornfile.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
+ assert not os.path.exists(os.path.join(checkoutdir, "subdir", "ponyfile.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "othersubdir", "unicornfile.txt"))
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_submodule_fetch_submodule_individual_checkout_explicit(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create another submodule from the 'othersubrepofiles' subdir
- other_subrepo = create_repo('git', str(tmpdir), 'othersubrepo')
- other_subrepo.create(os.path.join(project, 'othersubrepofiles'))
+ other_subrepo = create_repo("git", str(tmpdir), "othersubrepo")
+ other_subrepo.create(os.path.join(project, "othersubrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created
- repo.add_submodule('subdir', 'file://' + subrepo.repo, checkout=False)
- ref = repo.add_submodule('othersubdir', 'file://' + other_subrepo.repo, checkout=True)
+ repo.add_submodule("subdir", "file://" + subrepo.repo, checkout=False)
+ ref = repo.add_submodule(
+ "othersubdir", "file://" + other_subrepo.repo, checkout=True
+ )
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config_extra(ref=ref, checkout_submodules=True)
- ]
+ "kind": "import",
+ "sources": [repo.source_config_extra(ref=ref, checkout_submodules=True)],
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch, build, checkout
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Assert we checked out files at their expected location
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
- assert not os.path.exists(os.path.join(checkoutdir, 'subdir', 'ponyfile.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'othersubdir', 'unicornfile.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
+ assert not os.path.exists(os.path.join(checkoutdir, "subdir", "ponyfile.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "othersubdir", "unicornfile.txt"))
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project-override'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "project-override"))
def test_submodule_fetch_project_override(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created
- ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
+ ref = repo.add_submodule("subdir", "file://" + subrepo.repo)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch, build, checkout
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Assert we checked out the repo file at its expected location, but not the submodule file
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
- assert not os.path.exists(os.path.join(checkoutdir, 'subdir', 'ponyfile.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
+ assert not os.path.exists(os.path.join(checkoutdir, "subdir", "ponyfile.txt"))
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_submodule_track_ignore_inconsistent(cli, tmpdir, datafiles):
project = str(datafiles)
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project, "repofiles"))
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Now add a .gitmodules file with an inconsistent submodule;
# we are calling this inconsistent because the file was created
# but `git submodule add` was never called, so there is no reference
# associated with the submodule.
#
- repo.add_file(os.path.join(project, 'inconsistent-submodule', '.gitmodules'))
+ repo.add_file(os.path.join(project, "inconsistent-submodule", ".gitmodules"))
# Fetch should work, we're not yet at the offending ref
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
# Track will encounter an inconsistent submodule without any ref
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
# Assert that we are just fine without it, and emit a warning to the user.
@@ -391,68 +379,58 @@ def test_submodule_track_ignore_inconsistent(cli, tmpdir, datafiles):
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_submodule_track_no_ref_or_track(cli, tmpdir, datafiles):
project = str(datafiles)
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Write out our test target
gitsource = repo.source_config(ref=None)
- gitsource.pop('track')
- element = {
- 'kind': 'import',
- 'sources': [
- gitsource
- ]
- }
+ gitsource.pop("track")
+ element = {"kind": "import", "sources": [gitsource]}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# `bst show` will fail because the source has neither a ref nor a track
- result = cli.run(project=project, args=['show', 'target.bst'])
+ result = cli.run(project=project, args=["show", "target.bst"])
result.assert_main_error(ErrorDomain.SOURCE, "missing-track-and-ref")
result.assert_task_error(None, None)
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
-@pytest.mark.parametrize("fail", ['warn', 'error'])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
+@pytest.mark.parametrize("fail", ["warn", "error"])
def test_ref_not_in_track(cli, tmpdir, datafiles, fail):
project = str(datafiles)
# Make the warning an error if we're testing errors
- if fail == 'error':
+ if fail == "error":
project_template = {
"name": "foo",
- "fatal-warnings": [CoreWarnings.REF_NOT_IN_TRACK]
+ "fatal-warnings": [CoreWarnings.REF_NOT_IN_TRACK],
}
- _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_template, os.path.join(project, "project.conf"))
# Create the repo from 'repofiles', create a branch without latest commit
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project, "repofiles"))
gitsource = repo.source_config(ref=ref)
# Overwrite the track value to the added branch
- gitsource['track'] = 'foo'
+ gitsource["track"] = "foo"
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- gitsource
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [gitsource]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
# Assert a warning or an error depending on what we're checking
- if fail == 'error':
+ if fail == "error":
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.REF_NOT_IN_TRACK)
else:
@@ -461,29 +439,26 @@ def test_ref_not_in_track(cli, tmpdir, datafiles, fail):
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
-@pytest.mark.parametrize("fail", ['warn', 'error'])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
+@pytest.mark.parametrize("fail", ["warn", "error"])
def test_unlisted_submodule(cli, tmpdir, datafiles, fail):
project = str(datafiles)
# Make the warning an error if we're testing errors
- if fail == 'error':
- project_template = {
- "name": "foo",
- "fatal-warnings": ['git:unlisted-submodule']
- }
- _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
+ if fail == "error":
+ project_template = {"name": "foo", "fatal-warnings": ["git:unlisted-submodule"]}
+ _yaml.roundtrip_dump(project_template, os.path.join(project, "project.conf"))
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created
- ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
+ ref = repo.add_submodule("subdir", "file://" + subrepo.repo)
# Create the source, and delete the explicit configuration
# of the submodules.
@@ -492,127 +467,111 @@ def test_unlisted_submodule(cli, tmpdir, datafiles, fail):
# after the source has been fetched.
#
gitsource = repo.source_config(ref=ref)
- del gitsource['submodules']
+ del gitsource["submodules"]
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- gitsource
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [gitsource]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# We will not see the warning or error before the first fetch, because
# we don't have the repository yet and so we have no knowledge of
# the unlisted submodule.
- result = cli.run(project=project, args=['show', 'target.bst'])
+ result = cli.run(project=project, args=["show", "target.bst"])
result.assert_success()
assert "git:unlisted-submodule" not in result.stderr
# We will notice this directly in fetch, as it will try to fetch
# the submodules it discovers as a result of fetching the primary repo.
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
# Assert a warning or an error depending on what we're checking
- if fail == 'error':
+ if fail == "error":
result.assert_main_error(ErrorDomain.STREAM, None)
- result.assert_task_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
+ result.assert_task_error(ErrorDomain.PLUGIN, "git:unlisted-submodule")
else:
result.assert_success()
assert "git:unlisted-submodule" in result.stderr
# Now that we've fetched it, `bst show` will discover the unlisted submodule too
- result = cli.run(project=project, args=['show', 'target.bst'])
+ result = cli.run(project=project, args=["show", "target.bst"])
# Assert a warning or an error depending on what we're checking
- if fail == 'error':
- result.assert_main_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
+ if fail == "error":
+ result.assert_main_error(ErrorDomain.PLUGIN, "git:unlisted-submodule")
else:
result.assert_success()
assert "git:unlisted-submodule" in result.stderr
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
-@pytest.mark.parametrize("fail", ['warn', 'error'])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
+@pytest.mark.parametrize("fail", ["warn", "error"])
def test_track_unlisted_submodule(cli, tmpdir, datafiles, fail):
project = str(datafiles)
# Make the warning an error if we're testing errors
- if fail == 'error':
- project_template = {
- "name": "foo",
- "fatal-warnings": ['git:unlisted-submodule']
- }
- _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
+ if fail == "error":
+ project_template = {"name": "foo", "fatal-warnings": ["git:unlisted-submodule"]}
+ _yaml.roundtrip_dump(project_template, os.path.join(project, "project.conf"))
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created, but use
# the original ref, let the submodules appear after tracking
- repo.add_submodule('subdir', 'file://' + subrepo.repo)
+ repo.add_submodule("subdir", "file://" + subrepo.repo)
# Create the source, and delete the explicit configuration
# of the submodules.
gitsource = repo.source_config(ref=ref)
- del gitsource['submodules']
+ del gitsource["submodules"]
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- gitsource
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [gitsource]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch the repo, we will not see the warning because we
# are still pointing to a ref which predates the submodules
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
assert "git:unlisted-submodule" not in result.stderr
# We won't get a warning/error when tracking either, the source
# has not become Consistency.CACHED so the opportunity to check
# for the warning has not yet arisen.
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
assert "git:unlisted-submodule" not in result.stderr
# Fetching the repo at the new ref will finally reveal the warning
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
- if fail == 'error':
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
+ if fail == "error":
result.assert_main_error(ErrorDomain.STREAM, None)
- result.assert_task_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
+ result.assert_task_error(ErrorDomain.PLUGIN, "git:unlisted-submodule")
else:
result.assert_success()
assert "git:unlisted-submodule" in result.stderr
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
-@pytest.mark.parametrize("fail", ['warn', 'error'])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
+@pytest.mark.parametrize("fail", ["warn", "error"])
def test_invalid_submodule(cli, tmpdir, datafiles, fail):
project = str(datafiles)
# Make the warning an error if we're testing errors
- if fail == 'error':
- project_template = {
- "name": "foo",
- "fatal-warnings": ['git:invalid-submodule']
- }
- _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
+ if fail == "error":
+ project_template = {"name": "foo", "fatal-warnings": ["git:invalid-submodule"]}
+ _yaml.roundtrip_dump(project_template, os.path.join(project, "project.conf"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project, "repofiles"))
# Create the source without any submodules, and add
# an invalid submodule configuration to it.
@@ -622,46 +581,37 @@ def test_invalid_submodule(cli, tmpdir, datafiles, fail):
# the real submodules actually are.
#
gitsource = repo.source_config(ref=ref)
- gitsource['submodules'] = {
- 'subdir': {
- 'url': 'https://pony.org/repo.git'
- }
- }
+ gitsource["submodules"] = {"subdir": {"url": "https://pony.org/repo.git"}}
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- gitsource
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [gitsource]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# We will not see the warning or error before the first fetch, because
# we don't have the repository yet and so we have no knowledge of
# the invalid submodule.
- result = cli.run(project=project, args=['show', 'target.bst'])
+ result = cli.run(project=project, args=["show", "target.bst"])
result.assert_success()
assert "git:invalid-submodule" not in result.stderr
# We will notice this directly in fetch, as it will try to fetch
# the submodules it discovers as a result of fetching the primary repo.
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
# Assert a warning or an error depending on what we're checking
- if fail == 'error':
+ if fail == "error":
result.assert_main_error(ErrorDomain.STREAM, None)
- result.assert_task_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
+ result.assert_task_error(ErrorDomain.PLUGIN, "git:invalid-submodule")
else:
result.assert_success()
assert "git:invalid-submodule" in result.stderr
# Now that we've fetched it, `bst show` will discover the invalid submodule too
- result = cli.run(project=project, args=['show', 'target.bst'])
+ result = cli.run(project=project, args=["show", "target.bst"])
# Assert a warning or an error depending on what we're checking
- if fail == 'error':
- result.assert_main_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
+ if fail == "error":
+ result.assert_main_error(ErrorDomain.PLUGIN, "git:invalid-submodule")
else:
result.assert_success()
assert "git:invalid-submodule" in result.stderr
@@ -669,49 +619,41 @@ def test_invalid_submodule(cli, tmpdir, datafiles, fail):
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@pytest.mark.skipif(HAVE_OLD_GIT, reason="old git rm does not update .gitmodules")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
-@pytest.mark.parametrize("fail", ['warn', 'error'])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
+@pytest.mark.parametrize("fail", ["warn", "error"])
def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
project = str(datafiles)
# Make the warning an error if we're testing errors
- if fail == 'error':
- project_template = {
- "name": "foo",
- "fatal-warnings": ['git:invalid-submodule']
- }
- _yaml.roundtrip_dump(project_template, os.path.join(project, 'project.conf'))
+ if fail == "error":
+ project_template = {"name": "foo", "fatal-warnings": ["git:invalid-submodule"]}
+ _yaml.roundtrip_dump(project_template, os.path.join(project, "project.conf"))
# Create the submodule first from the 'subrepofiles' subdir
- subrepo = create_repo('git', str(tmpdir), 'subrepo')
- subrepo.create(os.path.join(project, 'subrepofiles'))
+ subrepo = create_repo("git", str(tmpdir), "subrepo")
+ subrepo.create(os.path.join(project, "subrepofiles"))
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
# Add a submodule pointing to the one we created
- ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
+ ref = repo.add_submodule("subdir", "file://" + subrepo.repo)
# Add a commit beyond the ref which *removes* the submodule we've added
- repo.remove_path('subdir')
+ repo.remove_path("subdir")
# Create the source, this will keep the submodules so initially
# the configuration is valid for the ref we're using
gitsource = repo.source_config(ref=ref)
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- gitsource
- ]
- }
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ element = {"kind": "import", "sources": [gitsource]}
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Fetch the repo, we will not see the warning because we
# are still pointing to a ref which predates the submodules
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
assert "git:invalid-submodule" not in result.stderr
@@ -720,417 +662,425 @@ def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
# not locally cached, the Source will be CACHED directly after
# tracking and the validations will occur as a result.
#
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
- if fail == 'error':
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
+ if fail == "error":
result.assert_main_error(ErrorDomain.STREAM, None)
- result.assert_task_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
+ result.assert_task_error(ErrorDomain.PLUGIN, "git:invalid-submodule")
else:
result.assert_success()
assert "git:invalid-submodule" in result.stderr
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
-@pytest.mark.parametrize("ref_format", ['sha1', 'git-describe'])
-@pytest.mark.parametrize("tag,extra_commit", [(False, False), (True, False), (True, True)])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
+@pytest.mark.parametrize("ref_format", ["sha1", "git-describe"])
+@pytest.mark.parametrize(
+ "tag,extra_commit", [(False, False), (True, False), (True, True)]
+)
def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit):
project = str(datafiles)
# Create the repo from 'repofiles' subdir
- repo = create_repo('git', str(tmpdir))
- repo.create(os.path.join(project, 'repofiles'))
+ repo = create_repo("git", str(tmpdir))
+ repo.create(os.path.join(project, "repofiles"))
if tag:
- repo.add_tag('tag')
+ repo.add_tag("tag")
if extra_commit:
repo.add_commit()
# Write out our test target
- element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config()
- ]
- }
- element['sources'][0]['ref-format'] = ref_format
- element_path = os.path.join(project, 'target.bst')
+ element = {"kind": "import", "sources": [repo.source_config()]}
+ element["sources"][0]["ref-format"] = ref_format
+ element_path = os.path.join(project, "target.bst")
_yaml.roundtrip_dump(element, element_path)
# Track it
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
element = _yaml.load(element_path)
- new_ref = element.get_sequence('sources').mapping_at(0).get_str('ref')
+ new_ref = element.get_sequence("sources").mapping_at(0).get_str("ref")
- if ref_format == 'git-describe' and tag:
+ if ref_format == "git-describe" and tag:
# Check and strip prefix
- prefix = 'tag-{}-g'.format(0 if not extra_commit else 1)
+ prefix = "tag-{}-g".format(0 if not extra_commit else 1)
assert new_ref.startswith(prefix)
- new_ref = new_ref[len(prefix):]
+ new_ref = new_ref[len(prefix) :]
# 40 chars for SHA-1
assert len(new_ref) == 40
# Fetch it
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
@pytest.mark.skipif(HAVE_OLD_GIT, reason="old git describe lacks --first-parent")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
-@pytest.mark.parametrize("tag_type", [('annotated'), ('lightweight')])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
+@pytest.mark.parametrize("tag_type", [("annotated"), ("lightweight")])
def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type):
project = str(datafiles)
- project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['ref-storage'] = ref_storage
- _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
+ project_config = _yaml.load(os.path.join(project, "project.conf"))
+ project_config["ref-storage"] = ref_storage
+ _yaml.roundtrip_dump(project_config, os.path.join(project, "project.conf"))
- repofiles = os.path.join(str(tmpdir), 'repofiles')
+ repofiles = os.path.join(str(tmpdir), "repofiles")
os.makedirs(repofiles, exist_ok=True)
- file0 = os.path.join(repofiles, 'file0')
- with open(file0, 'w') as f:
- f.write('test\n')
+ file0 = os.path.join(repofiles, "file0")
+ with open(file0, "w") as f:
+ f.write("test\n")
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
def tag(name):
- if tag_type == 'annotated':
+ if tag_type == "annotated":
repo.add_annotated_tag(name, name)
else:
repo.add_tag(name)
repo.create(repofiles)
- tag('uselesstag')
+ tag("uselesstag")
- file1 = os.path.join(str(tmpdir), 'file1')
- with open(file1, 'w') as f:
- f.write('test\n')
+ file1 = os.path.join(str(tmpdir), "file1")
+ with open(file1, "w") as f:
+ f.write("test\n")
repo.add_file(file1)
- tag('tag1')
+ tag("tag1")
- file2 = os.path.join(str(tmpdir), 'file2')
- with open(file2, 'w') as f:
- f.write('test\n')
- repo.branch('branch2')
+ file2 = os.path.join(str(tmpdir), "file2")
+ with open(file2, "w") as f:
+ f.write("test\n")
+ repo.branch("branch2")
repo.add_file(file2)
- tag('tag2')
+ tag("tag2")
- repo.checkout('master')
- file3 = os.path.join(str(tmpdir), 'file3')
- with open(file3, 'w') as f:
- f.write('test\n')
+ repo.checkout("master")
+ file3 = os.path.join(str(tmpdir), "file3")
+ with open(file3, "w") as f:
+ f.write("test\n")
repo.add_file(file3)
- repo.merge('branch2')
+ repo.merge("branch2")
config = repo.source_config()
- config['track'] = repo.latest_commit()
- config['track-tags'] = True
+ config["track"] = repo.latest_commit()
+ config["track-tags"] = True
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- config
- ],
+ "kind": "import",
+ "sources": [config],
}
- element_path = os.path.join(project, 'target.bst')
+ element_path = os.path.join(project, "target.bst")
_yaml.roundtrip_dump(element, element_path)
- if ref_storage == 'inline':
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ if ref_storage == "inline":
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
else:
- result = cli.run(project=project, args=['source', 'track', 'target.bst', '--deps', 'all'])
+ result = cli.run(
+ project=project, args=["source", "track", "target.bst", "--deps", "all"]
+ )
result.assert_success()
- if ref_storage == 'inline':
+ if ref_storage == "inline":
element = _yaml.load(element_path)
- tags = element.get_sequence('sources').mapping_at(0).get_sequence('tags')
+ tags = element.get_sequence("sources").mapping_at(0).get_sequence("tags")
assert len(tags) == 2
for tag in tags:
- assert 'tag' in tag
- assert 'commit' in tag
- assert 'annotated' in tag
- assert tag.get_bool('annotated') == (tag_type == 'annotated')
-
- assert {(tag.get_str('tag'),
- tag.get_str('commit'))
- for tag in tags} == {('tag1', repo.rev_parse('tag1^{commit}')),
- ('tag2', repo.rev_parse('tag2^{commit}'))}
+ assert "tag" in tag
+ assert "commit" in tag
+ assert "annotated" in tag
+ assert tag.get_bool("annotated") == (tag_type == "annotated")
+
+ assert {(tag.get_str("tag"), tag.get_str("commit")) for tag in tags} == {
+ ("tag1", repo.rev_parse("tag1^{commit}")),
+ ("tag2", repo.rev_parse("tag2^{commit}")),
+ }
- checkout = os.path.join(str(tmpdir), 'checkout')
+ checkout = os.path.join(str(tmpdir), "checkout")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkout],
+ )
result.assert_success()
- if tag_type == 'annotated':
+ if tag_type == "annotated":
options = []
else:
- options = ['--tags']
- describe = subprocess.check_output(['git', 'describe', *options],
- cwd=checkout, universal_newlines=True)
- assert describe.startswith('tag2-2-')
-
- describe_fp = subprocess.check_output(['git', 'describe', '--first-parent', *options],
- cwd=checkout, universal_newlines=True)
- assert describe_fp.startswith('tag1-2-')
-
- tags = subprocess.check_output(['git', 'tag'],
- cwd=checkout, universal_newlines=True)
+ options = ["--tags"]
+ describe = subprocess.check_output(
+ ["git", "describe", *options], cwd=checkout, universal_newlines=True
+ )
+ assert describe.startswith("tag2-2-")
+
+ describe_fp = subprocess.check_output(
+ ["git", "describe", "--first-parent", *options],
+ cwd=checkout,
+ universal_newlines=True,
+ )
+ assert describe_fp.startswith("tag1-2-")
+
+ tags = subprocess.check_output(
+ ["git", "tag"], cwd=checkout, universal_newlines=True
+ )
tags = set(tags.splitlines())
- assert tags == set(['tag1', 'tag2'])
+ assert tags == set(["tag1", "tag2"])
- p = subprocess.run(['git', 'log', repo.rev_parse('uselesstag')],
- cwd=checkout)
+ p = subprocess.run(["git", "log", repo.rev_parse("uselesstag")], cwd=checkout)
assert p.returncode != 0
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
-@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
-@pytest.mark.parametrize("tag_type", [('annotated'), ('lightweight')])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
+@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
+@pytest.mark.parametrize("tag_type", [("annotated"), ("lightweight")])
def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_type):
project = str(datafiles)
- project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['ref-storage'] = ref_storage
- _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
+ project_config = _yaml.load(os.path.join(project, "project.conf"))
+ project_config["ref-storage"] = ref_storage
+ _yaml.roundtrip_dump(project_config, os.path.join(project, "project.conf"))
- repofiles = os.path.join(str(tmpdir), 'repofiles')
+ repofiles = os.path.join(str(tmpdir), "repofiles")
os.makedirs(repofiles, exist_ok=True)
- file0 = os.path.join(repofiles, 'file0')
- with open(file0, 'w') as f:
- f.write('test\n')
+ file0 = os.path.join(repofiles, "file0")
+ with open(file0, "w") as f:
+ f.write("test\n")
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
def tag(name):
- if tag_type == 'annotated':
+ if tag_type == "annotated":
repo.add_annotated_tag(name, name)
else:
repo.add_tag(name)
repo.create(repofiles)
- tag('uselesstag')
+ tag("uselesstag")
- file1 = os.path.join(str(tmpdir), 'file1')
- with open(file1, 'w') as f:
- f.write('test\n')
+ file1 = os.path.join(str(tmpdir), "file1")
+ with open(file1, "w") as f:
+ f.write("test\n")
repo.add_file(file1)
- file2 = os.path.join(str(tmpdir), 'file2')
- with open(file2, 'w') as f:
- f.write('test\n')
- repo.branch('branch2')
+ file2 = os.path.join(str(tmpdir), "file2")
+ with open(file2, "w") as f:
+ f.write("test\n")
+ repo.branch("branch2")
repo.add_file(file2)
- repo.checkout('master')
- file3 = os.path.join(str(tmpdir), 'file3')
- with open(file3, 'w') as f:
- f.write('test\n')
+ repo.checkout("master")
+ file3 = os.path.join(str(tmpdir), "file3")
+ with open(file3, "w") as f:
+ f.write("test\n")
repo.add_file(file3)
- tagged_ref = repo.merge('branch2')
- tag('tag')
+ tagged_ref = repo.merge("branch2")
+ tag("tag")
config = repo.source_config()
- config['track'] = repo.latest_commit()
- config['track-tags'] = True
+ config["track"] = repo.latest_commit()
+ config["track-tags"] = True
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- config
- ],
+ "kind": "import",
+ "sources": [config],
}
- element_path = os.path.join(project, 'target.bst')
+ element_path = os.path.join(project, "target.bst")
_yaml.roundtrip_dump(element, element_path)
- if ref_storage == 'inline':
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ if ref_storage == "inline":
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
else:
- result = cli.run(project=project, args=['source', 'track', 'target.bst', '--deps', 'all'])
+ result = cli.run(
+ project=project, args=["source", "track", "target.bst", "--deps", "all"]
+ )
result.assert_success()
- if ref_storage == 'inline':
+ if ref_storage == "inline":
element = _yaml.load(element_path)
- source = element.get_sequence('sources').mapping_at(0)
- tags = source.get_sequence('tags')
+ source = element.get_sequence("sources").mapping_at(0)
+ tags = source.get_sequence("tags")
assert len(tags) == 1
- tag = source.get_sequence('tags').mapping_at(0)
- assert 'tag' in tag
- assert 'commit' in tag
- assert 'annotated' in tag
- assert tag.get_bool('annotated') == (tag_type == 'annotated')
+ tag = source.get_sequence("tags").mapping_at(0)
+ assert "tag" in tag
+ assert "commit" in tag
+ assert "annotated" in tag
+ assert tag.get_bool("annotated") == (tag_type == "annotated")
- tag_name = tag.get_str('tag')
- commit = tag.get_str('commit')
- assert (tag_name, commit) == ('tag', repo.rev_parse('tag^{commit}'))
+ tag_name = tag.get_str("tag")
+ commit = tag.get_str("commit")
+ assert (tag_name, commit) == ("tag", repo.rev_parse("tag^{commit}"))
- checkout = os.path.join(str(tmpdir), 'checkout')
+ checkout = os.path.join(str(tmpdir), "checkout")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkout],
+ )
result.assert_success()
- if tag_type == 'annotated':
+ if tag_type == "annotated":
options = []
else:
- options = ['--tags']
- describe = subprocess.check_output(['git', 'describe', *options],
- cwd=checkout, universal_newlines=True)
- assert describe.startswith('tag')
-
- tags = subprocess.check_output(['git', 'tag'],
- cwd=checkout,
- universal_newlines=True)
+ options = ["--tags"]
+ describe = subprocess.check_output(
+ ["git", "describe", *options], cwd=checkout, universal_newlines=True
+ )
+ assert describe.startswith("tag")
+
+ tags = subprocess.check_output(
+ ["git", "tag"], cwd=checkout, universal_newlines=True
+ )
tags = set(tags.splitlines())
- assert tags == set(['tag'])
+ assert tags == set(["tag"])
- rev_list = subprocess.check_output(['git', 'rev-list', '--all'],
- cwd=checkout,
- universal_newlines=True)
+ rev_list = subprocess.check_output(
+ ["git", "rev-list", "--all"], cwd=checkout, universal_newlines=True
+ )
assert set(rev_list.splitlines()) == set([tagged_ref])
- p = subprocess.run(['git', 'log', repo.rev_parse('uselesstag')],
- cwd=checkout)
+ p = subprocess.run(["git", "log", repo.rev_parse("uselesstag")], cwd=checkout)
assert p.returncode != 0
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_git_describe_relevant_history(cli, tmpdir, datafiles):
project = str(datafiles)
- project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['ref-storage'] = 'project.refs'
- _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
+ project_config = _yaml.load(os.path.join(project, "project.conf"))
+ project_config["ref-storage"] = "project.refs"
+ _yaml.roundtrip_dump(project_config, os.path.join(project, "project.conf"))
- repofiles = os.path.join(str(tmpdir), 'repofiles')
+ repofiles = os.path.join(str(tmpdir), "repofiles")
os.makedirs(repofiles, exist_ok=True)
- file0 = os.path.join(repofiles, 'file0')
- with open(file0, 'w') as f:
- f.write('test\n')
+ file0 = os.path.join(repofiles, "file0")
+ with open(file0, "w") as f:
+ f.write("test\n")
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(repofiles)
- file1 = os.path.join(str(tmpdir), 'file1')
- with open(file1, 'w') as f:
- f.write('test\n')
+ file1 = os.path.join(str(tmpdir), "file1")
+ with open(file1, "w") as f:
+ f.write("test\n")
repo.add_file(file1)
- repo.branch('branch')
- repo.checkout('master')
+ repo.branch("branch")
+ repo.checkout("master")
- file2 = os.path.join(str(tmpdir), 'file2')
- with open(file2, 'w') as f:
- f.write('test\n')
+ file2 = os.path.join(str(tmpdir), "file2")
+ with open(file2, "w") as f:
+ f.write("test\n")
repo.add_file(file2)
- file3 = os.path.join(str(tmpdir), 'file3')
- with open(file3, 'w') as f:
- f.write('test\n')
+ file3 = os.path.join(str(tmpdir), "file3")
+ with open(file3, "w") as f:
+ f.write("test\n")
branch_boundary = repo.add_file(file3)
- repo.checkout('branch')
- file4 = os.path.join(str(tmpdir), 'file4')
- with open(file4, 'w') as f:
- f.write('test\n')
+ repo.checkout("branch")
+ file4 = os.path.join(str(tmpdir), "file4")
+ with open(file4, "w") as f:
+ f.write("test\n")
tagged_ref = repo.add_file(file4)
- repo.add_annotated_tag('tag1', 'tag1')
+ repo.add_annotated_tag("tag1", "tag1")
- head = repo.merge('master')
+ head = repo.merge("master")
config = repo.source_config()
- config['track'] = head
- config['track-tags'] = True
+ config["track"] = head
+ config["track-tags"] = True
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- config
- ],
+ "kind": "import",
+ "sources": [config],
}
- element_path = os.path.join(project, 'target.bst')
+ element_path = os.path.join(project, "target.bst")
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(project=project, args=['source', 'track', 'target.bst', '--deps', 'all'])
+ result = cli.run(
+ project=project, args=["source", "track", "target.bst", "--deps", "all"]
+ )
result.assert_success()
- checkout = os.path.join(str(tmpdir), 'checkout')
+ checkout = os.path.join(str(tmpdir), "checkout")
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkout])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkout],
+ )
result.assert_success()
- describe = subprocess.check_output(['git', 'describe'],
- cwd=checkout,
- universal_newlines=True)
- assert describe.startswith('tag1-2-')
+ describe = subprocess.check_output(
+ ["git", "describe"], cwd=checkout, universal_newlines=True
+ )
+ assert describe.startswith("tag1-2-")
- rev_list = subprocess.check_output(['git', 'rev-list', '--all'],
- cwd=checkout,
- universal_newlines=True)
+ rev_list = subprocess.check_output(
+ ["git", "rev-list", "--all"], cwd=checkout, universal_newlines=True
+ )
assert set(rev_list.splitlines()) == set([head, tagged_ref, branch_boundary])
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_default_do_not_track_tags(cli, tmpdir, datafiles):
project = str(datafiles)
- project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['ref-storage'] = 'inline'
- _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
+ project_config = _yaml.load(os.path.join(project, "project.conf"))
+ project_config["ref-storage"] = "inline"
+ _yaml.roundtrip_dump(project_config, os.path.join(project, "project.conf"))
- repofiles = os.path.join(str(tmpdir), 'repofiles')
+ repofiles = os.path.join(str(tmpdir), "repofiles")
os.makedirs(repofiles, exist_ok=True)
- file0 = os.path.join(repofiles, 'file0')
- with open(file0, 'w') as f:
- f.write('test\n')
+ file0 = os.path.join(repofiles, "file0")
+ with open(file0, "w") as f:
+ f.write("test\n")
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
repo.create(repofiles)
- repo.add_tag('tag')
+ repo.add_tag("tag")
config = repo.source_config()
- config['track'] = repo.latest_commit()
+ config["track"] = repo.latest_commit()
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- config
- ],
+ "kind": "import",
+ "sources": [config],
}
- element_path = os.path.join(project, 'target.bst')
+ element_path = os.path.join(project, "target.bst")
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
element = _yaml.load(element_path)
- source = element.get_sequence('sources').mapping_at(0)
- assert 'tags' not in source
+ source = element.get_sequence("sources").mapping_at(0)
+ assert "tags" not in source
@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "template"))
def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles):
"""When using multiple remotes in cache (i.e. when using aliases), we
need to make sure we override tags. This is not allowed to fetch
@@ -1139,88 +1089,76 @@ def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles):
project = str(datafiles)
- repofiles = os.path.join(str(tmpdir), 'repofiles')
+ repofiles = os.path.join(str(tmpdir), "repofiles")
os.makedirs(repofiles, exist_ok=True)
- file0 = os.path.join(repofiles, 'file0')
- with open(file0, 'w') as f:
- f.write('test\n')
+ file0 = os.path.join(repofiles, "file0")
+ with open(file0, "w") as f:
+ f.write("test\n")
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
top_commit = repo.create(repofiles)
repodir, reponame = os.path.split(repo.repo)
- project_config = _yaml.load(os.path.join(project, 'project.conf'))
- project_config['aliases'] = Node.from_dict({
- 'repo': 'http://example.com/'
- })
- project_config['mirrors'] = [
- {
- 'name': 'middle-earth',
- 'aliases': {
- 'repo': ['file://{}/'.format(repodir)]
- }
- }
+ project_config = _yaml.load(os.path.join(project, "project.conf"))
+ project_config["aliases"] = Node.from_dict({"repo": "http://example.com/"})
+ project_config["mirrors"] = [
+ {"name": "middle-earth", "aliases": {"repo": ["file://{}/".format(repodir)]}}
]
- _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_config, os.path.join(project, "project.conf"))
- repo.add_annotated_tag('tag', 'tag')
+ repo.add_annotated_tag("tag", "tag")
- file1 = os.path.join(repofiles, 'file1')
- with open(file1, 'w') as f:
- f.write('test\n')
+ file1 = os.path.join(repofiles, "file1")
+ with open(file1, "w") as f:
+ f.write("test\n")
ref = repo.add_file(file1)
config = repo.source_config(ref=ref)
- del config['track']
- config['url'] = 'repo:{}'.format(reponame)
+ del config["track"]
+ config["url"] = "repo:{}".format(reponame)
# Write out our test target
element = {
- 'kind': 'import',
- 'sources': [
- config
- ],
+ "kind": "import",
+ "sources": [config],
}
- element_path = os.path.join(project, 'target.bst')
+ element_path = os.path.join(project, "target.bst")
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
repo.checkout(top_commit)
- file2 = os.path.join(repofiles, 'file2')
- with open(file2, 'w') as f:
- f.write('test\n')
+ file2 = os.path.join(repofiles, "file2")
+ with open(file2, "w") as f:
+ f.write("test\n")
new_ref = repo.add_file(file2)
- repo.delete_tag('tag')
- repo.add_annotated_tag('tag', 'tag')
- repo.checkout('master')
+ repo.delete_tag("tag")
+ repo.add_annotated_tag("tag", "tag")
+ repo.checkout("master")
- otherpath = os.path.join(str(tmpdir), 'other_path')
- shutil.copytree(repo.repo,
- os.path.join(otherpath, 'repo'))
- create_repo('git', otherpath)
+ otherpath = os.path.join(str(tmpdir), "other_path")
+ shutil.copytree(repo.repo, os.path.join(otherpath, "repo"))
+ create_repo("git", otherpath)
repodir, reponame = os.path.split(repo.repo)
- _yaml.roundtrip_dump(project_config, os.path.join(project, 'project.conf'))
+ _yaml.roundtrip_dump(project_config, os.path.join(project, "project.conf"))
config = repo.source_config(ref=new_ref)
- del config['track']
- config['url'] = 'repo:{}'.format(reponame)
+ del config["track"]
+ config["url"] = "repo:{}".format(reponame)
element = {
- 'kind': 'import',
- 'sources': [
- config
- ],
+ "kind": "import",
+ "sources": [config],
}
_yaml.roundtrip_dump(element, element_path)
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
diff --git a/tests/sources/keytest.py b/tests/sources/keytest.py
index d3eab8d6b..46d0d07fe 100644
--- a/tests/sources/keytest.py
+++ b/tests/sources/keytest.py
@@ -27,8 +27,7 @@ import pytest
from buildstream._exceptions import ErrorDomain
from buildstream.testing import cli # pylint: disable=unused-import
-DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)),
- "project_key_test")
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project_key_test")
# using the key-test plugin to ensure get_unique_key is never called before
diff --git a/tests/sources/local.py b/tests/sources/local.py
index 4b72a4343..08c508bfe 100644
--- a/tests/sources/local.py
+++ b/tests/sources/local.py
@@ -10,103 +10,101 @@ from buildstream.testing import cli # pylint: disable=unused-import
from buildstream.testing._utils.site import HAVE_SANDBOX
from tests.testutils import filetypegenerator
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'local',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "local",)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_missing_path(cli, datafiles):
project = str(datafiles)
# Removing the local file causes preflight to fail
- localfile = os.path.join(project, 'file.txt')
+ localfile = os.path.join(project, "file.txt")
os.remove(localfile)
- result = cli.run(project=project, args=[
- 'show', 'target.bst'
- ])
+ result = cli.run(project=project, args=["show", "target.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_non_regular_file_or_directory(cli, datafiles):
project = str(datafiles)
- localfile = os.path.join(project, 'file.txt')
+ localfile = os.path.join(project, "file.txt")
for _file_type in filetypegenerator.generate_file_types(localfile):
- result = cli.run(project=project, args=[
- 'show', 'target.bst'
- ])
+ result = cli.run(project=project, args=["show", "target.bst"])
if os.path.isdir(localfile) and not os.path.islink(localfile):
result.assert_success()
elif os.path.isfile(localfile) and not os.path.islink(localfile):
result.assert_success()
else:
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND
+ )
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_invalid_absolute_path(cli, datafiles):
project = str(datafiles)
- with open(os.path.join(project, "target.bst"), 'r') as f:
+ with open(os.path.join(project, "target.bst"), "r") as f:
old_yaml = f.read()
new_yaml = old_yaml.replace("file.txt", os.path.join(project, "file.txt"))
assert old_yaml != new_yaml
- with open(os.path.join(project, "target.bst"), 'w') as f:
+ with open(os.path.join(project, "target.bst"), "w") as f:
f.write(new_yaml)
- result = cli.run(project=project, args=['show', 'target.bst'])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROJ_PATH_INVALID)
+ result = cli.run(project=project, args=["show", "target.bst"])
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'invalid-relative-path'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "invalid-relative-path"))
def test_invalid_relative_path(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['show', 'target.bst'])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROJ_PATH_INVALID)
+ result = cli.run(project=project, args=["show", "target.bst"])
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_stage_file(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the checkout contains the expected file
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'directory'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "directory"))
def test_stage_directory(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the checkout contains the expected file and the file inside the directory
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'subdir', 'anotherfile.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "subdir", "anotherfile.txt"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'symlink'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "symlink"))
def test_stage_symlink(cli, tmpdir, datafiles):
project = str(datafiles)
@@ -117,104 +115,101 @@ def test_stage_symlink(cli, tmpdir, datafiles):
# https://github.com/omarkohl/pytest-datafiles/issues/1
#
# Create the symlink by hand.
- symlink = os.path.join(project, 'files', 'symlink-to-file.txt')
- os.symlink('file.txt', symlink)
+ symlink = os.path.join(project, "files", "symlink-to-file.txt")
+ os.symlink("file.txt", symlink)
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the checkout contains the expected file and the symlink to it
- assert os.path.exists(os.path.join(checkoutdir, 'file.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'symlink-to-file.txt'))
- assert os.path.islink(os.path.join(checkoutdir, 'symlink-to-file.txt'))
+ assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
+ assert os.path.exists(os.path.join(checkoutdir, "symlink-to-file.txt"))
+ assert os.path.islink(os.path.join(checkoutdir, "symlink-to-file.txt"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'file-exists'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "file-exists"))
def test_stage_file_exists(cli, datafiles):
project = str(datafiles)
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.ELEMENT, "import-source-files-fail")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'directory'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "directory"))
def test_stage_directory_symlink(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
- symlink = os.path.join(project, 'files', 'symlink-to-subdir')
- os.symlink('subdir', symlink)
+ symlink = os.path.join(project, "files", "symlink-to-subdir")
+ os.symlink("subdir", symlink)
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the checkout contains the expected directory and directory symlink
- assert os.path.exists(os.path.join(checkoutdir, 'subdir', 'anotherfile.txt'))
- assert os.path.exists(os.path.join(checkoutdir, 'symlink-to-subdir', 'anotherfile.txt'))
- assert os.path.islink(os.path.join(checkoutdir, 'symlink-to-subdir'))
+ assert os.path.exists(os.path.join(checkoutdir, "subdir", "anotherfile.txt"))
+ assert os.path.exists(
+ os.path.join(checkoutdir, "symlink-to-subdir", "anotherfile.txt")
+ )
+ assert os.path.islink(os.path.join(checkoutdir, "symlink-to-subdir"))
@pytest.mark.integration
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'deterministic-umask'))
-@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "deterministic-umask"))
+@pytest.mark.skipif(
+ not HAVE_SANDBOX, reason="Only available with a functioning sandbox"
+)
def test_deterministic_source_umask(cli, tmpdir, datafiles):
-
- def create_test_file(*path, mode=0o644, content='content\n'):
+ def create_test_file(*path, mode=0o644, content="content\n"):
path = os.path.join(*path)
os.makedirs(os.path.dirname(path), exist_ok=True)
- with open(path, 'w') as f:
+ with open(path, "w") as f:
f.write(content)
os.fchmod(f.fileno(), mode)
def create_test_directory(*path, mode=0o644):
- create_test_file(*path, '.keep', content='')
+ create_test_file(*path, ".keep", content="")
path = os.path.join(*path)
os.chmod(path, mode)
project = str(datafiles)
- element_name = 'list.bst'
- element_path = os.path.join(project, 'elements', element_name)
- sourcedir = os.path.join(project, 'source')
-
- create_test_file(sourcedir, 'a.txt', mode=0o700)
- create_test_file(sourcedir, 'b.txt', mode=0o755)
- create_test_file(sourcedir, 'c.txt', mode=0o600)
- create_test_file(sourcedir, 'd.txt', mode=0o400)
- create_test_file(sourcedir, 'e.txt', mode=0o644)
- create_test_file(sourcedir, 'f.txt', mode=0o4755)
- create_test_file(sourcedir, 'g.txt', mode=0o2755)
- create_test_file(sourcedir, 'h.txt', mode=0o1755)
- create_test_directory(sourcedir, 'dir-a', mode=0o0700)
- create_test_directory(sourcedir, 'dir-c', mode=0o0755)
- create_test_directory(sourcedir, 'dir-d', mode=0o4755)
- create_test_directory(sourcedir, 'dir-e', mode=0o2755)
- create_test_directory(sourcedir, 'dir-f', mode=0o1755)
-
- source = {'kind': 'local',
- 'path': 'source'}
+ element_name = "list.bst"
+ element_path = os.path.join(project, "elements", element_name)
+ sourcedir = os.path.join(project, "source")
+
+ create_test_file(sourcedir, "a.txt", mode=0o700)
+ create_test_file(sourcedir, "b.txt", mode=0o755)
+ create_test_file(sourcedir, "c.txt", mode=0o600)
+ create_test_file(sourcedir, "d.txt", mode=0o400)
+ create_test_file(sourcedir, "e.txt", mode=0o644)
+ create_test_file(sourcedir, "f.txt", mode=0o4755)
+ create_test_file(sourcedir, "g.txt", mode=0o2755)
+ create_test_file(sourcedir, "h.txt", mode=0o1755)
+ create_test_directory(sourcedir, "dir-a", mode=0o0700)
+ create_test_directory(sourcedir, "dir-c", mode=0o0755)
+ create_test_directory(sourcedir, "dir-d", mode=0o4755)
+ create_test_directory(sourcedir, "dir-e", mode=0o2755)
+ create_test_directory(sourcedir, "dir-f", mode=0o1755)
+
+ source = {"kind": "local", "path": "source"}
element = {
- 'kind': 'manual',
- 'depends': [
- {
- 'filename': 'base.bst',
- 'type': 'build'
- }
- ],
- 'sources': [
- source
- ],
- 'config': {
- 'install-commands': [
- 'ls -l >"%{install-root}/ls-l"'
- ]
- }
+ "kind": "manual",
+ "depends": [{"filename": "base.bst", "type": "build"}],
+ "sources": [source],
+ "config": {"install-commands": ['ls -l >"%{install-root}/ls-l"']},
}
_yaml.roundtrip_dump(element, element_path)
diff --git a/tests/sources/no-fetch-cached/plugins/sources/always_cached.py b/tests/sources/no-fetch-cached/plugins/sources/always_cached.py
index fa143a020..623ab19ab 100644
--- a/tests/sources/no-fetch-cached/plugins/sources/always_cached.py
+++ b/tests/sources/no-fetch-cached/plugins/sources/always_cached.py
@@ -11,7 +11,6 @@ from buildstream import Consistency, Source
class AlwaysCachedSource(Source):
-
def configure(self, node):
pass
diff --git a/tests/sources/no_fetch_cached.py b/tests/sources/no_fetch_cached.py
index 81032881c..8476db08c 100644
--- a/tests/sources/no_fetch_cached.py
+++ b/tests/sources/no_fetch_cached.py
@@ -10,10 +10,7 @@ from buildstream.testing import cli # pylint: disable=unused-import
from buildstream.testing import create_repo
from buildstream.testing._utils.site import HAVE_GIT
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'no-fetch-cached'
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "no-fetch-cached")
##################################################################
@@ -26,23 +23,16 @@ def test_no_fetch_cached(cli, tmpdir, datafiles):
project = str(datafiles)
# Create the repo from 'files' subdir
- repo = create_repo('git', str(tmpdir))
- ref = repo.create(os.path.join(project, 'files'))
+ repo = create_repo("git", str(tmpdir))
+ ref = repo.create(os.path.join(project, "files"))
# Write out test target with a cached and a non-cached source
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref),
- {
- 'kind': 'always_cached'
- }
- ]
+ "kind": "import",
+ "sources": [repo.source_config(ref=ref), {"kind": "always_cached"}],
}
- _yaml.roundtrip_dump(element, os.path.join(project, 'target.bst'))
+ _yaml.roundtrip_dump(element, os.path.join(project, "target.bst"))
# Test fetch of target with a cached and a non-cached source
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
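
The always_cached plugin above lets this test assert that a source already reported as cached is never queued for fetching. As a rough illustration only (reusing the Source and Consistency names imported in that plugin; the method bodies here are hypothetical, not the real always_cached.py), a minimal always-cached source could look like this:

from buildstream import Consistency, Source


# Hypothetical sketch, not the real plugin: reporting Consistency.CACHED
# guarantees that fetch() is never scheduled for this source.
class AlwaysCachedSketch(Source):
    def configure(self, node):
        pass

    def preflight(self):
        pass

    def get_unique_key(self):
        return "always-cached"

    def get_consistency(self):
        return Consistency.CACHED

    def get_ref(self):
        return None

    def set_ref(self, ref, node):
        pass

    def fetch(self):
        # Unreachable in practice: a CACHED source is skipped by `bst source fetch`.
        pass

    def stage(self, directory):
        # Stage nothing; the test only cares that fetch is skipped.
        pass


def setup():
    return AlwaysCachedSketch
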
diff --git a/tests/sources/patch.py b/tests/sources/patch.py
index 4f9db815b..da93684db 100644
--- a/tests/sources/patch.py
+++ b/tests/sources/patch.py
@@ -8,148 +8,151 @@ from buildstream._exceptions import ErrorDomain, LoadErrorReason
from buildstream.testing import cli # pylint: disable=unused-import
from tests.testutils import filetypegenerator
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'patch',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "patch",)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_missing_patch(cli, datafiles):
project = str(datafiles)
# Removing the local file causes preflight to fail
- localfile = os.path.join(project, 'file_1.patch')
+ localfile = os.path.join(project, "file_1.patch")
os.remove(localfile)
- result = cli.run(project=project, args=[
- 'show', 'target.bst'
- ])
+ result = cli.run(project=project, args=["show", "target.bst"])
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_non_regular_file_patch(cli, datafiles):
project = str(datafiles)
- patch_path = os.path.join(project, 'irregular_file.patch')
+ patch_path = os.path.join(project, "irregular_file.patch")
for _file_type in filetypegenerator.generate_file_types(patch_path):
- result = cli.run(project=project, args=[
- 'show', 'irregular.bst'
- ])
+ result = cli.run(project=project, args=["show", "irregular.bst"])
if os.path.isfile(patch_path) and not os.path.islink(patch_path):
result.assert_success()
else:
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROJ_PATH_INVALID_KIND)
+ result.assert_main_error(
+ ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID_KIND
+ )
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_invalid_absolute_path(cli, datafiles):
project = str(datafiles)
- with open(os.path.join(project, "target.bst"), 'r') as f:
+ with open(os.path.join(project, "target.bst"), "r") as f:
old_yaml = f.read()
- new_yaml = old_yaml.replace("file_1.patch",
- os.path.join(project, "file_1.patch"))
+ new_yaml = old_yaml.replace("file_1.patch", os.path.join(project, "file_1.patch"))
assert old_yaml != new_yaml
- with open(os.path.join(project, "target.bst"), 'w') as f:
+ with open(os.path.join(project, "target.bst"), "w") as f:
f.write(new_yaml)
- result = cli.run(project=project, args=['show', 'target.bst'])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROJ_PATH_INVALID)
+ result = cli.run(project=project, args=["show", "target.bst"])
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'invalid-relative-path'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "invalid-relative-path"))
def test_invalid_relative_path(cli, datafiles):
project = str(datafiles)
- result = cli.run(project=project, args=['show', 'irregular.bst'])
- result.assert_main_error(ErrorDomain.LOAD,
- LoadErrorReason.PROJ_PATH_INVALID)
+ result = cli.run(project=project, args=["show", "irregular.bst"])
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.PROJ_PATH_INVALID)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_stage_and_patch(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Test that file.txt was patched and changed
- with open(os.path.join(checkoutdir, 'file.txt')) as f:
- assert f.read() == 'This is text file with superpowers\n'
+ with open(os.path.join(checkoutdir, "file.txt")) as f:
+ assert f.read() == "This is text file with superpowers\n"
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_stage_file_nonexistent_dir(cli, datafiles):
project = str(datafiles)
# Fails at build time because it tries to patch into a non-existing directory
- result = cli.run(project=project, args=['build', 'failure-nonexistent-dir.bst'])
+ result = cli.run(project=project, args=["build", "failure-nonexistent-dir.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, "patch-no-files")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "basic"))
def test_stage_file_empty_dir(cli, datafiles):
project = str(datafiles)
# Fails at build time because it tries to patch with nothing else staged
- result = cli.run(project=project, args=['build', 'failure-empty-dir.bst'])
+ result = cli.run(project=project, args=["build", "failure-empty-dir.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, "patch-no-files")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'separate-patch-dir'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "separate-patch-dir"))
def test_stage_separate_patch_dir(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Test that file.txt was patched and changed
- with open(os.path.join(checkoutdir, 'test-dir', 'file.txt')) as f:
- assert f.read() == 'This is text file in a directory with superpowers\n'
+ with open(os.path.join(checkoutdir, "test-dir", "file.txt")) as f:
+ assert f.read() == "This is text file in a directory with superpowers\n"
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'multiple-patches'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "multiple-patches"))
def test_stage_multiple_patches(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Test that file.txt was patched and changed
- with open(os.path.join(checkoutdir, 'file.txt')) as f:
- assert f.read() == 'This is text file with more superpowers\n'
+ with open(os.path.join(checkoutdir, "file.txt")) as f:
+ assert f.read() == "This is text file with more superpowers\n"
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'different-strip-level'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "different-strip-level"))
def test_patch_strip_level(cli, tmpdir, datafiles):
project = str(datafiles)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Test that file.txt was patched and changed
- with open(os.path.join(checkoutdir, 'file.txt')) as f:
- assert f.read() == 'This is text file with superpowers\n'
+ with open(os.path.join(checkoutdir, "file.txt")) as f:
+ assert f.read() == "This is text file with superpowers\n"
diff --git a/tests/sources/pip.py b/tests/sources/pip.py
index 7f91ba701..aafdfaf1c 100644
--- a/tests/sources/pip.py
+++ b/tests/sources/pip.py
@@ -9,62 +9,56 @@ from buildstream import _yaml
from buildstream.plugins.sources.pip import _match_package_name
from buildstream.testing import cli # pylint: disable=unused-import
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'pip',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "pip",)
def generate_project(project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump({'name': 'foo'}, project_file)
+ _yaml.roundtrip_dump({"name": "foo"}, project_file)
# Test that without ref, consistency is set appropriately.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-ref'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "no-ref"))
def test_no_ref(cli, datafiles):
project = str(datafiles)
generate_project(project)
- assert cli.get_element_state(project, 'target.bst') == 'no reference'
+ assert cli.get_element_state(project, "target.bst") == "no reference"
# Test that pip is not allowed to be the first source
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'first-source-pip'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "first-source-pip"))
def test_first_source(cli, datafiles):
project = str(datafiles)
generate_project(project)
- result = cli.run(project=project, args=[
- 'show', 'target.bst'
- ])
+ result = cli.run(project=project, args=["show", "target.bst"])
result.assert_main_error(ErrorDomain.ELEMENT, None)
# Test that an error is raised when neither packages nor requirements files
# have been specified
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-packages'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "no-packages"))
def test_no_packages(cli, datafiles):
project = str(datafiles)
generate_project(project)
- result = cli.run(project=project, args=[
- 'show', 'target.bst'
- ])
+ result = cli.run(project=project, args=["show", "target.bst"])
result.assert_main_error(ErrorDomain.SOURCE, None)
# Test that the pip source parses tarball names correctly for the ref
@pytest.mark.parametrize(
- 'tarball, expected_name, expected_version',
+ "tarball, expected_name, expected_version",
[
- ('dotted.package-0.9.8.tar.gz', 'dotted.package', '0.9.8'),
- ('hyphenated-package-2.6.0.tar.gz', 'hyphenated-package', '2.6.0'),
- ('underscore_pkg-3.1.0.tar.gz', 'underscore_pkg', '3.1.0'),
- ('numbers2and5-1.0.1.tar.gz', 'numbers2and5', '1.0.1'),
- ('multiple.dots.package-5.6.7.tar.gz', 'multiple.dots.package', '5.6.7'),
- ('multiple-hyphens-package-1.2.3.tar.gz', 'multiple-hyphens-package', '1.2.3'),
- ('multiple_underscore_pkg-3.4.5.tar.gz', 'multiple_underscore_pkg', '3.4.5'),
- ('shortversion-1.0.tar.gz', 'shortversion', '1.0'),
- ('longversion-1.2.3.4.tar.gz', 'longversion', '1.2.3.4')
- ])
+ ("dotted.package-0.9.8.tar.gz", "dotted.package", "0.9.8"),
+ ("hyphenated-package-2.6.0.tar.gz", "hyphenated-package", "2.6.0"),
+ ("underscore_pkg-3.1.0.tar.gz", "underscore_pkg", "3.1.0"),
+ ("numbers2and5-1.0.1.tar.gz", "numbers2and5", "1.0.1"),
+ ("multiple.dots.package-5.6.7.tar.gz", "multiple.dots.package", "5.6.7"),
+ ("multiple-hyphens-package-1.2.3.tar.gz", "multiple-hyphens-package", "1.2.3"),
+ ("multiple_underscore_pkg-3.4.5.tar.gz", "multiple_underscore_pkg", "3.4.5"),
+ ("shortversion-1.0.tar.gz", "shortversion", "1.0"),
+ ("longversion-1.2.3.4.tar.gz", "longversion", "1.2.3.4"),
+ ],
+)
def test_match_package_name(tarball, expected_name, expected_version):
name, version = _match_package_name(tarball)
assert (expected_name, expected_version) == (name, version)
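
The parametrized cases above define how a source tarball name should split into a package name and a version. A minimal sketch of one way to make that split (a hypothetical helper, not the actual _match_package_name implementation):

import re

# Hypothetical regex: treat the trailing dot-separated run of digits before
# ".tar.gz" as the version, and everything before the separating hyphen as the name.
_TARBALL_RE = re.compile(r"^(?P<name>.+)-(?P<version>\d+(\.\d+)*)\.tar\.gz$")


def match_package_name_sketch(tarball):
    match = _TARBALL_RE.match(tarball)
    if match is None:
        raise ValueError("Unrecognised tarball name: {}".format(tarball))
    return match.group("name"), match.group("version")


# Sanity checks against two of the cases listed above
assert match_package_name_sketch("hyphenated-package-2.6.0.tar.gz") == ("hyphenated-package", "2.6.0")
assert match_package_name_sketch("longversion-1.2.3.4.tar.gz") == ("longversion", "1.2.3.4")
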
diff --git a/tests/sources/previous_source_access.py b/tests/sources/previous_source_access.py
index 750b94381..3ff91b5fd 100644
--- a/tests/sources/previous_source_access.py
+++ b/tests/sources/previous_source_access.py
@@ -8,8 +8,7 @@ from buildstream import _yaml
from buildstream.testing import cli # pylint: disable=unused-import
DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'previous_source_access'
+ os.path.dirname(os.path.realpath(__file__)), "previous_source_access"
)
@@ -29,30 +28,25 @@ def test_custom_transform_source(cli, datafiles):
_yaml.roundtrip_dump(project_config, project_config_path)
# Ensure we can track
- result = cli.run(project=project, args=[
- 'source', 'track', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
# Ensure we can fetch
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
# Ensure we get correct output from foo_transform
- cli.run(project=project, args=[
- 'build', 'target.bst'
- ])
- destpath = os.path.join(cli.directory, 'checkout')
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'target.bst', '--directory', destpath
- ])
+ cli.run(project=project, args=["build", "target.bst"])
+ destpath = os.path.join(cli.directory, "checkout")
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", destpath],
+ )
result.assert_success()
# Assert that files from both sources exist, and that they have
# the same content
- assert os.path.exists(os.path.join(destpath, 'file'))
- assert os.path.exists(os.path.join(destpath, 'filetransform'))
- with open(os.path.join(destpath, 'file')) as file1:
- with open(os.path.join(destpath, 'filetransform')) as file2:
+ assert os.path.exists(os.path.join(destpath, "file"))
+ assert os.path.exists(os.path.join(destpath, "filetransform"))
+ with open(os.path.join(destpath, "file")) as file1:
+ with open(os.path.join(destpath, "filetransform")) as file2:
assert file1.read() == file2.read()
diff --git a/tests/sources/previous_source_access/plugins/sources/foo_transform.py b/tests/sources/previous_source_access/plugins/sources/foo_transform.py
index 4b423a1b3..d59eaeca7 100644
--- a/tests/sources/previous_source_access/plugins/sources/foo_transform.py
+++ b/tests/sources/previous_source_access/plugins/sources/foo_transform.py
@@ -25,14 +25,13 @@ class FooTransformSource(Source):
"""Directory where this source should stage its files
"""
- path = os.path.join(self.get_mirror_directory(), self.name,
- self.ref.strip())
+ path = os.path.join(self.get_mirror_directory(), self.name, self.ref.strip())
os.makedirs(path, exist_ok=True)
return path
def configure(self, node):
- node.validate_keys(['ref', *Source.COMMON_CONFIG_KEYS])
- self.ref = node.get_str('ref', None)
+ node.validate_keys(["ref", *Source.COMMON_CONFIG_KEYS])
+ self.ref = node.get_str("ref", None)
def preflight(self):
pass
@@ -45,9 +44,9 @@ class FooTransformSource(Source):
return Consistency.INCONSISTENT
# If we have a file called "filetransform", verify that its checksum
# matches our ref. Otherwise, it is resolved but not cached.
- fpath = os.path.join(self.mirror, 'filetransform')
+ fpath = os.path.join(self.mirror, "filetransform")
try:
- with open(fpath, 'rb') as f:
+ with open(fpath, "rb") as f:
if hashlib.sha256(f.read()).hexdigest() == self.ref.strip():
return Consistency.CACHED
except Exception:
@@ -58,30 +57,32 @@ class FooTransformSource(Source):
return self.ref
def set_ref(self, ref, node):
- self.ref = node['ref'] = ref
+ self.ref = node["ref"] = ref
def track(self, previous_sources_dir):
# Store the checksum of the file from previous source as our ref
- fpath = os.path.join(previous_sources_dir, 'file')
- with open(fpath, 'rb') as f:
+ fpath = os.path.join(previous_sources_dir, "file")
+ with open(fpath, "rb") as f:
return hashlib.sha256(f.read()).hexdigest()
def fetch(self, previous_sources_dir):
- fpath = os.path.join(previous_sources_dir, 'file')
+ fpath = os.path.join(previous_sources_dir, "file")
# Verify that the checksum of the file from previous source matches
# our ref
- with open(fpath, 'rb') as f:
+ with open(fpath, "rb") as f:
if hashlib.sha256(f.read()).hexdigest() != self.ref.strip():
raise SourceError("Element references do not match")
# Copy "file" as "filetransform"
- newfpath = os.path.join(self.mirror, 'filetransform')
+ newfpath = os.path.join(self.mirror, "filetransform")
utils.safe_copy(fpath, newfpath)
def stage(self, directory):
# Simply stage the "filetransform" file
- utils.safe_copy(os.path.join(self.mirror, 'filetransform'),
- os.path.join(directory, 'filetransform'))
+ utils.safe_copy(
+ os.path.join(self.mirror, "filetransform"),
+ os.path.join(directory, "filetransform"),
+ )
def setup():
diff --git a/tests/sources/remote.py b/tests/sources/remote.py
index 5b818b960..685f6bfba 100644
--- a/tests/sources/remote.py
+++ b/tests/sources/remote.py
@@ -10,94 +10,78 @@ from buildstream import _yaml
from buildstream.testing import cli # pylint: disable=unused-import
from tests.testutils.file_server import create_file_server
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'remote',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "remote",)
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump({
- 'name': 'foo',
- 'aliases': {
- 'tmpdir': "file:///" + str(tmpdir)
- }
- }, project_file)
+ _yaml.roundtrip_dump(
+ {"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file
+ )
def generate_project_file_server(server, project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump({
- 'name': 'foo',
- 'aliases': {
- 'tmpdir': server.base_url()
- }
- }, project_file)
+ _yaml.roundtrip_dump(
+ {"name": "foo", "aliases": {"tmpdir": server.base_url()}}, project_file
+ )
# Test that without ref, consistency is set appropriately.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-ref'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "no-ref"))
def test_no_ref(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
- assert cli.get_element_state(project, 'target.bst') == 'no reference'
+ assert cli.get_element_state(project, "target.bst") == "no reference"
# Here we are doing a fetch on a file that doesn't exist. target.bst
# refers to 'file' but that file is not present.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'missing-file'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "missing-file"))
def test_missing_file(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'path-in-filename'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "path-in-filename"))
def test_path_in_filename(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
# The bst file has a / in the filename param
result.assert_main_error(ErrorDomain.SOURCE, "filename-contains-directory")
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "single-file"))
def test_simple_file_build(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=[
- 'build', 'target.bst'
- ])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'target.bst', '--directory', checkoutdir
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Note that the URL of the file in target.bst is actually /dir/file
# but this test confirms we take the basename
- checkout_file = os.path.join(checkoutdir, 'file')
+ checkout_file = os.path.join(checkoutdir, "file")
assert os.path.exists(checkout_file)
mode = os.stat(checkout_file).st_mode
@@ -107,113 +91,120 @@ def test_simple_file_build(cli, tmpdir, datafiles):
assert not mode & (stat.S_IWGRP | stat.S_IWOTH)
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file-custom-name'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "single-file-custom-name"))
def test_simple_file_custom_name_build(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
checkoutdir = os.path.join(str(tmpdir), "checkout")
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=[
- 'build', 'target.bst'
- ])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=[
- 'artifact', 'checkout', 'target.bst', '--directory', checkoutdir
- ])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
- assert not os.path.exists(os.path.join(checkoutdir, 'file'))
- assert os.path.exists(os.path.join(checkoutdir, 'custom-file'))
+ assert not os.path.exists(os.path.join(checkoutdir, "file"))
+ assert os.path.exists(os.path.join(checkoutdir, "custom-file"))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'unique-keys'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "unique-keys"))
def test_unique_key(cli, tmpdir, datafiles):
- '''This test confirms that the 'filename' parameter is honoured when it comes
+ """This test confirms that the 'filename' parameter is honoured when it comes
to generating a cache key for the source.
- '''
+ """
project = str(datafiles)
generate_project(project, tmpdir)
- states = cli.get_element_states(project, [
- 'target.bst', 'target-custom.bst', 'target-custom-executable.bst'
- ])
- assert states['target.bst'] == "fetch needed"
- assert states['target-custom.bst'] == "fetch needed"
- assert states['target-custom-executable.bst'] == "fetch needed"
+ states = cli.get_element_states(
+ project, ["target.bst", "target-custom.bst", "target-custom-executable.bst"]
+ )
+ assert states["target.bst"] == "fetch needed"
+ assert states["target-custom.bst"] == "fetch needed"
+ assert states["target-custom-executable.bst"] == "fetch needed"
# Try to fetch it
- cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ cli.run(project=project, args=["source", "fetch", "target.bst"])
# We should download the file only once
- states = cli.get_element_states(project, [
- 'target.bst', 'target-custom.bst', 'target-custom-executable.bst'
- ])
- assert states['target.bst'] == 'buildable'
- assert states['target-custom.bst'] == 'buildable'
- assert states['target-custom-executable.bst'] == 'buildable'
+ states = cli.get_element_states(
+ project, ["target.bst", "target-custom.bst", "target-custom-executable.bst"]
+ )
+ assert states["target.bst"] == "buildable"
+ assert states["target-custom.bst"] == "buildable"
+ assert states["target-custom-executable.bst"] == "buildable"
# But the cache key is different because the 'filename' is different.
- assert cli.get_element_key(project, 'target.bst') != \
- cli.get_element_key(project, 'target-custom.bst') != \
- cli.get_element_key(project, 'target-custom-executable.bst')
+ assert (
+ cli.get_element_key(project, "target.bst")
+ != cli.get_element_key(project, "target-custom.bst")
+ != cli.get_element_key(project, "target-custom-executable.bst")
+ )
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'unique-keys'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "unique-keys"))
def test_executable(cli, tmpdir, datafiles):
- '''This test confirms that the 'ecxecutable' parameter is honoured.
- '''
+ """This test confirms that the 'ecxecutable' parameter is honoured.
+ """
project = str(datafiles)
generate_project(project, tmpdir)
checkoutdir = os.path.join(str(tmpdir), "checkout")
- assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
+ assert (
+ cli.get_element_state(project, "target-custom-executable.bst") == "fetch needed"
+ )
# Try to fetch it
- cli.run(project=project, args=[
- 'build', 'target-custom-executable.bst'
- ])
-
- cli.run(project=project, args=[
- 'artifact', 'checkout', 'target-custom-executable.bst', '--directory', checkoutdir
- ])
- mode = os.stat(os.path.join(checkoutdir, 'some-custom-file')).st_mode
+ cli.run(project=project, args=["build", "target-custom-executable.bst"])
+
+ cli.run(
+ project=project,
+ args=[
+ "artifact",
+ "checkout",
+ "target-custom-executable.bst",
+ "--directory",
+ checkoutdir,
+ ],
+ )
+ mode = os.stat(os.path.join(checkoutdir, "some-custom-file")).st_mode
assert mode & stat.S_IEXEC
# Assert executable by anyone
assert mode & (stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
-@pytest.mark.parametrize('server_type', ('FTP', 'HTTP'))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file'))
+@pytest.mark.parametrize("server_type", ("FTP", "HTTP"))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "single-file"))
def test_use_netrc(cli, datafiles, server_type, tmpdir):
- fake_home = os.path.join(str(tmpdir), 'fake_home')
+ fake_home = os.path.join(str(tmpdir), "fake_home")
os.makedirs(fake_home, exist_ok=True)
project = str(datafiles)
- checkoutdir = os.path.join(str(tmpdir), 'checkout')
+ checkoutdir = os.path.join(str(tmpdir), "checkout")
- os.environ['HOME'] = fake_home
- with open(os.path.join(fake_home, '.netrc'), 'wb') as f:
+ os.environ["HOME"] = fake_home
+ with open(os.path.join(fake_home, ".netrc"), "wb") as f:
os.fchmod(f.fileno(), 0o700)
- f.write(b'machine 127.0.0.1\n')
- f.write(b'login testuser\n')
- f.write(b'password 12345\n')
+ f.write(b"machine 127.0.0.1\n")
+ f.write(b"login testuser\n")
+ f.write(b"password 12345\n")
with create_file_server(server_type) as server:
- server.add_user('testuser', '12345', project)
+ server.add_user("testuser", "12345", project)
generate_project_file_server(server, project)
server.start()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
- checkout_file = os.path.join(checkoutdir, 'file')
+ checkout_file = os.path.join(checkoutdir, "file")
assert os.path.exists(checkout_file)
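
test_use_netrc above depends on the download code honouring credentials from ~/.netrc under the faked HOME. A small sketch, using only the standard-library netrc module (the helper name and default host are illustrative assumptions), of how such a lookup can be done:

import netrc
import os


def lookup_netrc_credentials(host="127.0.0.1"):
    # Read ~/.netrc from the current HOME (the test points HOME at fake_home)
    # and return (login, password) for the given machine entry, or None.
    netrc_path = os.path.join(os.path.expanduser("~"), ".netrc")
    try:
        entry = netrc.netrc(netrc_path).authenticators(host)
    except (OSError, netrc.NetrcParseError):
        return None
    if entry is None:
        return None
    login, _account, password = entry
    return login, password
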
diff --git a/tests/sources/tar.py b/tests/sources/tar.py
index fac6f3f8b..cdd2328fd 100644
--- a/tests/sources/tar.py
+++ b/tests/sources/tar.py
@@ -17,10 +17,7 @@ from buildstream.testing._utils.site import HAVE_LZIP
from tests.testutils.file_server import create_file_server
from . import list_dir_contents
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'tar',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "tar",)
def _assemble_tar(workingdir, srcdir, dstfile):
@@ -38,58 +35,46 @@ def _assemble_tar_lz(workingdir, srcdir, dstfile):
with tarfile.open(fileobj=uncompressed, mode="w:") as tar:
tar.add(srcdir)
uncompressed.seek(0, 0)
- with open(dstfile, 'wb') as dst:
- subprocess.call(['lzip'],
- stdin=uncompressed,
- stdout=dst)
+ with open(dstfile, "wb") as dst:
+ subprocess.call(["lzip"], stdin=uncompressed, stdout=dst)
os.chdir(old_dir)
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump({
- 'name': 'foo',
- 'aliases': {
- 'tmpdir': "file:///" + str(tmpdir)
- }
- }, project_file)
+ _yaml.roundtrip_dump(
+ {"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file
+ )
def generate_project_file_server(base_url, project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump({
- 'name': 'foo',
- 'aliases': {
- 'tmpdir': base_url
- }
- }, project_file)
+ _yaml.roundtrip_dump({"name": "foo", "aliases": {"tmpdir": base_url}}, project_file)
# Test that without ref, consistency is set appropriately.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-ref'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "no-ref"))
def test_no_ref(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
- assert cli.get_element_state(project, 'target.bst') == 'no reference'
+ assert cli.get_element_state(project, "target.bst") == "no reference"
# Test that when I fetch a nonexistent URL, errors are handled gracefully and a retry is performed.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_fetch_bad_url(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
assert "FAILURE Try #" in result.stderr
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
# Test that when I fetch with an invalid ref, it fails.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_fetch_bad_ref(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -99,15 +84,13 @@ def test_fetch_bad_ref(cli, tmpdir, datafiles):
_assemble_tar(os.path.join(str(datafiles), "content"), "a", src_tar)
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
# Test that when tracking with a ref set, there is a warning
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_track_warning(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -117,15 +100,13 @@ def test_track_warning(cli, tmpdir, datafiles):
_assemble_tar(os.path.join(str(datafiles), "content"), "a", src_tar)
# Track it
- result = cli.run(project=project, args=[
- 'source', 'track', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
assert "Potential man-in-the-middle attack!" in result.stderr
# Test that a staged checkout matches what was tarred up, with the default first subdir
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
@pytest.mark.parametrize("srcdir", ["a", "./a"])
def test_stage_default_basedir(cli, tmpdir, datafiles, srcdir):
project = str(datafiles)
@@ -137,13 +118,16 @@ def test_stage_default_basedir(cli, tmpdir, datafiles, srcdir):
_assemble_tar(os.path.join(str(datafiles), "content"), srcdir, src_tar)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -154,7 +138,7 @@ def test_stage_default_basedir(cli, tmpdir, datafiles, srcdir):
# Test that a staged checkout matches what was tarred up, with an empty base-dir
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-basedir'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "no-basedir"))
@pytest.mark.parametrize("srcdir", ["a", "./a"])
def test_stage_no_basedir(cli, tmpdir, datafiles, srcdir):
project = str(datafiles)
@@ -166,13 +150,16 @@ def test_stage_no_basedir(cli, tmpdir, datafiles, srcdir):
_assemble_tar(os.path.join(str(datafiles), "content"), srcdir, src_tar)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the full content of the tarball is checked out (base-dir: '')
@@ -183,7 +170,7 @@ def test_stage_no_basedir(cli, tmpdir, datafiles, srcdir):
# Test that a staged checkout matches what was tarred up, with an explicit basedir
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'explicit-basedir'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "explicit-basedir"))
@pytest.mark.parametrize("srcdir", ["a", "./a"])
def test_stage_explicit_basedir(cli, tmpdir, datafiles, srcdir):
project = str(datafiles)
@@ -195,13 +182,16 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles, srcdir):
_assemble_tar(os.path.join(str(datafiles), "content"), srcdir, src_tar)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -213,7 +203,7 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles, srcdir):
# Test that we can extract tarballs with hardlinks when stripping the
# leading paths
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'contains-links'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "contains-links"))
def test_stage_contains_links(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -223,21 +213,31 @@ def test_stage_contains_links(cli, tmpdir, datafiles):
src_tar = os.path.join(str(tmpdir), "a.tar.gz")
# Create a hardlink; we won't trust git to store that info for us
- os.makedirs(os.path.join(str(datafiles), "content", "base-directory", "subdir2"), exist_ok=True)
- file1 = os.path.join(str(datafiles), "content", "base-directory", "subdir1", "file.txt")
- file2 = os.path.join(str(datafiles), "content", "base-directory", "subdir2", "file.txt")
+ os.makedirs(
+ os.path.join(str(datafiles), "content", "base-directory", "subdir2"),
+ exist_ok=True,
+ )
+ file1 = os.path.join(
+ str(datafiles), "content", "base-directory", "subdir1", "file.txt"
+ )
+ file2 = os.path.join(
+ str(datafiles), "content", "base-directory", "subdir2", "file.txt"
+ )
os.link(file1, file2)
_assemble_tar(os.path.join(str(datafiles), "content"), "base-directory", src_tar)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -247,8 +247,8 @@ def test_stage_contains_links(cli, tmpdir, datafiles):
assert checkout_contents == original_contents
-@pytest.mark.skipif(not HAVE_LZIP, reason='lzip is not available')
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.skipif(not HAVE_LZIP, reason="lzip is not available")
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
@pytest.mark.parametrize("srcdir", ["a", "./a"])
def test_stage_default_basedir_lzip(cli, tmpdir, datafiles, srcdir):
project = str(datafiles)
@@ -260,13 +260,16 @@ def test_stage_default_basedir_lzip(cli, tmpdir, datafiles, srcdir):
_assemble_tar_lz(os.path.join(str(datafiles), "content"), srcdir, src_tar)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target-lz.bst'])
+ result = cli.run(project=project, args=["source", "track", "target-lz.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target-lz.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target-lz.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target-lz.bst'])
+ result = cli.run(project=project, args=["build", "target-lz.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target-lz.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target-lz.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -280,8 +283,8 @@ def test_stage_default_basedir_lzip(cli, tmpdir, datafiles, srcdir):
# a - contains read-only files in a writable directory
# b - root directory has read-only permission
# c - contains one file that has neither read nor write permissions. Base-dir set to '' to extract the root of the tarball
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'read-only'))
-@pytest.mark.parametrize("tar_name, base_dir", [("a", "*"), ("b", '*'), ("c", '')])
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "read-only"))
+@pytest.mark.parametrize("tar_name, base_dir", [("a", "*"), ("b", "*"), ("c", "")])
def test_read_only_dir(cli, tmpdir, datafiles, tar_name, base_dir):
try:
project = str(datafiles)
@@ -290,25 +293,28 @@ def test_read_only_dir(cli, tmpdir, datafiles, tar_name, base_dir):
bst_path = os.path.join(project, "target.bst")
tar_file = "{}.tar.gz".format(tar_name)
- _yaml.roundtrip_dump({
- 'kind': 'import',
- 'sources': [
- {
- 'kind': 'tar',
- 'url': 'tmpdir:/{}'.format(tar_file),
- 'ref': 'foo',
- 'base-dir': base_dir
- }
- ]
- }, bst_path)
+ _yaml.roundtrip_dump(
+ {
+ "kind": "import",
+ "sources": [
+ {
+ "kind": "tar",
+ "url": "tmpdir:/{}".format(tar_file),
+ "ref": "foo",
+ "base-dir": base_dir,
+ }
+ ],
+ },
+ bst_path,
+ )
# Get the tarball in tests/sources/tar/read-only/content
#
# NOTE that we need to do this because tarfile.open and tar.add()
# are packing the tar up with writeable files and dirs
- tarball = os.path.join(str(datafiles), 'content', tar_file)
+ tarball = os.path.join(str(datafiles), "content", tar_file)
if not os.path.exists(tarball):
- raise FileNotFoundError('{} does not exist'.format(tarball))
+ raise FileNotFoundError("{} does not exist".format(tarball))
copyfile(tarball, os.path.join(str(tmpdir), tar_file))
# Because this test can potentially leave directories behind
@@ -320,11 +326,15 @@ def test_read_only_dir(cli, tmpdir, datafiles, tar_name, base_dir):
env = {"TMP": tmpdir_str}
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'], env=env)
+ result = cli.run(
+ project=project, args=["source", "track", "target.bst"], env=env
+ )
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'], env=env)
+ result = cli.run(
+ project=project, args=["source", "fetch", "target.bst"], env=env
+ )
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'], env=env)
+ result = cli.run(project=project, args=["build", "target.bst"], env=env)
result.assert_success()
finally:
@@ -336,85 +346,91 @@ def test_read_only_dir(cli, tmpdir, datafiles, tar_name, base_dir):
os.rmdir(path)
else:
os.remove(path)
+
rmtree(str(tmpdir), onerror=make_dir_writable)
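
The cleanup above relies on shutil.rmtree's onerror hook to remove the read-only entries this test deliberately creates. A self-contained sketch of that pattern (the assumed shape of make_dir_writable, not its exact body):

import os
import shutil
import stat


def _make_writable_and_retry(_func, path, _excinfo):
    # Called by rmtree when a removal fails: make the entry writable,
    # then retry the removal by hand.
    os.chmod(path, stat.S_IRWXU)
    if os.path.isdir(path):
        os.rmdir(path)
    else:
        os.remove(path)


def force_rmtree(directory):
    shutil.rmtree(directory, onerror=_make_writable_and_retry)
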
-@pytest.mark.parametrize('server_type', ('FTP', 'HTTP'))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.parametrize("server_type", ("FTP", "HTTP"))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_use_netrc(cli, datafiles, server_type, tmpdir):
- file_server_files = os.path.join(str(tmpdir), 'file_server')
- fake_home = os.path.join(str(tmpdir), 'fake_home')
+ file_server_files = os.path.join(str(tmpdir), "file_server")
+ fake_home = os.path.join(str(tmpdir), "fake_home")
os.makedirs(file_server_files, exist_ok=True)
os.makedirs(fake_home, exist_ok=True)
project = str(datafiles)
- checkoutdir = os.path.join(str(tmpdir), 'checkout')
+ checkoutdir = os.path.join(str(tmpdir), "checkout")
- os.environ['HOME'] = fake_home
- with open(os.path.join(fake_home, '.netrc'), 'wb') as f:
+ os.environ["HOME"] = fake_home
+ with open(os.path.join(fake_home, ".netrc"), "wb") as f:
os.fchmod(f.fileno(), 0o700)
- f.write(b'machine 127.0.0.1\n')
- f.write(b'login testuser\n')
- f.write(b'password 12345\n')
+ f.write(b"machine 127.0.0.1\n")
+ f.write(b"login testuser\n")
+ f.write(b"password 12345\n")
with create_file_server(server_type) as server:
- server.add_user('testuser', '12345', file_server_files)
+ server.add_user("testuser", "12345", file_server_files)
generate_project_file_server(server.base_url(), project)
- src_tar = os.path.join(file_server_files, 'a.tar.gz')
- _assemble_tar(os.path.join(str(datafiles), 'content'), 'a', src_tar)
+ src_tar = os.path.join(file_server_files, "a.tar.gz")
+ _assemble_tar(os.path.join(str(datafiles), "content"), "a", src_tar)
server.start()
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
- original_dir = os.path.join(str(datafiles), 'content', 'a')
+ original_dir = os.path.join(str(datafiles), "content", "a")
original_contents = list_dir_contents(original_dir)
checkout_contents = list_dir_contents(checkoutdir)
assert checkout_contents == original_contents
-@pytest.mark.parametrize('server_type', ('FTP', 'HTTP'))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.parametrize("server_type", ("FTP", "HTTP"))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_netrc_already_specified_user(cli, datafiles, server_type, tmpdir):
- file_server_files = os.path.join(str(tmpdir), 'file_server')
- fake_home = os.path.join(str(tmpdir), 'fake_home')
+ file_server_files = os.path.join(str(tmpdir), "file_server")
+ fake_home = os.path.join(str(tmpdir), "fake_home")
os.makedirs(file_server_files, exist_ok=True)
os.makedirs(fake_home, exist_ok=True)
project = str(datafiles)
- os.environ['HOME'] = fake_home
- with open(os.path.join(fake_home, '.netrc'), 'wb') as f:
+ os.environ["HOME"] = fake_home
+ with open(os.path.join(fake_home, ".netrc"), "wb") as f:
os.fchmod(f.fileno(), 0o700)
- f.write(b'machine 127.0.0.1\n')
- f.write(b'login testuser\n')
- f.write(b'password 12345\n')
+ f.write(b"machine 127.0.0.1\n")
+ f.write(b"login testuser\n")
+ f.write(b"password 12345\n")
with create_file_server(server_type) as server:
- server.add_user('otheruser', '12345', file_server_files)
+ server.add_user("otheruser", "12345", file_server_files)
parts = urllib.parse.urlsplit(server.base_url())
- base_url = urllib.parse.urlunsplit([parts[0], 'otheruser@{}'.format(parts[1]), *parts[2:]])
+ base_url = urllib.parse.urlunsplit(
+ [parts[0], "otheruser@{}".format(parts[1]), *parts[2:]]
+ )
generate_project_file_server(base_url, project)
- src_tar = os.path.join(file_server_files, 'a.tar.gz')
- _assemble_tar(os.path.join(str(datafiles), 'content'), 'a', src_tar)
+ src_tar = os.path.join(file_server_files, "a.tar.gz")
+ _assemble_tar(os.path.join(str(datafiles), "content"), "a", src_tar)
server.start()
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
# Test that BuildStream doesn't crash if HOME is unset while
# the netrc module is trying to find its ~/.netrc file.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_homeless_environment(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -424,11 +440,13 @@ def test_homeless_environment(cli, tmpdir, datafiles):
_assemble_tar(os.path.join(str(datafiles), "content"), "a", src_tar)
# Use a track, make sure the plugin tries to find a ~/.netrc
- result = cli.run(project=project, args=['source', 'track', 'target.bst'], env={'HOME': None})
+ result = cli.run(
+ project=project, args=["source", "track", "target.bst"], env={"HOME": None}
+ )
result.assert_success()
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'out-of-basedir-hardlinks'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "out-of-basedir-hardlinks"))
def test_out_of_basedir_hardlinks(cli, tmpdir, datafiles):
def ensure_link(member):
# By default, python will simply duplicate files - we want
@@ -453,28 +471,33 @@ def test_out_of_basedir_hardlinks(cli, tmpdir, datafiles):
# Make sure our tarfile is actually created with the desired
# attributes set
with tarfile.open(src_tar, "r:gz") as tar:
- assert any(member.islnk() and
- member.path == "contents/to_extract/a" and
- member.linkname == "contents/elsewhere/a"
- for member in tar.getmembers())
+ assert any(
+ member.islnk()
+ and member.path == "contents/to_extract/a"
+ and member.linkname == "contents/elsewhere/a"
+ for member in tar.getmembers()
+ )
# Assert that we will actually create a singular copy of the file
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
- original_dir = os.path.join(str(datafiles), 'contents', 'to_extract')
+ original_dir = os.path.join(str(datafiles), "contents", "to_extract")
original_contents = list_dir_contents(original_dir)
checkout_contents = list_dir_contents(checkoutdir)
assert checkout_contents == original_contents
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'out-of-basedir-hardlinks'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "out-of-basedir-hardlinks"))
def test_malicious_out_of_basedir_hardlinks(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -499,13 +522,15 @@ def test_malicious_out_of_basedir_hardlinks(cli, tmpdir, datafiles):
# Make sure our tarfile is actually created with the desired
# attributes set
with tarfile.open(src_tar, "r:gz") as tar:
- assert any(member.islnk() and
- member.path == "contents/elsewhere/malicious" and
- member.linkname == "../../../malicious_target.bst"
- for member in tar.getmembers())
+ assert any(
+ member.islnk()
+ and member.path == "contents/elsewhere/malicious"
+ and member.linkname == "../../../malicious_target.bst"
+ for member in tar.getmembers()
+ )
# Try to execute the exploit
- result = cli.run(project=project, args=['source', 'track', 'malicious_target.bst'])
+ result = cli.run(project=project, args=["source", "track", "malicious_target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'malicious_target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "malicious_target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
diff --git a/tests/sources/zip.py b/tests/sources/zip.py
index 3fd43b4bb..d1b000167 100644
--- a/tests/sources/zip.py
+++ b/tests/sources/zip.py
@@ -12,17 +12,14 @@ from buildstream.testing import cli # pylint: disable=unused-import
from tests.testutils.file_server import create_file_server
from . import list_dir_contents
-DATA_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'zip',
-)
+DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "zip",)
def _assemble_zip(workingdir, dstfile):
old_dir = os.getcwd()
os.chdir(workingdir)
with zipfile.ZipFile(dstfile, "w") as zipfp:
- for root, dirs, files in os.walk('.'):
+ for root, dirs, files in os.walk("."):
names = dirs + files
names = [os.path.join(root, name) for name in names]
for name in names:
@@ -32,49 +29,41 @@ def _assemble_zip(workingdir, dstfile):
def generate_project(project_dir, tmpdir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump({
- 'name': 'foo',
- 'aliases': {
- 'tmpdir': "file:///" + str(tmpdir)
- }
- }, project_file)
+ _yaml.roundtrip_dump(
+ {"name": "foo", "aliases": {"tmpdir": "file:///" + str(tmpdir)}}, project_file
+ )
def generate_project_file_server(server, project_dir):
project_file = os.path.join(project_dir, "project.conf")
- _yaml.roundtrip_dump({
- 'name': 'foo',
- 'aliases': {
- 'tmpdir': server.base_url()
- }
- }, project_file)
+ _yaml.roundtrip_dump(
+ {"name": "foo", "aliases": {"tmpdir": server.base_url()}}, project_file
+ )
# Test that without ref, consistency is set appropriately.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-ref'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "no-ref"))
def test_no_ref(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
- assert cli.get_element_state(project, 'target.bst') == 'no reference'
+ assert cli.get_element_state(project, "target.bst") == "no reference"
# Test that when I fetch a nonexistent URL, errors are handled gracefully and a retry is performed.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_fetch_bad_url(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
assert "FAILURE Try #" in result.stderr
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
# Test that when I fetch with an invalid ref, it fails.
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_fetch_bad_ref(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -84,15 +73,13 @@ def test_fetch_bad_ref(cli, tmpdir, datafiles):
_assemble_zip(os.path.join(str(datafiles), "content"), src_zip)
# Try to fetch it
- result = cli.run(project=project, args=[
- 'source', 'fetch', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_main_error(ErrorDomain.STREAM, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
# Test that when tracking with a ref set, there is a warning
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_track_warning(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -102,15 +89,13 @@ def test_track_warning(cli, tmpdir, datafiles):
_assemble_zip(os.path.join(str(datafiles), "content"), src_zip)
# Track it
- result = cli.run(project=project, args=[
- 'source', 'track', 'target.bst'
- ])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
assert "Potential man-in-the-middle attack!" in result.stderr
# Test that a staged checkout matches what was tarred up, with the default first subdir
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_stage_default_basedir(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -121,13 +106,16 @@ def test_stage_default_basedir(cli, tmpdir, datafiles):
_assemble_zip(os.path.join(str(datafiles), "content"), src_zip)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -138,7 +126,7 @@ def test_stage_default_basedir(cli, tmpdir, datafiles):
# Test that a staged checkout matches what was tarred up, with an empty base-dir
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-basedir'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "no-basedir"))
def test_stage_no_basedir(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -149,13 +137,16 @@ def test_stage_no_basedir(cli, tmpdir, datafiles):
_assemble_zip(os.path.join(str(datafiles), "content"), src_zip)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the full content of the tarball is checked out (base-dir: '')
@@ -166,7 +157,7 @@ def test_stage_no_basedir(cli, tmpdir, datafiles):
# Test that a staged checkout matches what was tarred up, with an explicit basedir
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'explicit-basedir'))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "explicit-basedir"))
def test_stage_explicit_basedir(cli, tmpdir, datafiles):
project = str(datafiles)
generate_project(project, tmpdir)
@@ -177,13 +168,16 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles):
_assemble_zip(os.path.join(str(datafiles), "content"), src_zip)
# Track, fetch, build, checkout
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
# Check that the content of the first directory is checked out (base-dir: '*')
@@ -193,42 +187,45 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles):
assert checkout_contents == original_contents
-@pytest.mark.parametrize('server_type', ('FTP', 'HTTP'))
-@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
+@pytest.mark.parametrize("server_type", ("FTP", "HTTP"))
+@pytest.mark.datafiles(os.path.join(DATA_DIR, "fetch"))
def test_use_netrc(cli, datafiles, server_type, tmpdir):
- file_server_files = os.path.join(str(tmpdir), 'file_server')
- fake_home = os.path.join(str(tmpdir), 'fake_home')
+ file_server_files = os.path.join(str(tmpdir), "file_server")
+ fake_home = os.path.join(str(tmpdir), "fake_home")
os.makedirs(file_server_files, exist_ok=True)
os.makedirs(fake_home, exist_ok=True)
project = str(datafiles)
- checkoutdir = os.path.join(str(tmpdir), 'checkout')
+ checkoutdir = os.path.join(str(tmpdir), "checkout")
- os.environ['HOME'] = fake_home
- with open(os.path.join(fake_home, '.netrc'), 'wb') as f:
+ os.environ["HOME"] = fake_home
+ with open(os.path.join(fake_home, ".netrc"), "wb") as f:
os.fchmod(f.fileno(), 0o700)
- f.write(b'machine 127.0.0.1\n')
- f.write(b'login testuser\n')
- f.write(b'password 12345\n')
+ f.write(b"machine 127.0.0.1\n")
+ f.write(b"login testuser\n")
+ f.write(b"password 12345\n")
with create_file_server(server_type) as server:
- server.add_user('testuser', '12345', file_server_files)
+ server.add_user("testuser", "12345", file_server_files)
generate_project_file_server(server, project)
- src_zip = os.path.join(file_server_files, 'a.zip')
- _assemble_zip(os.path.join(str(datafiles), 'content'), src_zip)
+ src_zip = os.path.join(file_server_files, "a.zip")
+ _assemble_zip(os.path.join(str(datafiles), "content"), src_zip)
server.start()
- result = cli.run(project=project, args=['source', 'track', 'target.bst'])
+ result = cli.run(project=project, args=["source", "track", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
+ result = cli.run(project=project, args=["source", "fetch", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['build', 'target.bst'])
+ result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
- result = cli.run(project=project, args=['artifact', 'checkout', 'target.bst', '--directory', checkoutdir])
+ result = cli.run(
+ project=project,
+ args=["artifact", "checkout", "target.bst", "--directory", checkoutdir],
+ )
result.assert_success()
- original_dir = os.path.join(str(datafiles), 'content', 'a')
+ original_dir = os.path.join(str(datafiles), "content", "a")
original_contents = list_dir_contents(original_dir)
checkout_contents = list_dir_contents(checkoutdir)
assert checkout_contents == original_contents
diff --git a/tests/testutils/__init__.py b/tests/testutils/__init__.py
index 9642ddf47..117df6b38 100644
--- a/tests/testutils/__init__.py
+++ b/tests/testutils/__init__.py
@@ -23,7 +23,12 @@
# William Salmon <will.salmon@codethink.co.uk>
#
-from .artifactshare import create_artifact_share, create_split_share, assert_shared, assert_not_shared
+from .artifactshare import (
+ create_artifact_share,
+ create_split_share,
+ assert_shared,
+ assert_not_shared,
+)
from .context import dummy_context
from .element_generators import create_element_size, update_element_size
from .junction import generate_junction
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index 18ecc5e3e..39cc7da10 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -24,8 +24,7 @@ from buildstream._protos.buildstream.v2 import artifact_pb2
# cache_quota (int): Maximum amount of disk space to use
# casd (bool): Allow write access via casd
#
-class ArtifactShare():
-
+class ArtifactShare:
def __init__(self, directory, *, quota=None, casd=False, index_only=False):
# The working directory for the artifact share (in case it
@@ -38,9 +37,9 @@ class ArtifactShare():
# Unless this gets more complicated, just use this directly
# in tests as a remote artifact push/pull configuration
#
- self.repodir = os.path.join(self.directory, 'repo')
+ self.repodir = os.path.join(self.directory, "repo")
os.makedirs(self.repodir)
- self.artifactdir = os.path.join(self.repodir, 'artifacts', 'refs')
+ self.artifactdir = os.path.join(self.repodir, "artifacts", "refs")
os.makedirs(self.artifactdir)
self.cas = CASCache(self.repodir, casd=casd)
@@ -59,7 +58,7 @@ class ArtifactShare():
if port is None:
raise Exception("Error occurred when starting artifact server.")
- self.repo = 'http://localhost:{}'.format(port)
+ self.repo = "http://localhost:{}".format(port)
# run():
#
@@ -88,7 +87,7 @@ class ArtifactShare():
index_only=self.index_only,
)
)
- port = server.add_insecure_port('localhost:0')
+ port = server.add_insecure_port("localhost:0")
server.start()
except Exception:
q.put(None)
@@ -122,7 +121,7 @@ class ArtifactShare():
artifact_path = os.path.join(self.artifactdir, artifact_name)
try:
- with open(artifact_path, 'rb') as f:
+ with open(artifact_path, "rb") as f:
artifact_proto.ParseFromString(f.read())
except FileNotFoundError:
return None
@@ -135,7 +134,8 @@ class ArtifactShare():
def reachable_dir(digest):
self.cas._reachable_refs_dir(
- reachable, digest, update_mtime=False, check_exists=True)
+ reachable, digest, update_mtime=False, check_exists=True
+ )
try:
if str(artifact_proto.files):
@@ -213,20 +213,28 @@ def create_split_share(directory1, directory2, *, quota=None, casd=False):
storage.close()
-statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize f_bavail')
+statvfs_result = namedtuple("statvfs_result", "f_blocks f_bfree f_bsize f_bavail")
# Assert that a given artifact is in the share
#
-def assert_shared(cli, share, project, element_name, *, project_name='test'):
- if not share.get_artifact(cli.get_artifact_name(project, project_name, element_name)):
- raise AssertionError("Artifact share at {} does not contain the expected element {}"
- .format(share.repo, element_name))
+def assert_shared(cli, share, project, element_name, *, project_name="test"):
+ if not share.get_artifact(
+ cli.get_artifact_name(project, project_name, element_name)
+ ):
+ raise AssertionError(
+ "Artifact share at {} does not contain the expected element {}".format(
+ share.repo, element_name
+ )
+ )
# Assert that a given artifact is not in the share
#
-def assert_not_shared(cli, share, project, element_name, *, project_name='test'):
+def assert_not_shared(cli, share, project, element_name, *, project_name="test"):
if share.get_artifact(cli.get_artifact_name(project, project_name, element_name)):
- raise AssertionError("Artifact share at {} unexpectedly contains the element {}"
- .format(share.repo, element_name))
+ raise AssertionError(
+ "Artifact share at {} unexpectedly contains the element {}".format(
+ share.repo, element_name
+ )
+ )
diff --git a/tests/testutils/context.py b/tests/testutils/context.py
index 821adef0a..5d8294fd2 100644
--- a/tests/testutils/context.py
+++ b/tests/testutils/context.py
@@ -55,7 +55,9 @@ class _DummyTask:
@contextmanager
-def _get_dummy_task(self, activity_name, *, element_name=None, full_name=None, silent_nested=False):
+def _get_dummy_task(
+ self, activity_name, *, element_name=None, full_name=None, silent_nested=False
+):
yield _DummyTask("state", activity_name, full_name, 0)
diff --git a/tests/testutils/element_generators.py b/tests/testutils/element_generators.py
index 0fbca7f3e..6da465ab7 100644
--- a/tests/testutils/element_generators.py
+++ b/tests/testutils/element_generators.py
@@ -28,8 +28,8 @@ def create_element_size(name, project_dir, elements_path, dependencies, size):
os.makedirs(full_elements_path, exist_ok=True)
# Create a git repo
- repodir = os.path.join(project_dir, 'repos')
- repo = create_repo('git', repodir, subdir=name)
+ repodir = os.path.join(project_dir, "repos")
+ repo = create_repo("git", repodir, subdir=name)
with utils._tempdir(dir=project_dir) as tmp:
@@ -38,26 +38,24 @@ def create_element_size(name, project_dir, elements_path, dependencies, size):
# part; this ensures we never include a .git/ directory
# in the cached artifacts for these sized elements.
#
- datadir = os.path.join(tmp, 'data')
+ datadir = os.path.join(tmp, "data")
os.makedirs(datadir)
# Use /dev/urandom to create the sized file in the datadir
- with open(os.path.join(datadir, name), 'wb+') as f:
+ with open(os.path.join(datadir, name), "wb+") as f:
f.write(os.urandom(size))
# Create the git repo from the temp directory
ref = repo.create(tmp)
element = {
- 'kind': 'import',
- 'sources': [
- repo.source_config(ref=ref)
- ],
- 'config': {
+ "kind": "import",
+ "sources": [repo.source_config(ref=ref)],
+ "config": {
# Extract only the data directory
- 'source': 'data'
+ "source": "data"
},
- 'depends': dependencies
+ "depends": dependencies,
}
_yaml.roundtrip_dump(element, os.path.join(project_dir, elements_path, name))
@@ -91,9 +89,9 @@ def update_element_size(name, project_dir, repo, size):
new_file = os.path.join(tmp, name)
# Use /dev/urandom to create the sized file in the datadir
- with open(new_file, 'wb+') as f:
+ with open(new_file, "wb+") as f:
f.write(os.urandom(size))
# Modify the git repo with a new commit to the same path,
# replacing the original file with a new one.
- repo.modify_file(new_file, os.path.join('data', name))
+ repo.modify_file(new_file, os.path.join("data", name))
diff --git a/tests/testutils/file_server.py b/tests/testutils/file_server.py
index 05f896013..3e6e41954 100644
--- a/tests/testutils/file_server.py
+++ b/tests/testutils/file_server.py
@@ -6,9 +6,9 @@ from .http_server import SimpleHttpServer
@contextmanager
def create_file_server(file_server_type):
- if file_server_type == 'FTP':
+ if file_server_type == "FTP":
server = SimpleFtpServer()
- elif file_server_type == 'HTTP':
+ elif file_server_type == "HTTP":
server = SimpleHttpServer()
else:
assert False
diff --git a/tests/testutils/filetypegenerator.py b/tests/testutils/filetypegenerator.py
index 8b7d818d8..41c502be2 100644
--- a/tests/testutils/filetypegenerator.py
+++ b/tests/testutils/filetypegenerator.py
@@ -39,7 +39,7 @@ def generate_file_types(path):
clean()
- with open(path, 'w'):
+ with open(path, "w"):
pass
yield
clean()
diff --git a/tests/testutils/ftp_server.py b/tests/testutils/ftp_server.py
index 52c05f8ba..7eda90b70 100644
--- a/tests/testutils/ftp_server.py
+++ b/tests/testutils/ftp_server.py
@@ -11,7 +11,7 @@ class SimpleFtpServer(multiprocessing.Process):
self.authorizer = DummyAuthorizer()
handler = FTPHandler
handler.authorizer = self.authorizer
- self.server = FTPServer(('127.0.0.1', 0), handler)
+ self.server = FTPServer(("127.0.0.1", 0), handler)
def run(self):
self.server.serve_forever()
@@ -26,7 +26,7 @@ class SimpleFtpServer(multiprocessing.Process):
self.authorizer.add_anonymous(cwd)
def add_user(self, user, password, cwd):
- self.authorizer.add_user(user, password, cwd, perm='elradfmwMT')
+ self.authorizer.add_user(user, password, cwd, perm="elradfmwMT")
def base_url(self):
- return 'ftp://127.0.0.1:{}'.format(self.server.address[1])
+ return "ftp://127.0.0.1:{}".format(self.server.address[1])
diff --git a/tests/testutils/http_server.py b/tests/testutils/http_server.py
index b72e745c5..f333f28b2 100644
--- a/tests/testutils/http_server.py
+++ b/tests/testutils/http_server.py
@@ -11,45 +11,46 @@ class Unauthorized(Exception):
class RequestHandler(SimpleHTTPRequestHandler):
-
def get_root_dir(self):
- authorization = self.headers.get('authorization')
+ authorization = self.headers.get("authorization")
if not authorization:
if not self.server.anonymous_dir:
- raise Unauthorized('unauthorized')
+ raise Unauthorized("unauthorized")
return self.server.anonymous_dir
else:
authorization = authorization.split()
- if len(authorization) != 2 or authorization[0].lower() != 'basic':
- raise Unauthorized('unauthorized')
+ if len(authorization) != 2 or authorization[0].lower() != "basic":
+ raise Unauthorized("unauthorized")
try:
- decoded = base64.decodebytes(authorization[1].encode('ascii'))
- user, password = decoded.decode('ascii').split(':')
+ decoded = base64.decodebytes(authorization[1].encode("ascii"))
+ user, password = decoded.decode("ascii").split(":")
expected_password, directory = self.server.users[user]
if password == expected_password:
return directory
- except: # noqa
- raise Unauthorized('unauthorized')
+ except: # noqa
+ raise Unauthorized("unauthorized")
return None
def unauthorized(self):
shortmsg, longmsg = self.responses[HTTPStatus.UNAUTHORIZED]
self.send_response(HTTPStatus.UNAUTHORIZED, shortmsg)
- self.send_header('Connection', 'close')
-
- content = (self.error_message_format % {
- 'code': HTTPStatus.UNAUTHORIZED,
- 'message': html.escape(longmsg, quote=False),
- 'explain': html.escape(longmsg, quote=False)
- })
- body = content.encode('UTF-8', 'replace')
- self.send_header('Content-Type', self.error_content_type)
- self.send_header('Content-Length', str(len(body)))
- self.send_header('WWW-Authenticate', 'Basic realm="{}"'.format(self.server.realm))
+ self.send_header("Connection", "close")
+
+ content = self.error_message_format % {
+ "code": HTTPStatus.UNAUTHORIZED,
+ "message": html.escape(longmsg, quote=False),
+ "explain": html.escape(longmsg, quote=False),
+ }
+ body = content.encode("UTF-8", "replace")
+ self.send_header("Content-Type", self.error_content_type)
+ self.send_header("Content-Length", str(len(body)))
+ self.send_header(
+ "WWW-Authenticate", 'Basic realm="{}"'.format(self.server.realm)
+ )
self.end_headers()
self.end_headers()
- if self.command != 'HEAD' and body:
+ if self.command != "HEAD" and body:
self.wfile.write(body)
def do_GET(self):
@@ -65,11 +66,11 @@ class RequestHandler(SimpleHTTPRequestHandler):
self.unauthorized()
def translate_path(self, path):
- path = path.split('?', 1)[0]
- path = path.split('#', 1)[0]
+ path = path.split("?", 1)[0]
+ path = path.split("#", 1)[0]
path = posixpath.normpath(path)
assert posixpath.isabs(path)
- path = posixpath.relpath(path, '/')
+ path = posixpath.relpath(path, "/")
return os.path.join(self.get_root_dir(), path)
@@ -77,14 +78,14 @@ class AuthHTTPServer(HTTPServer):
def __init__(self, *args, **kwargs):
self.users = {}
self.anonymous_dir = None
- self.realm = 'Realm'
+ self.realm = "Realm"
super().__init__(*args, **kwargs)
class SimpleHttpServer(multiprocessing.Process):
def __init__(self):
super().__init__()
- self.server = AuthHTTPServer(('127.0.0.1', 0), RequestHandler)
+ self.server = AuthHTTPServer(("127.0.0.1", 0), RequestHandler)
self.started = False
def start(self):
@@ -107,4 +108,4 @@ class SimpleHttpServer(multiprocessing.Process):
self.server.users[user] = (password, cwd)
def base_url(self):
- return 'http://127.0.0.1:{}'.format(self.server.server_port)
+ return "http://127.0.0.1:{}".format(self.server.server_port)
diff --git a/tests/testutils/junction.py b/tests/testutils/junction.py
index e867695c4..c086f6f17 100644
--- a/tests/testutils/junction.py
+++ b/tests/testutils/junction.py
@@ -19,17 +19,12 @@ def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True)
# Create a repo to hold the subproject and generate
# a junction element for it
#
- repo = create_repo('git', str(tmpdir))
+ repo = create_repo("git", str(tmpdir))
source_ref = ref = repo.create(subproject_path)
if not store_ref:
source_ref = None
- element = {
- 'kind': 'junction',
- 'sources': [
- repo.source_config(ref=source_ref)
- ]
- }
+ element = {"kind": "junction", "sources": [repo.source_config(ref=source_ref)]}
_yaml.roundtrip_dump(element, junction_path)
return ref
diff --git a/tests/testutils/patch.py b/tests/testutils/patch.py
index df287f374..6dec68ca9 100644
--- a/tests/testutils/patch.py
+++ b/tests/testutils/patch.py
@@ -3,19 +3,21 @@ import subprocess
def apply(file, patch):
try:
- subprocess.check_output(['patch', file, patch])
+ subprocess.check_output(["patch", file, patch])
except subprocess.CalledProcessError as e:
message = "Patch failed with exit code {}\n Output:\n {}".format(
- e.returncode, e.output)
+ e.returncode, e.output
+ )
print(message)
raise
def remove(file, patch):
try:
- subprocess.check_output(['patch', '--reverse', file, patch])
+ subprocess.check_output(["patch", "--reverse", file, patch])
except subprocess.CalledProcessError as e:
message = "patch --reverse failed with exit code {}\n Output:\n {}".format(
- e.returncode, e.output)
+ e.returncode, e.output
+ )
print(message)
raise
diff --git a/tests/testutils/python_repo.py b/tests/testutils/python_repo.py
index c8e5bf343..13e9f6209 100644
--- a/tests/testutils/python_repo.py
+++ b/tests/testutils/python_repo.py
@@ -7,7 +7,7 @@ import sys
import pytest
-SETUP_TEMPLATE = '''\
+SETUP_TEMPLATE = """\
from setuptools import setup
setup(
@@ -22,18 +22,18 @@ setup(
]
}}
)
-'''
+"""
# All packages generated via generate_pip_package will have the functions below
-INIT_TEMPLATE = '''\
+INIT_TEMPLATE = """\
def main():
print('This is {name}')
def hello(actor='world'):
print('Hello {{}}! This is {name}'.format(actor))
-'''
+"""
-HTML_TEMPLATE = '''\
+HTML_TEMPLATE = """\
<html>
<head>
<title>Links for {name}</title>
@@ -42,7 +42,7 @@ HTML_TEMPLATE = '''\
<a href='{name}-{version}.tar.gz'>{name}-{version}.tar.gz</a><br />
</body>
</html>
-'''
+"""
# Creates a simple python source distribution and copies this into a specified
@@ -57,11 +57,11 @@ HTML_TEMPLATE = '''\
# Returns:
# None
#
-def generate_pip_package(tmpdir, pypi, name, version='0.1', dependencies=None):
+def generate_pip_package(tmpdir, pypi, name, version="0.1", dependencies=None):
if dependencies is None:
dependencies = []
# check if package already exists in pypi
- pypi_package = os.path.join(pypi, re.sub('[^0-9a-zA-Z]+', '-', name))
+ pypi_package = os.path.join(pypi, re.sub("[^0-9a-zA-Z]+", "-", name))
if os.path.exists(pypi_package):
return
@@ -73,15 +73,12 @@ def generate_pip_package(tmpdir, pypi, name, version='0.1', dependencies=None):
# `-- package
# `-- __init__.py
#
- setup_file = os.path.join(tmpdir, 'setup.py')
- pkgdirname = re.sub('[^0-9a-zA-Z]+', '', name)
- with open(setup_file, 'w') as f:
+ setup_file = os.path.join(tmpdir, "setup.py")
+ pkgdirname = re.sub("[^0-9a-zA-Z]+", "", name)
+ with open(setup_file, "w") as f:
f.write(
SETUP_TEMPLATE.format(
- name=name,
- version=version,
- pkgdirname=pkgdirname,
- pkgdeps=dependencies
+ name=name, version=version, pkgdirname=pkgdirname, pkgdeps=dependencies
)
)
os.chmod(setup_file, 0o755)
@@ -89,13 +86,13 @@ def generate_pip_package(tmpdir, pypi, name, version='0.1', dependencies=None):
package = os.path.join(tmpdir, pkgdirname)
os.makedirs(package)
- main_file = os.path.join(package, '__init__.py')
- with open(main_file, 'w') as f:
+ main_file = os.path.join(package, "__init__.py")
+ with open(main_file, "w") as f:
f.write(INIT_TEMPLATE.format(name=name))
os.chmod(main_file, 0o644)
# Run sdist with a fresh process
- p = subprocess.run([sys.executable, 'setup.py', 'sdist'], cwd=tmpdir)
+ p = subprocess.run([sys.executable, "setup.py", "sdist"], cwd=tmpdir)
assert p.returncode == 0
# create directory for this package in pypi resulting in a directory
@@ -109,12 +106,12 @@ def generate_pip_package(tmpdir, pypi, name, version='0.1', dependencies=None):
os.makedirs(pypi_package)
# add an index html page
- index_html = os.path.join(pypi_package, 'index.html')
- with open(index_html, 'w') as f:
+ index_html = os.path.join(pypi_package, "index.html")
+ with open(index_html, "w") as f:
f.write(HTML_TEMPLATE.format(name=name, version=version))
# copy generated tarfile to pypi package
- dist_dir = os.path.join(tmpdir, 'dist')
+ dist_dir = os.path.join(tmpdir, "dist")
for tar in os.listdir(dist_dir):
tarpath = os.path.join(dist_dir, tar)
shutil.copy(tarpath, pypi_package)
@@ -123,7 +120,7 @@ def generate_pip_package(tmpdir, pypi, name, version='0.1', dependencies=None):
@pytest.fixture
def setup_pypi_repo(tmpdir):
def create_pkgdir(package):
- pkgdirname = re.sub('[^0-9a-zA-Z]+', '', package)
+ pkgdirname = re.sub("[^0-9a-zA-Z]+", "", package)
pkgdir = os.path.join(str(tmpdir), pkgdirname)
os.makedirs(pkgdir)
return pkgdir
@@ -131,7 +128,9 @@ def setup_pypi_repo(tmpdir):
def add_packages(packages, pypi_repo):
for package, dependencies in packages.items():
pkgdir = create_pkgdir(package)
- generate_pip_package(pkgdir, pypi_repo, package, dependencies=list(dependencies.keys()))
+ generate_pip_package(
+ pkgdir, pypi_repo, package, dependencies=list(dependencies.keys())
+ )
for dependency, dependency_dependencies in dependencies.items():
add_packages({dependency: dependency_dependencies}, pypi_repo)
diff --git a/tests/testutils/repo/bzr.py b/tests/testutils/repo/bzr.py
index 074712af1..246a3eb35 100644
--- a/tests/testutils/repo/bzr.py
+++ b/tests/testutils/repo/bzr.py
@@ -7,7 +7,6 @@ from buildstream.testing._utils.site import BZR, BZR_ENV, HAVE_BZR
class Bzr(Repo):
-
def __init__(self, directory, subdir):
if not HAVE_BZR:
pytest.skip("bzr is not available")
@@ -20,34 +19,39 @@ class Bzr(Repo):
def create(self, directory):
# Work around race condition in bzr's creation of ~/.bazaar in
# ensure_config_dir_exists() when running tests in parallel.
- bazaar_config_dir = os.path.expanduser('~/.bazaar')
+ bazaar_config_dir = os.path.expanduser("~/.bazaar")
os.makedirs(bazaar_config_dir, exist_ok=True)
- branch_dir = os.path.join(self.repo, 'trunk')
+ branch_dir = os.path.join(self.repo, "trunk")
- subprocess.call([self.bzr, 'init-repo', self.repo], env=self.env)
- subprocess.call([self.bzr, 'init', branch_dir], env=self.env)
+ subprocess.call([self.bzr, "init-repo", self.repo], env=self.env)
+ subprocess.call([self.bzr, "init", branch_dir], env=self.env)
self.copy_directory(directory, branch_dir)
- subprocess.call([self.bzr, 'add', '.'], env=self.env, cwd=branch_dir)
- subprocess.call([self.bzr, 'commit', '--message="Initial commit"'],
- env=self.env, cwd=branch_dir)
+ subprocess.call([self.bzr, "add", "."], env=self.env, cwd=branch_dir)
+ subprocess.call(
+ [self.bzr, "commit", '--message="Initial commit"'],
+ env=self.env,
+ cwd=branch_dir,
+ )
return self.latest_commit()
def source_config(self, ref=None):
- config = {
- 'kind': 'bzr',
- 'url': 'file://' + self.repo,
- 'track': 'trunk'
- }
+ config = {"kind": "bzr", "url": "file://" + self.repo, "track": "trunk"}
if ref is not None:
- config['ref'] = ref
+ config["ref"] = ref
return config
def latest_commit(self):
- return subprocess.check_output([
- self.bzr, 'version-info',
- '--custom', '--template={revno}',
- os.path.join(self.repo, 'trunk')
- ], env=self.env, universal_newlines=True).strip()
+ return subprocess.check_output(
+ [
+ self.bzr,
+ "version-info",
+ "--custom",
+ "--template={revno}",
+ os.path.join(self.repo, "trunk"),
+ ],
+ env=self.env,
+ universal_newlines=True,
+ ).strip()
diff --git a/tests/testutils/repo/git.py b/tests/testutils/repo/git.py
index 46694fcf2..19ab91601 100644
--- a/tests/testutils/repo/git.py
+++ b/tests/testutils/repo/git.py
@@ -9,7 +9,6 @@ from buildstream.testing._utils.site import GIT, GIT_ENV, HAVE_GIT
class Git(Repo):
-
def __init__(self, directory, subdir):
if not HAVE_GIT:
pytest.skip("git is not available")
@@ -24,99 +23,93 @@ class Git(Repo):
def _run_git(self, *args, **kwargs):
argv = [GIT]
argv.extend(args)
- if 'env' not in kwargs:
- kwargs['env'] = dict(self.env, PWD=self.repo)
- kwargs.setdefault('cwd', self.repo)
- kwargs.setdefault('check', True)
+ if "env" not in kwargs:
+ kwargs["env"] = dict(self.env, PWD=self.repo)
+ kwargs.setdefault("cwd", self.repo)
+ kwargs.setdefault("check", True)
return subprocess.run(argv, **kwargs)
def create(self, directory):
self.copy_directory(directory, self.repo)
- self._run_git('init', '.')
- self._run_git('add', '.')
- self._run_git('commit', '-m', 'Initial commit')
+ self._run_git("init", ".")
+ self._run_git("add", ".")
+ self._run_git("commit", "-m", "Initial commit")
return self.latest_commit()
def add_tag(self, tag):
- self._run_git('tag', tag)
+ self._run_git("tag", tag)
def add_annotated_tag(self, tag, message):
- self._run_git('tag', '-a', tag, '-m', message)
+ self._run_git("tag", "-a", tag, "-m", message)
def add_commit(self):
- self._run_git('commit', '--allow-empty', '-m', 'Additional commit')
+ self._run_git("commit", "--allow-empty", "-m", "Additional commit")
return self.latest_commit()
def add_file(self, filename):
shutil.copy(filename, self.repo)
- self._run_git('add', os.path.basename(filename))
- self._run_git('commit', '-m', 'Added {}'.format(os.path.basename(filename)))
+ self._run_git("add", os.path.basename(filename))
+ self._run_git("commit", "-m", "Added {}".format(os.path.basename(filename)))
return self.latest_commit()
def modify_file(self, new_file, path):
shutil.copy(new_file, os.path.join(self.repo, path))
- self._run_git('commit', path, '-m', 'Modified {}'.format(os.path.basename(path)))
+ self._run_git(
+ "commit", path, "-m", "Modified {}".format(os.path.basename(path))
+ )
return self.latest_commit()
def add_submodule(self, subdir, url=None, checkout=None):
submodule = {}
if checkout is not None:
- submodule['checkout'] = checkout
+ submodule["checkout"] = checkout
if url is not None:
- submodule['url'] = url
+ submodule["url"] = url
self.submodules[subdir] = submodule
- self._run_git('submodule', 'add', url, subdir)
- self._run_git('commit', '-m', 'Added the submodule')
+ self._run_git("submodule", "add", url, subdir)
+ self._run_git("commit", "-m", "Added the submodule")
return self.latest_commit()
    # This can also be used to remove a file or a submodule
def remove_path(self, path):
- self._run_git('rm', path)
- self._run_git('commit', '-m', 'Removing {}'.format(path))
+ self._run_git("rm", path)
+ self._run_git("commit", "-m", "Removing {}".format(path))
return self.latest_commit()
def source_config(self, ref=None):
return self.source_config_extra(ref)
def source_config_extra(self, ref=None, checkout_submodules=None):
- config = {
- 'kind': 'git',
- 'url': 'file://' + self.repo,
- 'track': 'master'
- }
+ config = {"kind": "git", "url": "file://" + self.repo, "track": "master"}
if ref is not None:
- config['ref'] = ref
+ config["ref"] = ref
if checkout_submodules is not None:
- config['checkout-submodules'] = checkout_submodules
+ config["checkout-submodules"] = checkout_submodules
if self.submodules:
- config['submodules'] = dict(self.submodules)
+ config["submodules"] = dict(self.submodules)
return config
def latest_commit(self):
return self._run_git(
- 'rev-parse', 'HEAD',
- stdout=subprocess.PIPE,
- universal_newlines=True,
+ "rev-parse", "HEAD", stdout=subprocess.PIPE, universal_newlines=True,
).stdout.strip()
def branch(self, branch_name):
- self._run_git('checkout', '-b', branch_name)
+ self._run_git("checkout", "-b", branch_name)
def delete_tag(self, tag_name):
- self._run_git('tag', '-d', tag_name)
+ self._run_git("tag", "-d", tag_name)
def checkout(self, commit):
- self._run_git('checkout', commit)
+ self._run_git("checkout", commit)
def merge(self, commit):
- self._run_git('merge', '-m', 'Merge', commit)
+ self._run_git("merge", "-m", "Merge", commit)
return self.latest_commit()
def rev_parse(self, rev):
return self._run_git(
- 'rev-parse', rev,
- stdout=subprocess.PIPE,
- universal_newlines=True,
+ "rev-parse", rev, stdout=subprocess.PIPE, universal_newlines=True,
).stdout.strip()
diff --git a/tests/testutils/repo/tar.py b/tests/testutils/repo/tar.py
index 63231fa4b..7eacd6b4a 100644
--- a/tests/testutils/repo/tar.py
+++ b/tests/testutils/repo/tar.py
@@ -7,9 +7,8 @@ from buildstream.testing import Repo
class Tar(Repo):
-
def create(self, directory):
- tarball = os.path.join(self.repo, 'file.tar.gz')
+ tarball = os.path.join(self.repo, "file.tar.gz")
old_dir = os.getcwd()
os.chdir(directory)
@@ -20,14 +19,14 @@ class Tar(Repo):
return sha256sum(tarball)
def source_config(self, ref=None):
- tarball = os.path.join(self.repo, 'file.tar.gz')
+ tarball = os.path.join(self.repo, "file.tar.gz")
config = {
- 'kind': 'tar',
- 'url': 'file://' + tarball,
- 'directory': '',
- 'base-dir': ''
+ "kind": "tar",
+ "url": "file://" + tarball,
+ "directory": "",
+ "base-dir": "",
}
if ref is not None:
- config['ref'] = ref
+ config["ref"] = ref
return config
diff --git a/tests/testutils/repo/zip.py b/tests/testutils/repo/zip.py
index df3f834a9..ff4f38076 100644
--- a/tests/testutils/repo/zip.py
+++ b/tests/testutils/repo/zip.py
@@ -7,14 +7,13 @@ from buildstream.testing import Repo
class Zip(Repo):
-
def create(self, directory):
- archive = os.path.join(self.repo, 'file.zip')
+ archive = os.path.join(self.repo, "file.zip")
old_dir = os.getcwd()
os.chdir(directory)
with zipfile.ZipFile(archive, "w") as zipfp:
- for root, dirs, files in os.walk('.'):
+ for root, dirs, files in os.walk("."):
names = dirs + files
names = [os.path.join(root, name) for name in names]
@@ -26,14 +25,14 @@ class Zip(Repo):
return sha256sum(archive)
def source_config(self, ref=None):
- archive = os.path.join(self.repo, 'file.zip')
+ archive = os.path.join(self.repo, "file.zip")
config = {
- 'kind': 'zip',
- 'url': 'file://' + archive,
- 'directory': '',
- 'base-dir': ''
+ "kind": "zip",
+ "url": "file://" + archive,
+ "directory": "",
+ "base-dir": "",
}
if ref is not None:
- config['ref'] = ref
+ config["ref"] = ref
return config
diff --git a/tests/testutils/setuptools.py b/tests/testutils/setuptools.py
index cb61e1976..119979da6 100644
--- a/tests/testutils/setuptools.py
+++ b/tests/testutils/setuptools.py
@@ -4,19 +4,19 @@ import pkg_resources
# A mock setuptools dist object.
-class MockDist():
+class MockDist:
def __init__(self, datafiles, module_name):
self.datafiles = datafiles
self.module_name = module_name
def get_resource_filename(self, *_args, **_kwargs):
- return os.path.join(self.datafiles.dirname,
- self.datafiles.basename,
- self.module_name)
+ return os.path.join(
+ self.datafiles.dirname, self.datafiles.basename, self.module_name
+ )
# A mock setuptools entry object.
-class MockEntry():
+class MockEntry:
def __init__(self, datafiles, module_name):
self.dist = MockDist(datafiles, module_name)
self.module_name = module_name
@@ -30,7 +30,7 @@ class MockEntry():
@pytest.fixture()
def entry_fixture(monkeypatch):
def patch(datafiles, entry_point, lookup_string):
- dist, package = lookup_string.split(':')
+ dist, package = lookup_string.split(":")
def mock_entry(pdist, pentry_point, ppackage):
assert pdist == dist
@@ -38,6 +38,7 @@ def entry_fixture(monkeypatch):
assert ppackage == package
return MockEntry(datafiles, package)
- monkeypatch.setattr(pkg_resources, 'get_entry_info', mock_entry)
+
+ monkeypatch.setattr(pkg_resources, "get_entry_info", mock_entry)
return patch