summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorBenjamin Schubert <bschubert15@bloomberg.net>2019-11-12 10:24:36 +0000
committerbst-marge-bot <marge-bot@buildstream.build>2019-11-12 13:02:30 +0000
commit7c470b46b33257bb81d70a00cb6b2114d68b9097 (patch)
tree343fbc043c692b4036b7e4b56e9a7a9493b44bbf
parentd51a51002b2b04f20bfa3376b6fef26add26a88d (diff)
downloadbuildstream-7c470b46b33257bb81d70a00cb6b2114d68b9097.tar.gz
_remote: Ensure checks done in the subclasses are propagated
Currently, the `BaseRemote` would call `_check()` on the children, which states that errors should be sent back as a string. However, `BaseRemote` doesn't check the return of `_check()`. This changes the contract so that subclasses throw `RemoteError` themselves. This also fixes the `ArtifactShare` and adds a test.
-rw-r--r--src/buildstream/_artifactcache.py21
-rw-r--r--src/buildstream/_remote.py6
-rw-r--r--tests/artifactcache/capabilities.py55
-rw-r--r--tests/testutils/artifactshare.py139
4 files changed, 162 insertions, 59 deletions
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index d9112cd58..79d0dc50b 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -1,5 +1,6 @@
#
# Copyright (C) 2017-2018 Codethink Limited
+# Copyright (C) 2019 Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
@@ -21,7 +22,7 @@ import os
import grpc
from ._basecache import BaseCache
-from ._exceptions import ArtifactError, CASError, CASCacheError, CASRemoteError
+from ._exceptions import ArtifactError, CASError, CASCacheError, CASRemoteError, RemoteError
from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
artifact_pb2, artifact_pb2_grpc
@@ -60,10 +61,10 @@ class ArtifactRemote(BaseRemote):
#
# Check if this remote provides everything required for the
# particular kind of remote. This is expected to be called as part
- # of check(), and must be called in a non-main process.
+ # of check()
#
- # Returns:
- # (str|None): An error message, or None if no error message.
+ # Raises:
+ # RemoteError: If the upstream has a problem
#
def _check(self):
capabilities_service = buildstream_pb2_grpc.CapabilitiesStub(self.channel)
@@ -77,18 +78,16 @@ class ArtifactRemote(BaseRemote):
except grpc.RpcError as e:
# Check if this remote has the artifact service
if e.code() == grpc.StatusCode.UNIMPLEMENTED:
- return ("Configured remote does not have the BuildStream "
- "capabilities service. Please check remote configuration.")
+ raise RemoteError("Configured remote does not have the BuildStream "
+ "capabilities service. Please check remote configuration.")
# Else raise exception with details
- return "Remote initialisation failed: {}".format(e.details())
+ raise RemoteError("Remote initialisation failed: {}".format(e.details()))
if not response.artifact_capabilities:
- return "Configured remote does not support artifact service"
+ raise RemoteError("Configured remote does not support artifact service")
if self.spec.push and not response.artifact_capabilities.allow_updates:
- return 'Artifact server does not allow push'
-
- return None
+ raise RemoteError("Artifact server does not allow push")
# get_artifact():
#
diff --git a/src/buildstream/_remote.py b/src/buildstream/_remote.py
index fd353d479..ab1dc1924 100644
--- a/src/buildstream/_remote.py
+++ b/src/buildstream/_remote.py
@@ -234,11 +234,11 @@ class BaseRemote():
# particular kind of remote. This is expected to be called as part
# of check(), and must be called in a non-main process.
#
- # Returns:
- # (str|None): An error message, or None if no error message.
+ # Raises:
+ # RemoteError: when the remote isn't compatible or another error happened.
#
def _check(self):
- return None
+ pass
def __str__(self):
return self.url
diff --git a/tests/artifactcache/capabilities.py b/tests/artifactcache/capabilities.py
new file mode 100644
index 000000000..a28516aea
--- /dev/null
+++ b/tests/artifactcache/capabilities.py
@@ -0,0 +1,55 @@
+# Pylint doesn't play well with fixtures and dependency injection from pytest
+# pylint: disable=redefined-outer-name
+
+import os
+
+import pytest
+from buildstream._project import Project
+
+from buildstream import _yaml
+from buildstream.testing.runcli import cli # pylint: disable=unused-import
+from tests.testutils import dummy_context
+
+from tests.testutils.artifactshare import create_dummy_artifact_share
+
+
+DATA_DIR = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)),
+ "project",
+)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafiles):
+ project_dir = str(datafiles)
+
+ # Set up an artifact cache.
+ with create_dummy_artifact_share() as share:
+ # Configure artifact share
+ cache_dir = os.path.join(str(tmpdir), 'cache')
+ user_config_file = str(tmpdir.join('buildstream.conf'))
+ user_config = {
+ 'scheduler': {
+ 'pushers': 1
+ },
+ 'artifacts': {
+ 'url': share.repo,
+ 'push': True,
+ },
+ 'cachedir': cache_dir
+ }
+ _yaml.roundtrip_dump(user_config, file=user_config_file)
+
+ with dummy_context(config=user_config_file) as context:
+ # Load the project
+ project = Project(project_dir, context)
+ project.ensure_fully_loaded()
+
+ # Create a local artifact cache handle
+ artifactcache = context.artifactcache
+
+ # Manually setup the CAS remote
+ artifactcache.setup_remotes(use_config=True)
+
+ assert not artifactcache.has_fetch_remotes(), \
+ "System didn't realize the artifact cache didn't support BuildStream"
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index 18ecc5e3e..d96612686 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -3,10 +3,12 @@ import shutil
import signal
import sys
from collections import namedtuple
-
from contextlib import ExitStack, contextmanager
+from concurrent import futures
from multiprocessing import Process, Queue
+import grpc
+
from buildstream._cas import CASCache
from buildstream._cas.casserver import create_server
from buildstream._exceptions import CASError
@@ -14,40 +16,8 @@ from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution
from buildstream._protos.buildstream.v2 import artifact_pb2
-# ArtifactShare()
-#
-# Abstract class providing scaffolding for
-# generating data to be used with various sources
-#
-# Args:
-# directory (str): The base temp directory for the test
-# cache_quota (int): Maximum amount of disk space to use
-# casd (bool): Allow write access via casd
-#
-class ArtifactShare():
-
- def __init__(self, directory, *, quota=None, casd=False, index_only=False):
-
- # The working directory for the artifact share (in case it
- # needs to do something outside of its backend's storage folder).
- #
- self.directory = os.path.abspath(directory)
-
- # The directory the actual repo will be stored in.
- #
- # Unless this gets more complicated, just use this directly
- # in tests as a remote artifact push/pull configuration
- #
- self.repodir = os.path.join(self.directory, 'repo')
- os.makedirs(self.repodir)
- self.artifactdir = os.path.join(self.repodir, 'artifacts', 'refs')
- os.makedirs(self.artifactdir)
-
- self.cas = CASCache(self.repodir, casd=casd)
-
- self.quota = quota
- self.index_only = index_only
-
+class BaseArtifactShare:
+ def __init__(self):
q = Queue()
self.process = Process(target=self.run, args=(q,))
@@ -80,14 +50,7 @@ class ArtifactShare():
else:
cleanup_on_sigterm()
- server = stack.enter_context(
- create_server(
- self.repodir,
- quota=self.quota,
- enable_push=True,
- index_only=self.index_only,
- )
- )
+ server = stack.enter_context(self._create_server())
port = server.add_insecure_port('localhost:0')
server.start()
except Exception:
@@ -100,6 +63,80 @@ class ArtifactShare():
# Sleep until termination by signal
signal.pause()
+ # _create_server()
+ #
+ # Create the server that will be run in the process
+ #
+ def _create_server(self):
+ raise NotImplementedError()
+
+ # close():
+ #
+ # Remove the artifact share.
+ #
+ def close(self):
+ self.process.terminate()
+ self.process.join()
+
+
+# DummyArtifactShare()
+#
+# A dummy artifact share without any capabilities
+#
+class DummyArtifactShare(BaseArtifactShare):
+ @contextmanager
+ def _create_server(self):
+ max_workers = (os.cpu_count() or 1) * 5
+ server = grpc.server(futures.ThreadPoolExecutor(max_workers))
+
+ yield server
+
+
+# ArtifactShare()
+#
+# Abstract class providing scaffolding for
+# generating data to be used with various sources
+#
+# Args:
+# directory (str): The base temp directory for the test
+# cache_quota (int): Maximum amount of disk space to use
+# casd (bool): Allow write access via casd
+# enable_push (bool): Whether the share should allow pushes
+#
+class ArtifactShare(BaseArtifactShare):
+
+ def __init__(self, directory, *, quota=None, casd=False, index_only=False):
+
+ # The working directory for the artifact share (in case it
+ # needs to do something outside of its backend's storage folder).
+ #
+ self.directory = os.path.abspath(directory)
+
+ # The directory the actual repo will be stored in.
+ #
+ # Unless this gets more complicated, just use this directly
+ # in tests as a remote artifact push/pull configuration
+ #
+ self.repodir = os.path.join(self.directory, 'repo')
+ os.makedirs(self.repodir)
+ self.artifactdir = os.path.join(self.repodir, 'artifacts', 'refs')
+ os.makedirs(self.artifactdir)
+
+ self.cas = CASCache(self.repodir, casd=casd)
+
+ self.quota = quota
+ self.index_only = index_only
+
+ super().__init__()
+
+ def _create_server(self):
+ return create_server(
+ self.repodir,
+ quota=self.quota,
+ enable_push=True,
+ index_only=self.index_only,
+ )
+
# has_object():
#
# Checks whether the object is present in the share
@@ -180,8 +217,7 @@ class ArtifactShare():
# Remove the artifact share.
#
def close(self):
- self.process.terminate()
- self.process.join()
+ super().close()
self.cas.release_resources()
@@ -213,6 +249,19 @@ def create_split_share(directory1, directory2, *, quota=None, casd=False):
storage.close()
+# create_dummy_artifact_share()
+#
+# Create a dummy artifact share that doesn't have any capabilities
+#
+@contextmanager
+def create_dummy_artifact_share():
+ share = DummyArtifactShare()
+ try:
+ yield share
+ finally:
+ share.close()
+
+
statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize f_bavail')