author     Jürg Billeter <j@bitron.ch>  2019-07-09 16:43:31 +0200
committer  Jürg Billeter <j@bitron.ch>  2019-07-16 15:36:10 +0200
commit     f62b5f26adcdee8ab7cbf739cc5d2388ba25aefe (patch)
tree       5917ecd642fb72f087e6f6ec5f828bb2b7ea6a06
parent     bfef9e23807ab920ae402d743d5ea96ef857ca68 (diff)
download   buildstream-f62b5f26adcdee8ab7cbf739cc5d2388ba25aefe.tar.gz
tests/artifactcache/push.py: Use dummy_context()
-rw-r--r--  tests/artifactcache/push.py  143
1 file changed, 63 insertions, 80 deletions
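Note on dummy_context(): the helper imported from tests.testutils is not part of this diff. As a rough sketch only, and assuming it simply packages the boilerplate removed from each test (a Context loaded from the user configuration plus a no-op message handler) into a context manager, it might look like this:

from contextlib import contextmanager

from buildstream._context import Context


# No-op message handler, equivalent to the one removed from this test module
def _message_handler(message, is_silenced):
    pass


@contextmanager
def dummy_context(*, config=None):
    # Fake minimal context, as previously constructed inline in each test
    context = Context()
    context.load(config=config)
    context.messenger.set_message_handler(_message_handler)
    yield context

With such a helper in place, each test body reduces to the "with dummy_context(config=user_config_file) as context:" block shown in the hunks below.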
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index da658f76b..81d75023d 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -8,11 +8,11 @@ import signal
 import pytest
 
 from buildstream import _yaml, _signals, utils, Scope
-from buildstream._context import Context
 from buildstream._project import Project
 from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildstream.testing import cli  # pylint: disable=unused-import
-from tests.testutils import create_artifact_share
+
+from tests.testutils import create_artifact_share, dummy_context
 
 
 # Project directory
@@ -22,11 +22,6 @@ DATA_DIR = os.path.join(
 )
 
 
-# Handle messages from the pipeline
-def message_handler(message, is_silenced):
-    pass
-
-
 # Since parent processes wait for queue events, we need
 # to put something on it if the called process raises an
 # exception.
@@ -68,19 +63,15 @@ def test_push(cli, tmpdir, datafiles):
     # Write down the user configuration file
     _yaml.roundtrip_dump(user_config, file=user_config_file)
 
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project manually
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
+    with dummy_context(config=user_config_file) as context:
+        # Load the project manually
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
 
-    # Assert that the element's artifact is cached
-    element = project.load_elements(['target.bst'])[0]
-    element_key = cli.get_element_key(project_dir, 'target.bst')
-    assert cli.artifact.is_cached(rootcache_dir, element, element_key)
+        # Assert that the element's artifact is cached
+        element = project.load_elements(['target.bst'])[0]
+        element_key = cli.get_element_key(project_dir, 'target.bst')
+        assert cli.artifact.is_cached(rootcache_dir, element, element_key)
 
     queue = multiprocessing.Queue()
     # Use subprocess to avoid creation of gRPC threads in main BuildStream process
@@ -105,40 +96,36 @@ def test_push(cli, tmpdir, datafiles):
 
 
 def _test_push(user_config_file, project_dir, element_name, queue):
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project manually
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-
-    # Create a local artifact cache handle
-    artifactcache = context.artifactcache
-
-    # Load the target element
-    element = project.load_elements([element_name])[0]
-
-    # Ensure the element's artifact memeber is initialised
-    # This is duplicated from Pipeline.resolve_elements()
-    # as this test does not use the cli frontend.
-    for e in element.dependencies(Scope.ALL):
-        # Determine initial element state.
-        e._update_state()
-
-    # Manually setup the CAS remotes
-    artifactcache.setup_remotes(use_config=True)
-    artifactcache.initialize_remotes()
-
-    if artifactcache.has_push_remotes(plugin=element):
-        # Push the element's artifact
-        if not element._push():
-            queue.put("Push operation failed")
+    with dummy_context(config=user_config_file) as context:
+        # Load the project manually
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
+
+        # Create a local artifact cache handle
+        artifactcache = context.artifactcache
+
+        # Load the target element
+        element = project.load_elements([element_name])[0]
+
+        # Ensure the element's artifact memeber is initialised
+        # This is duplicated from Pipeline.resolve_elements()
+        # as this test does not use the cli frontend.
+        for e in element.dependencies(Scope.ALL):
+            # Determine initial element state.
+            e._update_state()
+
+        # Manually setup the CAS remotes
+        artifactcache.setup_remotes(use_config=True)
+        artifactcache.initialize_remotes()
+
+        if artifactcache.has_push_remotes(plugin=element):
+            # Push the element's artifact
+            if not element._push():
+                queue.put("Push operation failed")
+            else:
+                queue.put(None)
         else:
-            queue.put(None)
-    else:
-        queue.put("No remote configured for element {}".format(element_name))
+            queue.put("No remote configured for element {}".format(element_name))
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -189,31 +176,27 @@ def test_push_message(tmpdir, datafiles):
 
 
 def _test_push_message(user_config_file, project_dir, queue):
-    # Fake minimal context
-    context = Context()
-    context.load(config=user_config_file)
-    context.messenger.set_message_handler(message_handler)
-
-    # Load the project manually
-    project = Project(project_dir, context)
-    project.ensure_fully_loaded()
-
-    # Create a local artifact cache handle
-    artifactcache = context.artifactcache
-
-    # Manually setup the artifact remote
-    artifactcache.setup_remotes(use_config=True)
-    artifactcache.initialize_remotes()
-
-    if artifactcache.has_push_remotes():
-        # Create an example message object
-        command = remote_execution_pb2.Command(arguments=['/usr/bin/gcc', '--help'],
-                                               working_directory='/buildstream-build',
-                                               output_directories=['/buildstream-install'])
-
-        # Push the message object
-        command_digest = artifactcache.push_message(project, command)
-
-        queue.put((command_digest.hash, command_digest.size_bytes))
-    else:
-        queue.put("No remote configured")
+    with dummy_context(config=user_config_file) as context:
+        # Load the project manually
+        project = Project(project_dir, context)
+        project.ensure_fully_loaded()
+
+        # Create a local artifact cache handle
+        artifactcache = context.artifactcache
+
+        # Manually setup the artifact remote
+        artifactcache.setup_remotes(use_config=True)
+        artifactcache.initialize_remotes()
+
+        if artifactcache.has_push_remotes():
+            # Create an example message object
+            command = remote_execution_pb2.Command(arguments=['/usr/bin/gcc', '--help'],
+                                                   working_directory='/buildstream-build',
+                                                   output_directories=['/buildstream-install'])
+
+            # Push the message object
+            command_digest = artifactcache.push_message(project, command)
+
+            queue.put((command_digest.hash, command_digest.size_bytes))
+        else:
+            queue.put("No remote configured")