Author:    Sam Thursfield <sam.thursfield@codethink.co.uk>  2017-11-28 12:03:10 +0000
Committer: Sam Thursfield <sam.thursfield@codethink.co.uk>  2018-01-11 18:18:13 +0000
commit 1f39477605f5a8caba487a6ebcf3bf4559babe5f (patch)
tree   a8d583fc395c8eb304fc27deb2999e6ba2b6c2d9
parent 51c17e1147392f15580fb2dd925055ad8863ab3e (diff)
tests: Exercise the new multiple cache support
This adds a new test for parsing artifact cache configuration. The test calls the helper function from the 'artifactcache' module directly, rather than trying to infer from blind push and pull commands whether the complex precedence rules were applied exactly right. This means the frontend push/pull tests no longer need to be so thorough about testing precedence; they are instead expanded to assert that multiple caches work correctly.
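For reference, the precedence order exercised by the new test is: project-specific overrides in the user configuration first, then the caches declared in project.conf, then the user-level 'artifacts' setting. As a minimal sketch of the two configuration shapes involved (the URLs are placeholders; the dict layout mirrors the new configure_remote_caches() helper below):

    # User configuration (buildstream.conf):
    user_config = {
        # Per-project override: consulted first.
        'projects': {
            'test': {
                'artifacts': {'url': 'https://example.com/override-cache', 'push': True},
            }
        },
        # User-level caches: consulted last.
        'artifacts': [
            {'url': 'https://example.com/user-cache1', 'push': False},
            {'url': 'https://example.com/user-cache2', 'push': False},
        ],
    }

    # Project configuration (project.conf): consulted after the override,
    # before the user-level caches.
    project_config = {
        'name': 'test',
        'artifacts': {'url': 'https://example.com/project-cache', 'push': False},
    }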
-rw-r--r--  tests/artifactcache/config.py     106
-rw-r--r--  tests/frontend/pull.py            161
-rw-r--r--  tests/frontend/push.py             79
-rw-r--r--  tests/testutils/__init__.py         2
-rw-r--r--  tests/testutils/artifactshare.py   32
5 files changed, 248 insertions(+), 132 deletions(-)
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
new file mode 100644
index 000000000..3416f6c10
--- /dev/null
+++ b/tests/artifactcache/config.py
@@ -0,0 +1,106 @@
+import pytest
+
+import itertools
+import os
+
+from buildstream._artifactcache import ArtifactCacheSpec, configured_remote_artifact_cache_specs
+from buildstream._context import Context
+from buildstream._project import Project
+from buildstream.utils import _deduplicate
+from buildstream import _yaml
+
+
+cache1 = ArtifactCacheSpec(url='https://example.com/cache1', push=True)
+cache2 = ArtifactCacheSpec(url='https://example.com/cache2', push=False)
+cache3 = ArtifactCacheSpec(url='https://example.com/cache3', push=False)
+cache4 = ArtifactCacheSpec(url='https://example.com/cache4', push=False)
+cache5 = ArtifactCacheSpec(url='https://example.com/cache5', push=False)
+cache6 = ArtifactCacheSpec(url='https://example.com/cache6', push=True)
+
+
+# Generate cache configuration fragments for the user config and project config files.
+#
+def configure_remote_caches(override_caches, project_caches=[], user_caches=[]):
+    user_config = {}
+    if len(user_caches) == 1:
+        user_config['artifacts'] = {
+            'url': user_caches[0].url,
+            'push': user_caches[0].push,
+        }
+    elif len(user_caches) > 1:
+        user_config['artifacts'] = [
+            {'url': cache.url, 'push': cache.push} for cache in user_caches
+        ]
+
+    if len(override_caches) == 1:
+        user_config['projects'] = {
+            'test': {
+                'artifacts': {
+                    'url': override_caches[0].url,
+                    'push': override_caches[0].push,
+                }
+            }
+        }
+    elif len(override_caches) > 1:
+        user_config['projects'] = {
+            'test': {
+                'artifacts': [
+                    {'url': cache.url, 'push': cache.push} for cache in override_caches
+                ]
+            }
+        }
+
+    project_config = {}
+    if len(project_caches) > 0:
+        if len(project_caches) == 1:
+            project_config.update({
+                'artifacts': {
+                    'url': project_caches[0].url,
+                    'push': project_caches[0].push,
+                }
+            })
+        elif len(project_caches) > 1:
+            project_config.update({
+                'artifacts': [
+                    {'url': cache.url, 'push': cache.push} for cache in project_caches
+                ]
+            })
+
+    return user_config, project_config
+
+
+# Test that parsing the remote artifact cache locations produces the
+# expected results.
+@pytest.mark.parametrize(
+    'override_caches, project_caches, user_caches',
+    [
+        # The leftmost cache is the highest priority one in all cases here.
+        pytest.param([], [], [], id='empty-config'),
+        pytest.param([], [], [cache1, cache2], id='user-config'),
+        pytest.param([], [cache1, cache2], [cache3], id='project-config'),
+        pytest.param([cache1], [cache2], [cache3], id='project-override-in-user-config'),
+        pytest.param([cache1, cache2], [cache3, cache4], [cache5, cache6], id='list-order'),
+        pytest.param([cache1, cache2, cache1], [cache2], [cache2, cache1], id='duplicates'),
+    ])
+def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user_caches):
+    # Produce a fake user and project config with the cache configuration.
+    user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
+    project_config['name'] = 'test'
+
+    user_config_file = str(tmpdir.join('buildstream.conf'))
+    _yaml.dump(_yaml.node_sanitize(user_config), filename=user_config_file)
+
+    project_dir = tmpdir.mkdir('project')
+    project_config_file = str(project_dir.join('project.conf'))
+    _yaml.dump(_yaml.node_sanitize(project_config), filename=project_config_file)
+
+    context = Context([])
+    context.load(config=user_config_file)
+    project = Project(str(project_dir), context)
+
+    # Use the helper from the artifactcache module to parse our configuration.
+    parsed_cache_specs = configured_remote_artifact_cache_specs(context, project)
+
+    # Verify that it was correctly read.
+    expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
+    assert parsed_cache_specs == expected_cache_specs
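The expected ordering in the assertion above relies on _deduplicate() keeping only the first occurrence of each spec while preserving order. A rough standalone illustration of that behaviour, assuming an order-preserving, first-wins filter (the real helper lives in buildstream.utils and operates on ArtifactCacheSpec tuples):

    import itertools

    def _deduplicate(iterable):
        # Yield each item the first time it is seen, preserving order.
        seen = set()
        for item in iterable:
            if item not in seen:
                seen.add(item)
                yield item

    # Mirrors the 'duplicates' case from the parametrized test above.
    override = ['cache1', 'cache2', 'cache1']
    project = ['cache2']
    user = ['cache2', 'cache1']
    assert list(_deduplicate(itertools.chain(override, project, user))) == ['cache1', 'cache2']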
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 18a4b4654..b0c923338 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -1,11 +1,9 @@
import os
import shutil
import pytest
-from tests.testutils import cli, create_artifact_share, configure_remote_caches
+from tests.testutils import cli, create_artifact_share
from tests.testutils.site import IS_LINUX
-from buildstream import _yaml
-
# Project directory
DATA_DIR = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
@@ -25,44 +23,37 @@ def assert_shared(cli, share, project, element_name):
                             .format(share.repo, element_name))
+# Assert that a given artifact is NOT in the share
+#
+def assert_not_shared(cli, share, project, element_name):
+    # NOTE: 'test' here is the name of the project
+    # specified in the project.conf we are testing with.
+    #
+    cache_key = cli.get_element_key(project, element_name)
+    if share.has_artifact('test', element_name, cache_key):
+        raise AssertionError("Artifact share at {} unexpectedly contains the element {}"
+                             .format(share.repo, element_name))
+
+
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
-@pytest.mark.parametrize(
-    'override_url, project_url, user_url',
-    [
-        pytest.param(None, None, 'share.repo', id='user-config'),
-        pytest.param(None, 'share.repo', None, id='project-config'),
-        pytest.param('share.repo', None, None, id='project-override-in-user-config'),
-    ])
@pytest.mark.datafiles(DATA_DIR)
-def test_push_pull(cli, tmpdir, datafiles, override_url, project_url, user_url):
+def test_push_pull_all(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-    # First build it without the artifact cache configured
-    result = cli.run(project=project, args=['build', 'import-bin.bst'])
-    result.assert_success()
-
-    # Assert that we are now cached locally
-    assert cli.get_element_state(project, 'import-bin.bst') == 'cached'
-
-    override_url = share.repo if override_url == 'share.repo' else override_url
-    project_url = share.repo if project_url == 'share.repo' else project_url
-    user_url = share.repo if user_url == 'share.repo' else user_url
-
-    project_conf_file = str(datafiles.join('project.conf'))
-    configure_remote_caches(cli, project_conf_file, override_url, project_url, user_url)
-
-    # Now try bst push
-    result = cli.run(project=project, args=['push', 'import-bin.bst'])
+    # First build the target element and push to the remote.
+    cli.configure({
+        'artifacts': {'url': share.repo, 'push': True}
+    })
+    result = cli.run(project=project, args=['build', 'target.bst'])
    result.assert_success()
+    assert cli.get_element_state(project, 'target.bst') == 'cached'
-    # And finally assert that the artifact is in the share
-    assert_shared(cli, share, project, 'import-bin.bst')
-
-    # Make sure we update the summary in our artifact share,
-    # we dont have a real server around to do it
-    #
+    # Assert that everything is now cached in the remote.
    share.update_summary()
+    all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+    for element_name in all_elements:
+        assert_shared(cli, share, project, element_name)
    # Now we've pushed, delete the user's local artifact cache
    # directory and try to redownload it from the share
@@ -72,76 +63,94 @@ def test_push_pull(cli, tmpdir, datafiles, override_url, project_url, user_url):
    # Assert that we are now in a downloadable state, nothing
    # is cached locally anymore
-    assert cli.get_element_state(project, 'import-bin.bst') == 'downloadable'
+    for element_name in all_elements:
+        assert cli.get_element_state(project, element_name) == 'downloadable'
    # Now try bst pull
-    result = cli.run(project=project, args=['pull', 'import-bin.bst'])
+    result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
    result.assert_success()
    # And assert that it's again in the local cache, without having built
-    assert cli.get_element_state(project, 'import-bin.bst') == 'cached'
+    for element_name in all_elements:
+        assert cli.get_element_state(project, element_name) == 'cached'
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
-def test_push_pull_all(cli, tmpdir, datafiles):
+def test_pull_secondary_cache(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
-    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-    # First build it without the artifact cache configured
+    share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
+    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+
+    # Build the target and push it to share2 only.
+    cli.configure({
+        'artifacts': [
+            {'url': share1.repo, 'push': False},
+            {'url': share2.repo, 'push': True},
+        ]
+    })
+    result = cli.run(project=project, args=['build', 'target.bst'])
+    result.assert_success()
+
+    share1.update_summary()
+    share2.update_summary()
+
+    assert_not_shared(cli, share1, project, 'target.bst')
+    assert_shared(cli, share2, project, 'target.bst')
+
+    # Delete the user's local artifact cache.
+    artifacts = os.path.join(cli.directory, 'artifacts')
+    shutil.rmtree(artifacts)
+
+    # Assert that the element is 'downloadable', i.e. we found it in share2.
+    assert cli.get_element_state(project, 'target.bst') == 'downloadable'
+
+
+@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_pull_specific_remote(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+
+    good_share = create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare'))
+    bad_share = create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare'))
+
+    # Build the target so we have it cached locally only.
    result = cli.run(project=project, args=['build', 'target.bst'])
    result.assert_success()
-    # Assert that we are now cached locally
-    assert cli.get_element_state(project, 'target.bst') == 'cached'
+    state = cli.get_element_state(project, 'target.bst')
+    assert state == 'cached'
-    # Configure artifact share
+    # Configure the default push location to be bad_share; we will assert that
+    # nothing actually gets pushed there.
    cli.configure({
-        #
-        # FIXME: This test hangs "sometimes" if we allow
-        # concurrent push.
-        #
-        # It's not too bad to ignore since we're
-        # using the local artifact cache functionality
-        # only, but it should probably be fixed.
-        #
-        'scheduler': {
-            'pushers': 1
-        },
-        'artifacts': {
-            'url': share.repo,
-        }
+        'artifacts': {'url': bad_share.repo, 'push': True},
    })
-    # Now try bst push
-    result = cli.run(project=project, args=['push', '--deps', 'all', 'target.bst'])
+    # Now try `bst push` to the good_share.
+    result = cli.run(project=project, args=[
+        'push', 'target.bst', '--remote', good_share.repo
+    ])
    result.assert_success()
-    # And finally assert that the artifact is in the share
-    all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
-    for element_name in all_elements:
-        assert_shared(cli, share, project, element_name)
+    good_share.update_summary()
+    bad_share.update_summary()
-    # Make sure we update the summary in our artifact share,
-    # we dont have a real server around to do it
-    #
-    share.update_summary()
+    # Assert that all the artifacts are in the share we pushed
+    # to, and not the other.
+    assert_shared(cli, good_share, project, 'target.bst')
+    assert_not_shared(cli, bad_share, project, 'target.bst')
    # Now we've pushed, delete the user's local artifact cache
-    # directory and try to redownload it from the share
+    # directory and try to redownload it from the good_share.
    #
    artifacts = os.path.join(cli.directory, 'artifacts')
    shutil.rmtree(artifacts)
-    # Assert that we are now in a downloadable state, nothing
-    # is cached locally anymore
-    for element_name in all_elements:
-        assert cli.get_element_state(project, element_name) == 'downloadable'
-
-    # Now try bst pull
-    result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
+    result = cli.run(project=project, args=['pull', 'target.bst', '--remote',
+                                            good_share.repo])
    result.assert_success()
    # And assert that it's again in the local cache, without having built
-    for element_name in all_elements:
-        assert cli.get_element_state(project, element_name) == 'cached'
+    assert cli.get_element_state(project, 'target.bst') == 'cached'
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 9d897a8e5..e4f39de65 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -1,10 +1,10 @@
import os
+import shutil
import pytest
-from tests.testutils import cli, create_artifact_share, configure_remote_caches
+from buildstream._exceptions import ErrorDomain
+from tests.testutils import cli, create_artifact_share
from tests.testutils.site import IS_LINUX
-from buildstream import _yaml
-
# Project directory
DATA_DIR = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
@@ -24,39 +24,71 @@ def assert_shared(cli, share, project, element_name):
                             .format(share.repo, element_name))
-@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
-@pytest.mark.parametrize(
-    'override_url, project_url, user_url',
-    [
-        pytest.param(None, None, 'share.repo', id='user-config'),
-        pytest.param(None, 'share.repo', None, id='project-config'),
-        pytest.param('share.repo', None, None, id='project-override-in-user-config'),
-    ])
+# Assert that a given artifact is NOT in the share
+#
+def assert_not_shared(cli, share, project, element_name):
+    # NOTE: 'test' here is the name of the project
+    # specified in the project.conf we are testing with.
+    #
+    cache_key = cli.get_element_key(project, element_name)
+    if share.has_artifact('test', element_name, cache_key):
+        raise AssertionError("Artifact share at {} unexpectedly contains the element {}"
+                             .format(share.repo, element_name))
+
+
@pytest.mark.datafiles(DATA_DIR)
-def test_push(cli, tmpdir, datafiles, override_url, user_url, project_url):
+def test_push(cli, tmpdir, datafiles):
    project = str(datafiles)
-    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-    # First build it without the artifact cache configured
+    # First build the project without the artifact cache configured
    result = cli.run(project=project, args=['build', 'target.bst'])
    result.assert_success()
    # Assert that we are now cached locally
    assert cli.get_element_state(project, 'target.bst') == 'cached'
-    override_url = share.repo if override_url == 'share.repo' else override_url
-    project_url = share.repo if project_url == 'share.repo' else project_url
-    user_url = share.repo if user_url == 'share.repo' else user_url
+    # Set up two artifact shares.
+    share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
+    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
-    project_conf_file = str(datafiles.join('project.conf'))
-    configure_remote_caches(cli, project_conf_file, override_url, project_url, user_url)
+    # Try pushing with no remotes configured. This should fail.
+    result = cli.run(project=project, args=['push', 'target.bst'])
+    result.assert_main_error(ErrorDomain.PIPELINE, None)
-    # Now try bst push
+    # Configure bst to pull but not push from a cache and run `bst push`.
+    # This should also fail.
+    cli.configure({
+        'artifacts': {'url': share1.repo, 'push': False},
+    })
    result = cli.run(project=project, args=['push', 'target.bst'])
-    result.assert_success()
+    result.assert_main_error(ErrorDomain.PIPELINE, None)
-    # And finally assert that the artifact is in the share
-    assert_shared(cli, share, project, 'target.bst')
+    # Configure bst to push to one of the caches and run `bst push`. This works.
+    cli.configure({
+        'artifacts': [
+            {'url': share1.repo, 'push': False},
+            {'url': share2.repo, 'push': True},
+        ]
+    })
+    result = cli.run(project=project, args=['push', 'target.bst'])
+
+    assert_not_shared(cli, share1, project, 'target.bst')
+    assert_shared(cli, share2, project, 'target.bst')
+
+    # Now try pushing to both (making sure to empty the cache we just pushed
+    # to).
+    shutil.rmtree(share2.directory)
+    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+    cli.configure({
+        'artifacts': [
+            {'url': share1.repo, 'push': True},
+            {'url': share2.repo, 'push': True},
+        ]
+    })
+    result = cli.run(project=project, args=['push', 'target.bst'])
+
+    assert_shared(cli, share1, project, 'target.bst')
+    assert_shared(cli, share2, project, 'target.bst')
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@@ -87,6 +119,7 @@ def test_push_all(cli, tmpdir, datafiles):
        },
        'artifacts': {
            'url': share.repo,
+            'push': True,
        }
    })
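The one-line change to test_push_all above is needed because, with multiple-cache support, a configured remote is presumably only pushed to when its entry sets 'push': True; a remote without it acts as pull-only. A minimal sketch of the single push-enabled remote shape (the URL is a placeholder; cli.configure() is the test helper used throughout these tests):

    cli.configure({
        'artifacts': {
            'url': 'https://example.com/cache',  # placeholder remote URL
            'push': True,  # without this the remote is treated as pull-only
        }
    })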
diff --git a/tests/testutils/__init__.py b/tests/testutils/__init__.py
index f0eb171c3..9fc450a28 100644
--- a/tests/testutils/__init__.py
+++ b/tests/testutils/__init__.py
@@ -1,3 +1,3 @@
from .runcli import cli
from .repo import create_repo, ALL_REPO_KINDS
-from .artifactshare import create_artifact_share, configure_remote_caches
+from .artifactshare import create_artifact_share
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index ebf38f34b..8664c69d0 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -108,35 +108,3 @@ class ArtifactShare():
def create_artifact_share(directory):
    return ArtifactShare(directory)
-
-
-# Write out cache configuration into the user config and project config files.
-#
-# User config is set through a helper on the 'cli' object, while the
-# project.conf file is updated manually using the _yaml module.
-#
-def configure_remote_caches(cli, project_conf_file, override_url, project_url=None, user_url=None):
-    user_config = {}
-    if user_url is not None:
-        user_config['artifacts'] = {
-            'url': user_url
-        }
-
-    if override_url is not None:
-        user_config['projects'] = {
-            'test': {
-                'artifacts': {
-                    'url': override_url,
-                }
-            }
-        }
-    cli.configure(user_config)
-
-    if project_url is not None:
-        project_config = _yaml.load(project_conf_file)
-        project_config.update({
-            'artifacts': {
-                'url': project_url,
-            }
-        })
-        _yaml.dump(_yaml.node_sanitize(project_config), filename=project_conf_file)