author    Jürg Billeter <j@bitron.ch>    2018-05-13 20:20:57 +0200
committer Jürg Billeter <j@bitron.ch>    2018-07-17 07:56:40 +0200
commit    fa2eaba257f55bbcd68eb1cbaadc1f390689e0db (patch)
tree      70b3902dcac67ce81d7af718e1f8973e194e7aa3 /tests/frontend
parent    4c6512d6f6e4defbddebab56a19e5e7f9e50c20c (diff)
tests: Use context manager for ArtifactShare
Diffstat (limited to 'tests/frontend')
-rw-r--r--  tests/frontend/pull.py  360
-rw-r--r--  tests/frontend/push.py  488
2 files changed, 426 insertions, 422 deletions
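
The rewrite assumes create_artifact_share() can serve as a context manager. That helper lives in the shared test utilities rather than in this diff; as a rough sketch only, assuming an ArtifactShare object that owns an on-disk repo and needs explicit teardown, such a helper can be built with contextlib along these lines:

    from contextlib import contextmanager
    import os
    import shutil

    class ArtifactShare:
        # Stand-in for the real test helper, which this diff does not show.
        def __init__(self, directory):
            self.directory = directory
            self.repo = directory   # the real helper exposes a repo URL
            os.makedirs(directory, exist_ok=True)

        def close(self):
            # The real helper would also stop any share process it started.
            shutil.rmtree(self.directory, ignore_errors=True)

    @contextmanager
    def create_artifact_share(directory):
        share = ArtifactShare(directory)
        try:
            yield share        # the body of the `with` block runs here
        finally:
            share.close()      # runs even if an assertion fails mid-test

Because teardown sits in a finally clause, each share is cleaned up even when a test fails partway through, which is what the mechanical re-indentation below buys.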
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 9caf727ac..78c1f9cc2 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -46,38 +46,39 @@ def assert_not_shared(cli, share, project, element_name):
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_all(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
-
- # Assert that everything is now cached in the remote.
- all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
- for element_name in all_elements:
- assert_shared(cli, share, project, element_name)
-
- # Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the share
- #
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
- # Assert that nothing is cached locally anymore
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) != 'cached'
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+ # First build the target element and push to the remote.
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True}
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+ # Assert that everything is now cached in the remote.
+ all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+ for element_name in all_elements:
+ assert_shared(cli, share, project, element_name)
+
+ # Now we've pushed, delete the user's local artifact cache
+ # directory and try to redownload it from the share
+ #
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
- # Now try bst pull
- result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
- result.assert_success()
+ # Assert that nothing is cached locally anymore
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) != 'cached'
- # And assert that it's again in the local cache, without having built
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) == 'cached'
+ # Now try bst pull
+ result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
+ result.assert_success()
+
+ # And assert that it's again in the local cache, without having built
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) == 'cached'
# Tests that:
@@ -90,36 +91,36 @@ def test_push_pull_all(cli, tmpdir, datafiles):
def test_pull_secondary_cache(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
- share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1,\
+ create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
- # Build the target and push it to share2 only.
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': False},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
+ # Build the target and push it to share2 only.
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': False},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
- assert_not_shared(cli, share1, project, 'target.bst')
- assert_shared(cli, share2, project, 'target.bst')
+ assert_not_shared(cli, share1, project, 'target.bst')
+ assert_shared(cli, share2, project, 'target.bst')
- # Delete the user's local artifact cache.
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ # Delete the user's local artifact cache.
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
- # Assert that the element is not cached anymore.
- assert cli.get_element_state(project, 'target.bst') != 'cached'
+ # Assert that the element is not cached anymore.
+ assert cli.get_element_state(project, 'target.bst') != 'cached'
- # Now try bst pull
- result = cli.run(project=project, args=['pull', 'target.bst'])
- result.assert_success()
+ # Now try bst pull
+ result = cli.run(project=project, args=['pull', 'target.bst'])
+ result.assert_success()
- # And assert that it's again in the local cache, without having built,
- # i.e. we found it in share2.
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ # And assert that it's again in the local cache, without having built,
+ # i.e. we found it in share2.
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
# Tests that:
@@ -132,45 +133,45 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- good_share = create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare'))
- bad_share = create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare'))
+ with create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare')) as good_share,\
+ create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare')) as bad_share:
- # Build the target so we have it cached locally only.
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
+ # Build the target so we have it cached locally only.
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
- state = cli.get_element_state(project, 'target.bst')
- assert state == 'cached'
+ state = cli.get_element_state(project, 'target.bst')
+ assert state == 'cached'
- # Configure the default push location to be bad_share; we will assert that
- # nothing actually gets pushed there.
- cli.configure({
- 'artifacts': {'url': bad_share.repo, 'push': True},
- })
+ # Configure the default push location to be bad_share; we will assert that
+ # nothing actually gets pushed there.
+ cli.configure({
+ 'artifacts': {'url': bad_share.repo, 'push': True},
+ })
- # Now try `bst push` to the good_share.
- result = cli.run(project=project, args=[
- 'push', 'target.bst', '--remote', good_share.repo
- ])
- result.assert_success()
+ # Now try `bst push` to the good_share.
+ result = cli.run(project=project, args=[
+ 'push', 'target.bst', '--remote', good_share.repo
+ ])
+ result.assert_success()
- # Assert that all the artifacts are in the share we pushed
- # to, and not the other.
- assert_shared(cli, good_share, project, 'target.bst')
- assert_not_shared(cli, bad_share, project, 'target.bst')
+ # Assert that all the artifacts are in the share we pushed
+ # to, and not the other.
+ assert_shared(cli, good_share, project, 'target.bst')
+ assert_not_shared(cli, bad_share, project, 'target.bst')
- # Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the good_share.
- #
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
+ # Now we've pushed, delete the user's local artifact cache
+ # directory and try to redownload it from the good_share.
+ #
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
- result = cli.run(project=project, args=['pull', 'target.bst', '--remote',
- good_share.repo])
- result.assert_success()
+ result = cli.run(project=project, args=['pull', 'target.bst', '--remote',
+ good_share.repo])
+ result.assert_success()
- # And assert that it's again in the local cache, without having built
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ # And assert that it's again in the local cache, without having built
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
# Tests that:
@@ -181,50 +182,51 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_non_strict(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- workspace = os.path.join(str(tmpdir), 'workspace')
-
- # First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- 'projects': {
- 'test': {'strict': False}
- }
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
-
- # Assert that everything is now cached in the remote.
- all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
- for element_name in all_elements:
- assert_shared(cli, share, project, element_name)
-
- # Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the share
- #
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
- # Assert that nothing is cached locally anymore
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) != 'cached'
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ workspace = os.path.join(str(tmpdir), 'workspace')
- # Add a file to force a change in the strict cache key of import-bin.bst
- with open(os.path.join(str(project), 'files', 'bin-files', 'usr', 'bin', 'world'), 'w') as f:
- f.write('world')
+ # First build the target element and push to the remote.
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ 'projects': {
+ 'test': {'strict': False}
+ }
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
- # Assert that the workspaced element requires a rebuild
- assert cli.get_element_state(project, 'import-bin.bst') == 'buildable'
- # Assert that the target is still waiting due to --no-strict
- assert cli.get_element_state(project, 'target.bst') == 'waiting'
+ # Assert that everything is now cached in the remote.
+ all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+ for element_name in all_elements:
+ assert_shared(cli, share, project, element_name)
- # Now try bst pull
- result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
- result.assert_success()
+ # Now we've pushed, delete the user's local artifact cache
+ # directory and try to redownload it from the share
+ #
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
- # And assert that the target is again in the local cache, without having built
- assert cli.get_element_state(project, 'target.bst') == 'cached'
+ # Assert that nothing is cached locally anymore
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) != 'cached'
+
+ # Add a file to force a change in the strict cache key of import-bin.bst
+ with open(os.path.join(str(project), 'files', 'bin-files', 'usr', 'bin', 'world'), 'w') as f:
+ f.write('world')
+
+ # Assert that the workspaced element requires a rebuild
+ assert cli.get_element_state(project, 'import-bin.bst') == 'buildable'
+ # Assert that the target is still waiting due to --no-strict
+ assert cli.get_element_state(project, 'target.bst') == 'waiting'
+
+ # Now try bst pull
+ result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
+ result.assert_success()
+
+ # And assert that the target is again in the local cache, without having built
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
# Regression test for https://gitlab.com/BuildStream/buildstream/issues/202
@@ -232,71 +234,73 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- 'projects': {
- 'test': {'strict': False}
- }
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert cli.get_element_state(project, 'target.bst') == 'cached'
-
- # Assert that everything is now cached in the remote.
- all_elements = {'target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst'}
- for element_name in all_elements:
- assert_shared(cli, share, project, element_name)
-
- # Now we've pushed, delete the user's local artifact cache
- # directory and try to redownload it from the share
- #
- artifacts = os.path.join(cli.directory, 'artifacts')
- shutil.rmtree(artifacts)
-
- # Assert that nothing is cached locally anymore
- for element_name in all_elements:
- assert cli.get_element_state(project, element_name) != 'cached'
- # Now try bst build with tracking and pulling.
- # Tracking will be skipped for target.bst as it doesn't have any sources.
- # With the non-strict build plan target.bst immediately enters the pull queue.
- # However, pulling has to be deferred until the dependencies have been
- # tracked as the strict cache key needs to be calculated before querying
- # the caches.
- result = cli.run(project=project, args=['build', '--track-all', '--all', 'target.bst'])
- result.assert_success()
- assert set(result.get_pulled_elements()) == all_elements
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+ # First build the target element and push to the remote.
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ 'projects': {
+ 'test': {'strict': False}
+ }
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+ # Assert that everything is now cached in the remote.
+ all_elements = {'target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst'}
+ for element_name in all_elements:
+ assert_shared(cli, share, project, element_name)
+
+ # Now we've pushed, delete the user's local artifact cache
+ # directory and try to redownload it from the share
+ #
+ artifacts = os.path.join(cli.directory, 'artifacts')
+ shutil.rmtree(artifacts)
+
+ # Assert that nothing is cached locally anymore
+ for element_name in all_elements:
+ assert cli.get_element_state(project, element_name) != 'cached'
+
+ # Now try bst build with tracking and pulling.
+ # Tracking will be skipped for target.bst as it doesn't have any sources.
+ # With the non-strict build plan target.bst immediately enters the pull queue.
+ # However, pulling has to be deferred until the dependencies have been
+ # tracked as the strict cache key needs to be calculated before querying
+ # the caches.
+ result = cli.run(project=project, args=['build', '--track-all', '--all', 'target.bst'])
+ result.assert_success()
+ assert set(result.get_pulled_elements()) == all_elements
@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
@pytest.mark.datafiles(DATA_DIR)
def test_push_pull_cross_junction(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- subproject_path = os.path.join(project, 'files', 'sub-project')
- junction_path = os.path.join(project, 'elements', 'junction.bst')
- generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ subproject_path = os.path.join(project, 'files', 'sub-project')
+ junction_path = os.path.join(project, 'elements', 'junction.bst')
+
+ generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
- # First build the target element and push to the remote.
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True}
- })
- result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
- result.assert_success()
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
+ # First build the target element and push to the remote.
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True}
+ })
+ result = cli.run(project=project, args=['build', 'junction.bst:import-etc.bst'])
+ result.assert_success()
+ assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
- cache_dir = os.path.join(project, 'cache', 'artifacts')
- shutil.rmtree(cache_dir)
+ cache_dir = os.path.join(project, 'cache', 'artifacts')
+ shutil.rmtree(cache_dir)
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
+ assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
- # Now try bst pull
- result = cli.run(project=project, args=['pull', 'junction.bst:import-etc.bst'])
- result.assert_success()
+ # Now try bst pull
+ result = cli.run(project=project, args=['pull', 'junction.bst:import-etc.bst'])
+ result.assert_success()
- # And assert that it's again in the local cache, without having built
- assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
+ # And assert that it's again in the local cache, without having built
+ assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 5cd88290d..8dba3c0a3 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -1,5 +1,4 @@
import os
-import shutil
import pytest
from collections import namedtuple
from unittest.mock import MagicMock
@@ -59,47 +58,47 @@ def test_push(cli, tmpdir, datafiles):
assert cli.get_element_state(project, 'target.bst') == 'cached'
# Set up two artifact shares.
- share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
- share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
-
- # Try pushing with no remotes configured. This should fail.
- result = cli.run(project=project, args=['push', 'target.bst'])
- result.assert_main_error(ErrorDomain.STREAM, None)
-
- # Configure bst to pull from a cache but not push to it, and run `bst push`.
- # This should also fail.
- cli.configure({
- 'artifacts': {'url': share1.repo, 'push': False},
- })
- result = cli.run(project=project, args=['push', 'target.bst'])
- result.assert_main_error(ErrorDomain.STREAM, None)
-
- # Configure bst to push to one of the caches and run `bst push`. This works.
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': False},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['push', 'target.bst'])
-
- assert_not_shared(cli, share1, project, 'target.bst')
- assert_shared(cli, share2, project, 'target.bst')
-
- # Now try pushing to both (making sure to empty the cache we just pushed
- # to).
- shutil.rmtree(share2.directory)
- share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': True},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['push', 'target.bst'])
-
- assert_shared(cli, share1, project, 'target.bst')
- assert_shared(cli, share2, project, 'target.bst')
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1:
+
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
+
+ # Try pushing with no remotes configured. This should fail.
+ result = cli.run(project=project, args=['push', 'target.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ # Configure bst to pull from a cache but not push to it, and run `bst push`.
+ # This should also fail.
+ cli.configure({
+ 'artifacts': {'url': share1.repo, 'push': False},
+ })
+ result = cli.run(project=project, args=['push', 'target.bst'])
+ result.assert_main_error(ErrorDomain.STREAM, None)
+
+ # Configure bst to push to one of the caches and run `bst push`. This works.
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': False},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['push', 'target.bst'])
+
+ assert_not_shared(cli, share1, project, 'target.bst')
+ assert_shared(cli, share2, project, 'target.bst')
+
+ # Now try pushing to both
+
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': True},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['push', 'target.bst'])
+
+ assert_shared(cli, share1, project, 'target.bst')
+ assert_shared(cli, share2, project, 'target.bst')
# Tests that `bst push --deps all` pushes all dependencies of the given element.
@@ -108,46 +107,47 @@ def test_push(cli, tmpdir, datafiles):
@pytest.mark.datafiles(DATA_DIR)
def test_push_all(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- # First build it without the artifact cache configured
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
-
- # Assert that we are now cached locally
- assert cli.get_element_state(project, 'target.bst') == 'cached'
-
- # Configure artifact share
- cli.configure({
- #
- # FIXME: This test hangs "sometimes" if we allow
- # concurrent push.
- #
- # It's not too bad to ignore since we're
- # using the local artifact cache functionality
- # only, but it should probably be fixed.
- #
- 'scheduler': {
- 'pushers': 1
- },
- 'artifacts': {
- 'url': share.repo,
- 'push': True,
- }
- })
-
- # Now try bst push all the deps
- result = cli.run(project=project, args=[
- 'push', 'target.bst',
- '--deps', 'all'
- ])
- result.assert_success()
-
- # And finally assert that all the artifacts are in the share
- assert_shared(cli, share, project, 'target.bst')
- assert_shared(cli, share, project, 'import-bin.bst')
- assert_shared(cli, share, project, 'import-dev.bst')
- assert_shared(cli, share, project, 'compose-all.bst')
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+ # First build it without the artifact cache configured
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+
+ # Assert that we are now cached locally
+ assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+ # Configure artifact share
+ cli.configure({
+ #
+ # FIXME: This test hangs "sometimes" if we allow
+ # concurrent push.
+ #
+ # It's not too bad to ignore since we're
+ # using the local artifact cache functionality
+ # only, but it should probably be fixed.
+ #
+ 'scheduler': {
+ 'pushers': 1
+ },
+ 'artifacts': {
+ 'url': share.repo,
+ 'push': True,
+ }
+ })
+
+ # Now try bst push all the deps
+ result = cli.run(project=project, args=[
+ 'push', 'target.bst',
+ '--deps', 'all'
+ ])
+ result.assert_success()
+
+ # And finally assert that all the artifacts are in the share
+ assert_shared(cli, share, project, 'target.bst')
+ assert_shared(cli, share, project, 'import-bin.bst')
+ assert_shared(cli, share, project, 'import-dev.bst')
+ assert_shared(cli, share, project, 'compose-all.bst')
# Tests that `bst build` won't push artifacts to the cache it just pulled from.
@@ -159,47 +159,47 @@ def test_push_after_pull(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
# Set up two artifact shares.
- share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
- share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1,\
+ create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
- # Set the scene: share1 has the artifact, share2 does not.
- #
- cli.configure({
- 'artifacts': {'url': share1.repo, 'push': True},
- })
+ # Set the scene: share1 has the artifact, share2 does not.
+ #
+ cli.configure({
+ 'artifacts': {'url': share1.repo, 'push': True},
+ })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
- cli.remove_artifact_from_cache(project, 'target.bst')
+ cli.remove_artifact_from_cache(project, 'target.bst')
- assert_shared(cli, share1, project, 'target.bst')
- assert_not_shared(cli, share2, project, 'target.bst')
- assert cli.get_element_state(project, 'target.bst') != 'cached'
+ assert_shared(cli, share1, project, 'target.bst')
+ assert_not_shared(cli, share2, project, 'target.bst')
+ assert cli.get_element_state(project, 'target.bst') != 'cached'
- # Now run the build again. Correct `bst build` behaviour is to download the
- # artifact from share1 but not push it back again.
- #
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert result.get_pulled_elements() == ['target.bst']
- assert result.get_pushed_elements() == []
-
- # Delete the artifact locally again.
- cli.remove_artifact_from_cache(project, 'target.bst')
-
- # Now we add share2 into the mix as a second push remote. This time,
- # `bst build` should push to share2 after pulling from share1.
- cli.configure({
- 'artifacts': [
- {'url': share1.repo, 'push': True},
- {'url': share2.repo, 'push': True},
- ]
- })
- result = cli.run(project=project, args=['build', 'target.bst'])
- result.assert_success()
- assert result.get_pulled_elements() == ['target.bst']
- assert result.get_pushed_elements() == ['target.bst']
+ # Now run the build again. Correct `bst build` behaviour is to download the
+ # artifact from share1 but not push it back again.
+ #
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert result.get_pulled_elements() == ['target.bst']
+ assert result.get_pushed_elements() == []
+
+ # Delete the artifact locally again.
+ cli.remove_artifact_from_cache(project, 'target.bst')
+
+ # Now we add share2 into the mix as a second push remote. This time,
+ # `bst build` should push to share2 after pulling from share1.
+ cli.configure({
+ 'artifacts': [
+ {'url': share1.repo, 'push': True},
+ {'url': share2.repo, 'push': True},
+ ]
+ })
+ result = cli.run(project=project, args=['build', 'target.bst'])
+ result.assert_success()
+ assert result.get_pulled_elements() == ['target.bst']
+ assert result.get_pushed_elements() == ['target.bst']
# Ensure that when an artifact's size exceeds available disk space
@@ -211,54 +211,54 @@ def test_artifact_expires(cli, datafiles, tmpdir):
element_path = os.path.join(project, 'elements')
# Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # Mock the os.statvfs() call to return a named tuple which emulates an
- # os.statvfs_result object
- statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
- os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
- f_bfree=(int(12e6) + int(2e9)),
- f_bsize=1))
-
- # Configure bst to push to the cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
-
- # Create and build an element of 5 MB
- create_element_size('element1.bst', element_path, [], int(5e6)) # [] => no deps
- result = cli.run(project=project, args=['build', 'element1.bst'])
- result.assert_success()
-
- # Create and build an element of 5 MB
- create_element_size('element2.bst', element_path, [], int(5e6)) # [] => no deps
- result = cli.run(project=project, args=['build', 'element2.bst'])
- result.assert_success()
-
- # check that elements 1 and 2 are cached both locally and remotely
- assert cli.get_element_state(project, 'element1.bst') == 'cached'
- assert_shared(cli, share, project, 'element1.bst')
- assert cli.get_element_state(project, 'element2.bst') == 'cached'
- assert_shared(cli, share, project, 'element2.bst')
-
- # update mocked available disk space now that two 5 MB artifacts have been added
- os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
- f_bfree=(int(2e6) + int(2e9)),
- f_bsize=1))
-
- # Create and build another element of 5 MB (This will exceed the free disk space available)
- create_element_size('element3.bst', element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element3.bst'])
- result.assert_success()
-
- # Ensure it is cached both locally and remotely
- assert cli.get_element_state(project, 'element3.bst') == 'cached'
- assert_shared(cli, share, project, 'element3.bst')
-
- # Ensure element1 has been removed from the share
- assert_not_shared(cli, share, project, 'element1.bst')
- # Ensure that element2 remains
- assert_shared(cli, share, project, 'element2.bst')
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+ # Mock the os.statvfs() call to return a named tuple which emulates an
+ # os.statvfs_result object
+ statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
+ os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
+ f_bfree=(int(12e6) + int(2e9)),
+ f_bsize=1))
+
+ # Configure bst to push to the cache
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ })
+
+ # Create and build an element of 5 MB
+ create_element_size('element1.bst', element_path, [], int(5e6)) # [] => no deps
+ result = cli.run(project=project, args=['build', 'element1.bst'])
+ result.assert_success()
+
+ # Create and build an element of 5 MB
+ create_element_size('element2.bst', element_path, [], int(5e6)) # [] => no deps
+ result = cli.run(project=project, args=['build', 'element2.bst'])
+ result.assert_success()
+
+ # check that elements 1 and 2 are cached both locally and remotely
+ assert cli.get_element_state(project, 'element1.bst') == 'cached'
+ assert_shared(cli, share, project, 'element1.bst')
+ assert cli.get_element_state(project, 'element2.bst') == 'cached'
+ assert_shared(cli, share, project, 'element2.bst')
+
+ # update mocked available disk space now that two 5 MB artifacts have been added
+ os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9),
+ f_bfree=(int(2e6) + int(2e9)),
+ f_bsize=1))
+
+ # Create and build another element of 5 MB (This will exceed the free disk space available)
+ create_element_size('element3.bst', element_path, [], int(5e6))
+ result = cli.run(project=project, args=['build', 'element3.bst'])
+ result.assert_success()
+
+ # Ensure it is cached both locally and remotely
+ assert cli.get_element_state(project, 'element3.bst') == 'cached'
+ assert_shared(cli, share, project, 'element3.bst')
+
+ # Ensure element1 has been removed from the share
+ assert_not_shared(cli, share, project, 'element1.bst')
+ # Ensure that element2 remains
+ assert_shared(cli, share, project, 'element2.bst')
# Test that a large artifact, whose size exceeds the quota, is not pushed
@@ -269,36 +269,36 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
element_path = os.path.join(project, 'elements')
# Create an artifact share (remote cache) in tmpdir/artifactshare
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # Mock a file system with 5 MB total space
- statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
- os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(5e6) + int(2e9),
- f_bfree=(int(5e6) + int(2e9)),
- f_bsize=1))
-
- # Configure bst to push to the remote cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
-
- # Create and push a 3MB element
- create_element_size('small_element.bst', element_path, [], int(3e6))
- result = cli.run(project=project, args=['build', 'small_element.bst'])
- result.assert_success()
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- # Create and try to push a 6MB element.
- create_element_size('large_element.bst', element_path, [], int(6e6))
- result = cli.run(project=project, args=['build', 'large_element.bst'])
- result.assert_success()
+ # Mock a file system with 5 MB total space
+ statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
+ os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(5e6) + int(2e9),
+ f_bfree=(int(5e6) + int(2e9)),
+ f_bsize=1))
+
+ # Configure bst to push to the remote cache
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ })
- # Ensure that the small artifact is still in the share
- assert cli.get_element_state(project, 'small_element.bst') == 'cached'
- assert_shared(cli, share, project, 'small_element.bst')
+ # Create and push a 3MB element
+ create_element_size('small_element.bst', element_path, [], int(3e6))
+ result = cli.run(project=project, args=['build', 'small_element.bst'])
+ result.assert_success()
- # Ensure that the artifact is cached locally but NOT remotely
- assert cli.get_element_state(project, 'large_element.bst') == 'cached'
- assert_not_shared(cli, share, project, 'large_element.bst')
+ # Create and try to push a 6MB element.
+ create_element_size('large_element.bst', element_path, [], int(6e6))
+ result = cli.run(project=project, args=['build', 'large_element.bst'])
+ result.assert_success()
+
+ # Ensure that the small artifact is still in the share
+ assert cli.get_element_state(project, 'small_element.bst') == 'cached'
+ assert_shared(cli, share, project, 'small_element.bst')
+
+ # Ensure that the artifact is cached locally but NOT remotely
+ assert cli.get_element_state(project, 'large_element.bst') == 'cached'
+ assert_not_shared(cli, share, project, 'large_element.bst')
# Test that when an element is pulled recently, it is not considered the LRU element.
@@ -312,60 +312,60 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
element_path = os.path.join(project, 'elements')
# Create an artifact share (remote cache) in tmpdir/artifactshare
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
- # Mock a file system with 12 MB free disk space
- statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
- os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9) + int(2e9),
- f_bfree=(int(12e6) + int(2e9)),
- f_bsize=1))
-
- # Configure bst to push to the cache
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
-
- # Create and build 2 elements, each of 5 MB.
- create_element_size('element1.bst', element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element1.bst'])
- result.assert_success()
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
- create_element_size('element2.bst', element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element2.bst'])
- result.assert_success()
+ # Mock a file system with 12 MB free disk space
+ statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
+ os.statvfs = MagicMock(return_value=statvfs_result(f_blocks=int(10e9) + int(2e9),
+ f_bfree=(int(12e6) + int(2e9)),
+ f_bsize=1))
- # Ensure they are cached locally
- assert cli.get_element_state(project, 'element1.bst') == 'cached'
- assert cli.get_element_state(project, 'element2.bst') == 'cached'
+ # Configure bst to push to the cache
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ })
- # Ensure that they have been pushed to the cache
- assert_shared(cli, share, project, 'element1.bst')
- assert_shared(cli, share, project, 'element2.bst')
+ # Create and build 2 elements, each of 5 MB.
+ create_element_size('element1.bst', element_path, [], int(5e6))
+ result = cli.run(project=project, args=['build', 'element1.bst'])
+ result.assert_success()
- # Remove element1 from the local cache
- cli.remove_artifact_from_cache(project, 'element1.bst')
- assert cli.get_element_state(project, 'element1.bst') != 'cached'
+ create_element_size('element2.bst', element_path, [], int(5e6))
+ result = cli.run(project=project, args=['build', 'element2.bst'])
+ result.assert_success()
- # Pull the element1 from the remote cache (this should update its mtime)
- result = cli.run(project=project, args=['pull', 'element1.bst', '--remote',
- share.repo])
- result.assert_success()
+ # Ensure they are cached locally
+ assert cli.get_element_state(project, 'element1.bst') == 'cached'
+ assert cli.get_element_state(project, 'element2.bst') == 'cached'
- # Ensure element1 is cached locally
- assert cli.get_element_state(project, 'element1.bst') == 'cached'
+ # Ensure that they have been pushed to the cache
+ assert_shared(cli, share, project, 'element1.bst')
+ assert_shared(cli, share, project, 'element2.bst')
- # Create and build element3 (of 5 MB)
- create_element_size('element3.bst', element_path, [], int(5e6))
- result = cli.run(project=project, args=['build', 'element3.bst'])
- result.assert_success()
+ # Remove element1 from the local cache
+ cli.remove_artifact_from_cache(project, 'element1.bst')
+ assert cli.get_element_state(project, 'element1.bst') != 'cached'
+
+ # Pull the element1 from the remote cache (this should update its mtime)
+ result = cli.run(project=project, args=['pull', 'element1.bst', '--remote',
+ share.repo])
+ result.assert_success()
+
+ # Ensure element1 is cached locally
+ assert cli.get_element_state(project, 'element1.bst') == 'cached'
+
+ # Create and build element3 (of 5 MB)
+ create_element_size('element3.bst', element_path, [], int(5e6))
+ result = cli.run(project=project, args=['build', 'element3.bst'])
+ result.assert_success()
- # Make sure it's cached locally and remotely
- assert cli.get_element_state(project, 'element3.bst') == 'cached'
- assert_shared(cli, share, project, 'element3.bst')
+ # Make sure it's cached locally and remotely
+ assert cli.get_element_state(project, 'element3.bst') == 'cached'
+ assert_shared(cli, share, project, 'element3.bst')
- # Ensure that element2 was deleted from the share and element1 remains
- assert_not_shared(cli, share, project, 'element2.bst')
- assert_shared(cli, share, project, 'element1.bst')
+ # Ensure that element2 was deleted from the share and element1 remains
+ assert_not_shared(cli, share, project, 'element2.bst')
+ assert_shared(cli, share, project, 'element1.bst')
@pytest.mark.datafiles(DATA_DIR)
@@ -381,11 +381,11 @@ def test_push_cross_junction(cli, tmpdir, datafiles):
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
- share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
- cli.configure({
- 'artifacts': {'url': share.repo, 'push': True},
- })
- result = cli.run(project=project, args=['push', 'junction.bst:import-etc.bst'])
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+ cli.configure({
+ 'artifacts': {'url': share.repo, 'push': True},
+ })
+ result = cli.run(project=project, args=['push', 'junction.bst:import-etc.bst'])
- cache_key = cli.get_element_key(project, 'junction.bst:import-etc.bst')
- assert share.has_artifact('subtest', 'import-etc.bst', cache_key)
+ cache_key = cli.get_element_key(project, 'junction.bst:import-etc.bst')
+ assert share.has_artifact('subtest', 'import-etc.bst', cache_key)
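
test_push in push.py is the one test that nests its with blocks rather than chaining them: the old code emptied share2 with shutil.rmtree() and re-created it mid-test, and under context managers that re-creation falls out of scoping. Roughly, reusing the sketch from the top:

    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1:
        with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
            ...  # first round of pushes against share2
        # share2 was torn down above; open a fresh, empty one for the second round
        with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
            ...

This is also why push.py no longer needs shutil, as removed at the top of its diff.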