summary refs log tree commit diff
path: root/tests/frontend/push.py
diff options
context:
space:
mode:
Diffstat (limited to 'tests/frontend/push.py')
-rw-r--r--    tests/frontend/push.py    121
1 file changed, 24 insertions, 97 deletions
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 21a47838c..6e2e283cd 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -61,9 +61,7 @@ def test_push(cli, tmpdir, datafiles):
# Set up two artifact shares.
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1:
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as share2:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
# Try pushing with no remotes configured. This should fail.
result = cli.run(project=project, args=["artifact", "push", "target.bst"])
@@ -78,14 +76,7 @@ def test_push(cli, tmpdir, datafiles):
result.assert_main_error(ErrorDomain.STREAM, None)
# Configure bst to push to one of the caches and run `bst artifact push`. This works.
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": False},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ cli.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]})
cli.run(project=project, args=["artifact", "push", "target.bst"])
assert_not_shared(cli, share1, project, "target.bst")
@@ -93,17 +84,8 @@ def test_push(cli, tmpdir, datafiles):
# Now try pushing to both
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare2")
- ) as share2:
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": True},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
+ cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
cli.run(project=project, args=["artifact", "push", "target.bst"])
assert_shared(cli, share1, project, "target.bst")
@@ -129,9 +111,7 @@ def test_push_artifact(cli, tmpdir, datafiles):
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Configure artifact share
cli.configure(
@@ -215,15 +195,7 @@ def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
# Now try and push the target with its deps using --on-error continue
# and assert that push failed, but what could be pushed was pushed
result = cli.run(
- project=project,
- args=[
- "--on-error=continue",
- "artifact",
- "push",
- "--deps",
- "all",
- "target.bst",
- ],
+ project=project, args=["--on-error=continue", "artifact", "push", "--deps", "all", "target.bst",],
)
# The overall process should return as failed
@@ -279,9 +251,7 @@ def test_push_all(cli, tmpdir, datafiles):
)
# Now try bst artifact push all the deps
- result = cli.run(
- project=project, args=["artifact", "push", "target.bst", "--deps", "all"]
- )
+ result = cli.run(project=project, args=["artifact", "push", "target.bst", "--deps", "all"])
result.assert_success()
# And finally assert that all the artifacts are in the share
@@ -310,9 +280,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
# Assert that the *artifact* is cached locally
cache_key = cli.get_element_key(project, element)
artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
- assert os.path.exists(
- os.path.join(local_cache, "artifacts", "refs", artifact_ref)
- )
+ assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
# Configure artifact share
cli.configure(
@@ -331,9 +299,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
)
# Now try bst artifact push all the deps
- result = cli.run(
- project=project, args=["artifact", "push", "--deps", "all", artifact_ref]
- )
+ result = cli.run(project=project, args=["artifact", "push", "--deps", "all", artifact_ref])
result.assert_main_error(ErrorDomain.STREAM, None)
assert "Error: '--deps all' is not supported for artifact refs" in result.stderr
@@ -347,9 +313,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
project = str(datafiles)
# Set up two artifact shares.
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as share1, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
) as share2:
@@ -381,14 +345,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
# Now we add share2 into the mix as a second push remote. This time,
# `bst build` should push to share2 after pulling from share1.
- cli.configure(
- {
- "artifacts": [
- {"url": share1.repo, "push": True},
- {"url": share2.repo, "push": True},
- ]
- }
- )
+ cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
result = cli.run(project=project, args=["build", "target.bst"])
result.assert_success()
assert result.get_pulled_elements() == ["target.bst"]
@@ -405,9 +362,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
# Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
# Set a 22 MB quota
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)
- ) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
# Configure bst to push to the cache
cli.configure(
@@ -459,9 +414,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Mock a file system with 5 MB total space
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)
- ) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)) as share:
# Configure bst to push to the remote cache
cli.configure(
@@ -488,9 +441,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
result.assert_main_error(ErrorDomain.STREAM, None)
# Ensure that the small artifact is still in the share
- states = cli.get_element_states(
- project, ["small_element.bst", "large_element.bst"]
- )
+ states = cli.get_element_states(project, ["small_element.bst", "large_element.bst"])
assert states["small_element.bst"] == "cached"
assert_shared(cli, share, project, "small_element.bst")
@@ -507,9 +458,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
# Create an artifact share (remote cache) in tmpdir/artifactshare
# Set a 22 MB quota
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)
- ) as share:
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
# Configure bst to push to the cache
cli.configure(
@@ -541,10 +490,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
assert cli.get_element_state(project, "element1.bst") != "cached"
# Pull the element1 from the remote cache (this should update its mtime)
- result = cli.run(
- project=project,
- args=["artifact", "pull", "element1.bst", "--remote", share.repo],
- )
+ result = cli.run(project=project, args=["artifact", "pull", "element1.bst", "--remote", share.repo],)
result.assert_success()
# Ensure element1 is cached locally
@@ -583,16 +529,10 @@ def test_push_cross_junction(cli, tmpdir, datafiles):
cli.configure(
{"artifacts": {"url": share.repo, "push": True},}
)
- cli.run(
- project=project, args=["artifact", "push", "junction.bst:import-etc.bst"]
- )
+ cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])
cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
- assert share.get_artifact(
- cli.get_artifact_name(
- project, "subtest", "import-etc.bst", cache_key=cache_key
- )
- )
+ assert share.get_artifact(cli.get_artifact_name(project, "subtest", "import-etc.bst", cache_key=cache_key))
@pytest.mark.datafiles(DATA_DIR)
@@ -611,9 +551,7 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
result = cli.run(project=project, args=["artifact", "push", "target.bst"])
result.assert_success()
- assert (
- not result.get_pushed_elements()
- ), "No elements should have been pushed since the cache was populated"
+ assert not result.get_pushed_elements(), "No elements should have been pushed since the cache was populated"
assert "INFO Remote ({}) already has ".format(share.repo) in result.stderr
assert "SKIPPED Push" in result.stderr
@@ -623,26 +561,18 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
project = str(datafiles)
caplog.set_level(1)
- with create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare1")
- ) as shareuser, create_artifact_share(
+ with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, create_artifact_share(
os.path.join(str(tmpdir), "artifactshare2")
- ) as shareproject, create_artifact_share(
- os.path.join(str(tmpdir), "artifactshare3")
- ) as sharecli:
+ ) as shareproject, create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:
# Add shareproject repo url to project.conf
with open(os.path.join(project, "project.conf"), "a") as projconf:
- projconf.write(
- "artifacts:\n url: {}\n push: True".format(shareproject.repo)
- )
+ projconf.write("artifacts:\n url: {}\n push: True".format(shareproject.repo))
# Configure shareuser remote in user conf
cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
- result = cli.run(
- project=project, args=["build", "--remote", sharecli.repo, "target.bst"]
- )
+ result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
# Artifacts should have only been pushed to sharecli, as that was provided via the cli
result.assert_success()
@@ -668,10 +598,7 @@ def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
cli.configure(
- {
- "artifacts": {"url": share.repo, "push": True},
- "projects": {"test": {"strict": False}},
- }
+ {"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}},}
)
# First get us a build