diff options
author | Jürg Billeter <j@bitron.ch> | 2020-07-30 10:37:54 +0200 |
---|---|---|
committer | Jürg Billeter <j@bitron.ch> | 2020-09-03 14:12:02 +0200 |
commit | 7175dbb76aab99935a4e3f5884bac9451bfb655e (patch) | |
tree | 6881fb1690664b621bf62cead6075c5e44afd942 /tests | |
parent | 8d006f8f17ad35f61f69101fe1531564d8d8a688 (diff) | |
download | buildstream-7175dbb76aab99935a4e3f5884bac9451bfb655e.tar.gz |
Add ElementSourcesCache
Sources have been cached in CAS individually, except for sources that
transform other sources, which have been cached combined with all
previous sources of the element. This caching structure may be confusing,
as sources are specified in the element as a list. It is also not a good
fit for #1274, where we want to support caching individual sources in a
Remote Asset server with a BuildStream-independent URI (the `directory`
configuration would be especially problematic).
This replaces the combined caching of 'previous' sources with an
element-level source cache, which caches all sources of an element
staged together. Sources that don't depend on previous sources are still
cached individually.
This also makes it possible to add a list of all element sources to the
source proto used by the element-level source cache.
Diffstat (limited to 'tests')
-rw-r--r-- | tests/sourcecache/cache.py | 37 | ||||
-rw-r--r-- | tests/sourcecache/fetch.py | 14 | ||||
-rw-r--r-- | tests/sourcecache/push.py | 5 | ||||
-rw-r--r-- | tests/sourcecache/staging.py | 8 |
4 files changed, 44 insertions, 20 deletions
diff --git a/tests/sourcecache/cache.py b/tests/sourcecache/cache.py index bbc3d8329..34009fc65 100644 --- a/tests/sourcecache/cache.py +++ b/tests/sourcecache/cache.py @@ -37,12 +37,15 @@ def test_patch_sources_cached_1(cli, datafiles): res = cli.run(project=project_dir, args=["build", "source-with-patches-1.bst"]) res.assert_success() - # as we have a local, patch, local config, the first local and patch should - # be cached together, and the last local on it's own source_protos = os.path.join(project_dir, "cache", "source_protos") + elementsources_protos = os.path.join(project_dir, "cache", "elementsources") - assert len(os.listdir(os.path.join(source_protos, "patch"))) == 1 + # The two local sources can be cached individually, + # the patch source cannot be cached on its own assert len(os.listdir(os.path.join(source_protos, "local"))) == 2 + assert not os.path.exists(os.path.join(source_protos, "patch")) + + assert len(os.listdir(elementsources_protos)) == 1 @pytest.mark.datafiles(DATA_DIR) @@ -52,10 +55,15 @@ def test_patch_sources_cached_2(cli, datafiles): res = cli.run(project=project_dir, args=["build", "source-with-patches-2.bst"]) res.assert_success() - # As everything is before the patch it should all be cached together source_protos = os.path.join(project_dir, "cache", "source_protos") + elementsources_protos = os.path.join(project_dir, "cache", "elementsources") + + # The three local sources can be cached individually, + # the patch source cannot be cached on its own + assert len(os.listdir(os.path.join(source_protos, "local"))) == 3 + assert not os.path.exists(os.path.join(source_protos, "patch")) - assert len(os.listdir(os.path.join(source_protos, "patch"))) == 1 + assert len(os.listdir(elementsources_protos)) == 1 @pytest.mark.datafiles(DATA_DIR) @@ -67,9 +75,12 @@ def test_sources_without_patch(cli, datafiles): # No patches so everything should be cached seperately source_protos = os.path.join(project_dir, "cache", "source_protos") + 
elementsources_protos = os.path.join(project_dir, "cache", "elementsources") assert len(os.listdir(os.path.join(source_protos, "local"))) == 3 + assert len(os.listdir(elementsources_protos)) == 1 + @pytest.mark.datafiles(DATA_DIR) def test_source_cache_key(cli, datafiles): @@ -103,9 +114,17 @@ def test_source_cache_key(cli, datafiles): res = cli.run(project=project_dir, args=["build", element_name]) res.assert_success() - # Should have one source ref + # Should have source refs for the two remote sources + remote_protos = os.path.join(project_dir, "cache", "source_protos", "remote") + assert len(os.listdir(remote_protos)) == 2 + # Should not have any source refs for the patch source + # as that is a transformation of the previous sources, + # not cacheable on its own patch_protos = os.path.join(project_dir, "cache", "source_protos", "patch") - assert len(os.listdir(patch_protos)) == 1 + assert not os.path.exists(patch_protos) + # Should have one element sources ref + elementsources_protos = os.path.join(project_dir, "cache", "elementsources") + assert len(os.listdir(elementsources_protos)) == 1 # modify hello-patch file and check tracking updates refs with open(os.path.join(file_path, "dev-files", "usr", "include", "pony.h"), "a") as f: @@ -118,5 +137,5 @@ def test_source_cache_key(cli, datafiles): res = cli.run(project=project_dir, args=["source", "fetch", element_name]) res.assert_success() - # We should have a new source ref - assert len(os.listdir(patch_protos)) == 2 + # We should have a new element sources ref + assert len(os.listdir(elementsources_protos)) == 2 diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py index ac8c6258f..76f5508f9 100644 --- a/tests/sourcecache/fetch.py +++ b/tests/sourcecache/fetch.py @@ -75,7 +75,7 @@ def test_source_fetch(cli, tmpdir, datafiles): element = project.load_elements([element_name])[0] element._initialize_state() - assert not element._has_all_sources_in_source_cache() + assert not 
element._cached_sources() source = list(element.sources())[0] assert not share.get_source_proto(source._get_source_name()) @@ -117,7 +117,7 @@ def test_source_fetch(cli, tmpdir, datafiles): element._initialize_state() # check that we have the source in the cas now and it's not fetched - assert element._has_all_sources_in_source_cache() + assert element._cached_sources() assert os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) == [] @@ -135,7 +135,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles): element = project.load_elements([element_name])[0] element._initialize_state() - assert not element._has_all_sources_in_source_cache() + assert not element._cached_sources() source = list(element.sources())[0] assert not share.get_source_proto(source._get_source_name()) @@ -151,7 +151,9 @@ def test_fetch_fallback(cli, tmpdir, datafiles): assert ("SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])) in res.stderr # Check that the source in both in the source dir and the local CAS - assert element._has_all_sources_in_source_cache() + element = project.load_elements([element_name])[0] + element._initialize_state() + assert element._cached_sources() @pytest.mark.datafiles(DATA_DIR) @@ -167,7 +169,7 @@ def test_pull_fail(cli, tmpdir, datafiles): element = project.load_elements([element_name])[0] element._initialize_state() - assert not element._has_all_sources_in_source_cache() + assert not element._cached_sources() source = list(element.sources())[0] # remove files and check that it doesn't build @@ -199,7 +201,7 @@ def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles): element = project.load_elements([element_name])[0] element._initialize_state() - assert not element._has_all_sources_in_source_cache() + assert not element._cached_sources() source = list(element.sources())[0] assert not share.get_artifact_proto(source._get_source_name()) diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py index 
b1aa8a375..25a4309b8 100644 --- a/tests/sourcecache/push.py +++ b/tests/sourcecache/push.py @@ -85,7 +85,7 @@ def test_source_push_split(cli, tmpdir, datafiles): element = project.load_elements(["push.bst"])[0] element._initialize_state() - assert not element._has_all_sources_in_source_cache() + assert not element._cached_sources() source = list(element.sources())[0] # check we don't have it in the current cache @@ -135,7 +135,7 @@ def test_source_push(cli, tmpdir, datafiles): element = project.load_elements(["push.bst"])[0] element._initialize_state() - assert not element._has_all_sources_in_source_cache() + assert not element._cached_sources() source = list(element.sources())[0] # check we don't have it in the current cache @@ -268,6 +268,7 @@ def test_push_missing_source_after_build(cli, tmpdir, datafiles): res.assert_success() # Delete source but keep artifact in cache + shutil.rmtree(os.path.join(cache_dir, "elementsources")) shutil.rmtree(os.path.join(cache_dir, "source_protos")) with create_artifact_share(os.path.join(str(tmpdir), "sourceshare")) as share: diff --git a/tests/sourcecache/staging.py b/tests/sourcecache/staging.py index bfde1b436..0f2f05891 100644 --- a/tests/sourcecache/staging.py +++ b/tests/sourcecache/staging.py @@ -66,7 +66,7 @@ def test_source_staged(tmpdir, cli, datafiles): element = project.load_elements(["import-bin.bst"])[0] element._initialize_state() source = list(element.sources())[0] - assert element._has_all_sources_in_source_cache() + assert element._cached_sources() assert sourcecache.contains(source) # Extract the file and check it's the same as the one we imported @@ -101,7 +101,7 @@ def test_source_fetch(tmpdir, cli, datafiles): element = project.load_elements(["import-dev.bst"])[0] element._initialize_state() source = list(element.sources())[0] - assert element._has_all_sources_in_source_cache() + assert element._cached_sources() # check that the directory structures are identical digest = 
sourcecache.export(source)._get_digest() @@ -120,6 +120,7 @@ def test_staged_source_build(tmpdir, datafiles, cli): cachedir = os.path.join(str(tmpdir), "cache") element_path = "elements" source_protos = os.path.join(str(tmpdir), "cache", "source_protos") + elementsources = os.path.join(str(tmpdir), "cache", "elementsources") source_dir = os.path.join(str(tmpdir), "cache", "sources") cli.configure({"cachedir": cachedir}) @@ -135,7 +136,7 @@ def test_staged_source_build(tmpdir, datafiles, cli): element._initialize_state() # check consistency of the source - assert not element._has_all_sources_in_source_cache() + assert not element._cached_sources() res = cli.run(project=project_dir, args=["build", "target.bst"]) res.assert_success() @@ -164,6 +165,7 @@ def test_staged_source_build(tmpdir, datafiles, cli): # Now remove the source refs and check the state shutil.rmtree(source_protos) + shutil.rmtree(elementsources) cli.remove_artifact_from_cache(project_dir, "target.bst") states = cli.get_element_states(project_dir, ["target.bst"]) assert states["target.bst"] == "fetch needed" |