diff options
author | Benjamin Schubert <bschubert15@bloomberg.net> | 2019-11-27 10:13:20 +0000 |
---|---|---|
committer | Benjamin Schubert <contact@benschubert.me> | 2020-01-16 14:36:35 +0000 |
commit | c48e89d5da3a7b2b2e72d536aaea9ec182d1e1fd (patch) | |
tree | 1af19de25fd77085750a9713aeb96f7460eff548 | |
parent | 0d012ee39240a677baa6336bcdf96a5c282d3d85 (diff) | |
download | buildstream-c48e89d5da3a7b2b2e72d536aaea9ec182d1e1fd.tar.gz |
element.py: Rename '_source_cached' to '_has_all_sources_in_source_cache'
'_source_cached' is not explicit enough as it doesn't distinguish
between sources in their respective caches and sources in the global
sourcecache.
-rw-r--r-- | src/buildstream/_frontend/widget.py | 2 | ||||
-rw-r--r-- | src/buildstream/_loader/loader.py | 2 | ||||
-rw-r--r-- | src/buildstream/_pipeline.py | 2 | ||||
-rw-r--r-- | src/buildstream/_scheduler/queues/fetchqueue.py | 2 | ||||
-rw-r--r-- | src/buildstream/_stream.py | 2 | ||||
-rw-r--r-- | src/buildstream/element.py | 22 | ||||
-rw-r--r-- | tests/sourcecache/fetch.py | 12 | ||||
-rw-r--r-- | tests/sourcecache/push.py | 4 | ||||
-rw-r--r-- | tests/sourcecache/staging.py | 6 |
9 files changed, 27 insertions, 27 deletions
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py index 98ebf31f3..44922cac8 100644 --- a/src/buildstream/_frontend/widget.py +++ b/src/buildstream/_frontend/widget.py @@ -356,7 +356,7 @@ class LogLine(Widget): line = p.fmt_subst(line, "state", "failed", fg="red") elif element._cached_success(): line = p.fmt_subst(line, "state", "cached", fg="magenta") - elif consistency == Consistency.RESOLVED and not element._source_cached(): + elif consistency == Consistency.RESOLVED and not element._has_all_sources_in_source_cache(): line = p.fmt_subst(line, "state", "fetch needed", fg="red") elif element._buildable(): line = p.fmt_subst(line, "state", "buildable", fg="green") diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py index 0c6c725c7..05c139bb2 100644 --- a/src/buildstream/_loader/loader.py +++ b/src/buildstream/_loader/loader.py @@ -653,7 +653,7 @@ class Loader: # Handle the case where a subproject needs to be fetched # - if element._get_consistency() >= Consistency.RESOLVED and not element._source_cached(): + if element._get_consistency() >= Consistency.RESOLVED and not element._has_all_sources_in_source_cache(): if ticker: ticker(filename, "Fetching subproject") self._fetch_subprojects([element]) diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py index 282408cd4..8d0a3b411 100644 --- a/src/buildstream/_pipeline.py +++ b/src/buildstream/_pipeline.py @@ -375,7 +375,7 @@ class Pipeline: uncached = [] with self._context.messenger.timed_activity("Checking sources"): for element in elements: - if element._get_consistency() < Consistency.CACHED and not element._source_cached(): + if element._get_consistency() < Consistency.CACHED and not element._has_all_sources_in_source_cache(): uncached.append(element) if uncached: diff --git a/src/buildstream/_scheduler/queues/fetchqueue.py b/src/buildstream/_scheduler/queues/fetchqueue.py index 4f38f377a..77c8b8c9c 100644 --- 
a/src/buildstream/_scheduler/queues/fetchqueue.py +++ b/src/buildstream/_scheduler/queues/fetchqueue.py @@ -75,7 +75,7 @@ class FetchQueue(Queue): if self._should_fetch_original: assert element._get_consistency() == Consistency.CACHED else: - assert element._source_cached() + assert element._has_all_sources_in_source_cache() def register_pending_element(self, element): # Set a "can_query_cache" callback for an element not yet ready diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py index db9794c45..18297c2e4 100644 --- a/src/buildstream/_stream.py +++ b/src/buildstream/_stream.py @@ -212,7 +212,7 @@ class Stream: # definitions to control the execution environment only. if directory is None: - if not element._source_cached(): + if not element._has_all_sources_in_source_cache(): raise StreamError( "Sources for element {} are not cached." "Element must be fetched.".format(element._get_full_name()) diff --git a/src/buildstream/element.py b/src/buildstream/element.py index 8c77ce775..057b229e8 100644 --- a/src/buildstream/element.py +++ b/src/buildstream/element.py @@ -262,7 +262,7 @@ class Element(Plugin): self.__assemble_done = False # Element is assembled self.__pull_done = False # Whether pull was attempted self.__cached_successfully = None # If the Element is known to be successfully cached - self.__source_cached = None # If the sources are known to be successfully cached + self.__has_all_sources_in_source_cache = None # If the sources are known to be successfully cached self.__splits = None # Resolved regex objects for computing split domains self.__whitelist_regex = None # Resolved regex object to check if file is allowed to overlap self.__tainted = None # Whether the artifact is tainted and should not be shared @@ -1169,7 +1169,7 @@ class Element(Plugin): # (bool): Whether this element can currently be built # def _buildable(self): - if self._get_consistency() < Consistency.CACHED and not self._source_cached(): + if self._get_consistency() < 
Consistency.CACHED and not self._has_all_sources_in_source_cache(): return False if not self.__assemble_scheduled: @@ -1442,7 +1442,7 @@ class Element(Plugin): else: # Assert sources are cached - assert self._source_cached() + assert self._has_all_sources_in_source_cache() if self.__sources: @@ -1830,11 +1830,11 @@ class Element(Plugin): def _skip_source_push(self): if not self.__sources or self._get_workspace(): return True - return not (self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached()) + return not (self.__sourcecache.has_push_remotes(plugin=self) and self._has_all_sources_in_source_cache()) def _source_push(self): # try and push sources if we've got them - if self.__sourcecache.has_push_remotes(plugin=self) and self._source_cached(): + if self.__sourcecache.has_push_remotes(plugin=self) and self._has_all_sources_in_source_cache(): for source in self.sources(): if not self.__sourcecache.push(source): return False @@ -2155,9 +2155,9 @@ class Element(Plugin): return _cachekey.generate_key(cache_key_dict) # Check if sources are cached, generating the source key if it hasn't been - def _source_cached(self): - if self.__source_cached is not None: - return self.__source_cached + def _has_all_sources_in_source_cache(self): + if self.__has_all_sources_in_source_cache is not None: + return self.__has_all_sources_in_source_cache if self.__sources: sourcecache = self._get_context().sourcecache @@ -2175,7 +2175,7 @@ class Element(Plugin): if not sourcecache.contains(source): return False - self.__source_cached = True + self.__has_all_sources_in_source_cache = True return True def _should_fetch(self, fetch_original=False): @@ -2185,7 +2185,7 @@ class Element(Plugin): fetch_original (bool): whether we need to original unstaged source """ if (self._get_consistency() == Consistency.CACHED and fetch_original) or ( - self._source_cached() and not fetch_original + self._has_all_sources_in_source_cache() and not fetch_original ): return False else: @@ 
-2968,7 +2968,7 @@ class Element(Plugin): # Caches the sources into the local CAS # def __cache_sources(self): - if self.__sources and not self._source_cached(): + if self.__sources and not self._has_all_sources_in_source_cache(): last_requires_previous = 0 # commit all other sources by themselves for ix, source in enumerate(self.__sources): diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py index 4096b56b8..e21f84c89 100644 --- a/tests/sourcecache/fetch.py +++ b/tests/sourcecache/fetch.py @@ -83,7 +83,7 @@ def test_source_fetch(cli, tmpdir, datafiles): project.ensure_fully_loaded() element = project.load_elements([element_name])[0] - assert not element._source_cached() + assert not element._has_all_sources_in_source_cache() source = list(element.sources())[0] assert not share.get_source_proto(source._get_source_name()) @@ -112,7 +112,7 @@ def test_source_fetch(cli, tmpdir, datafiles): assert "Pulled source" in res.stderr # check that we have the source in the cas now and it's not fetched - assert element._source_cached() + assert element._has_all_sources_in_source_cache() assert os.listdir(os.path.join(str(tmpdir), "cache", "sources", "git")) == [] @@ -129,7 +129,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles): project.ensure_fully_loaded() element = project.load_elements([element_name])[0] - assert not element._source_cached() + assert not element._has_all_sources_in_source_cache() source = list(element.sources())[0] assert not share.get_source_proto(source._get_source_name()) @@ -145,7 +145,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles): assert ("SUCCESS Fetching from {}".format(repo.source_config(ref=ref)["url"])) in res.stderr # Check that the source in both in the source dir and the local CAS - assert element._source_cached() + assert element._has_all_sources_in_source_cache() @pytest.mark.datafiles(DATA_DIR) @@ -160,7 +160,7 @@ def test_pull_fail(cli, tmpdir, datafiles): project.ensure_fully_loaded() element = 
project.load_elements([element_name])[0] - assert not element._source_cached() + assert not element._has_all_sources_in_source_cache() source = list(element.sources())[0] # remove files and check that it doesn't build @@ -191,7 +191,7 @@ def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles): project.ensure_fully_loaded() element = project.load_elements([element_name])[0] - assert not element._source_cached() + assert not element._has_all_sources_in_source_cache() source = list(element.sources())[0] assert not share.get_artifact_proto(source._get_source_name()) diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py index 771a94ca1..210bbfcff 100644 --- a/tests/sourcecache/push.py +++ b/tests/sourcecache/push.py @@ -84,7 +84,7 @@ def test_source_push_split(cli, tmpdir, datafiles): project.ensure_fully_loaded() element = project.load_elements(["push.bst"])[0] - assert not element._source_cached() + assert not element._has_all_sources_in_source_cache() source = list(element.sources())[0] # check we don't have it in the current cache @@ -133,7 +133,7 @@ def test_source_push(cli, tmpdir, datafiles): project.ensure_fully_loaded() element = project.load_elements(["push.bst"])[0] - assert not element._source_cached() + assert not element._has_all_sources_in_source_cache() source = list(element.sources())[0] # check we don't have it in the current cache diff --git a/tests/sourcecache/staging.py b/tests/sourcecache/staging.py index 994adb32a..dbfc028f2 100644 --- a/tests/sourcecache/staging.py +++ b/tests/sourcecache/staging.py @@ -65,7 +65,7 @@ def test_source_staged(tmpdir, cli, datafiles): # seems to be the only way to get the sources? 
element = project.load_elements(["import-bin.bst"])[0] source = list(element.sources())[0] - assert element._source_cached() + assert element._has_all_sources_in_source_cache() assert sourcecache.contains(source) # Extract the file and check it's the same as the one we imported @@ -99,7 +99,7 @@ def test_source_fetch(tmpdir, cli, datafiles): element = project.load_elements(["import-dev.bst"])[0] source = list(element.sources())[0] - assert element._source_cached() + assert element._has_all_sources_in_source_cache() # check that the directory structures are identical digest = sourcecache.export(source)._get_digest() @@ -132,7 +132,7 @@ def test_staged_source_build(tmpdir, datafiles, cli): element = project.load_elements(["import-dev.bst"])[0] # check consistency of the source - assert not element._source_cached() + assert not element._has_all_sources_in_source_cache() res = cli.run(project=project_dir, args=["build", "target.bst"]) res.assert_success() |