author    Jürg Billeter <j@bitron.ch>  2020-10-08 17:09:00 +0200
committer Jürg Billeter <j@bitron.ch>  2020-12-15 07:59:38 +0100
commit    834c4c3b9b0317b81765cb1a9dcab7d7ef2e5c8c (patch)
tree      98fec1743620ce70d8cf3cca0d417c914265a4c5
parent    9c9f11bfbcfad94ef8263eb1c41a2cc472965c19 (diff)
download  buildstream-juerg/cache-query-job-65.tar.gz
Call _initialize_state() in Element._new_from_load_element() (branch: juerg/cache-query-job-65)
With the cache queries moved to job threads, `_initialize_state()` is fairly lightweight and can be called earlier.
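
A minimal sketch of the resulting call order, using simplified stand-in classes rather than the real BuildStream API (the actual `_new_from_load_element()` takes a `LoadElement` and a progress task and does considerably more work):

```python
# Sketch only: simplified stand-ins to illustrate where _initialize_state()
# now runs. Names and signatures here are assumptions, not the real API.

class Element:
    def __init__(self):
        self._state_initialized = False

    def _preflight(self):
        # Plugin preflight checks would run here.
        pass

    def _initialize_state(self):
        # With cache queries moved to job threads, this only sets up
        # lightweight bookkeeping and no longer blocks on cache lookups,
        # which is what makes calling it this early acceptable.
        self._state_initialized = True

    @classmethod
    def _new_from_load_element(cls, load_element, task=None):
        element = cls()
        element._preflight()
        element._initialize_state()  # new: called at construction time
        if task:
            task.add_current_progress()
        return element


# Callers such as the loader no longer call _initialize_state() themselves
# after constructing the element.
element = Element._new_from_load_element(load_element=None)
assert element._state_initialized
```

With state initialized at construction time, the separate `Pipeline.resolve_elements()` pass and the explicit `_initialize_state()` calls in the tests become redundant, which is what the diff below removes.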
-rw-r--r--  src/buildstream/_loader/loadelement.pyx    1
-rw-r--r--  src/buildstream/_loader/loader.py           1
-rw-r--r--  src/buildstream/_pipeline.py               30
-rw-r--r--  src/buildstream/_stream.py                  1
-rw-r--r--  src/buildstream/element.py                  4
-rw-r--r--  tests/artifactcache/push.py                 7
-rw-r--r--  tests/sourcecache/fetch.py                 11
-rw-r--r--  tests/sourcecache/push.py                   4
-rw-r--r--  tests/sourcecache/staging.py                5
9 files changed, 13 insertions, 51 deletions
diff --git a/src/buildstream/_loader/loadelement.pyx b/src/buildstream/_loader/loadelement.pyx
index 210869e51..f69e13857 100644
--- a/src/buildstream/_loader/loadelement.pyx
+++ b/src/buildstream/_loader/loadelement.pyx
@@ -286,7 +286,6 @@ cdef class LoadElement:
from ..element import Element
element = Element._new_from_load_element(self)
- element._initialize_state()
# Custom error for link dependencies, since we don't completely
# parse their dependencies we cannot rely on the built-in ElementError.
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 54efd27ae..3d835a983 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -817,7 +817,6 @@ class Loader:
)
element = Element._new_from_load_element(load_element)
- element._initialize_state()
# Handle the case where a subproject has no ref
#
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index 6e41d70c8..b5e01d9a9 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -79,36 +79,6 @@ class Pipeline:
return tuple(element_groups)
- # resolve_elements()
- #
- # Resolve element state and cache keys.
- #
- # Args:
- # targets (list of Element): The list of toplevel element targets
- #
- def resolve_elements(self, targets):
- with self._context.messenger.simple_task("Resolving cached state", silent_nested=True) as task:
- # We need to go through the project to access the loader
- if task:
- task.set_maximum_progress(self._project.loader.loaded)
-
- # XXX: Now that Element._update_state() can trigger recursive update_state calls
- # it is possible that we could get a RecursionError. However, this is unlikely
- # to happen, even for large projects (tested with the Debian stack). Although,
- # if it does become a problem we may have to set the recursion limit to a
- # greater value.
- for element in self.dependencies(targets, _Scope.ALL):
- # Determine initial element state.
- element._initialize_state()
-
- # We may already have Elements which are cached and have their runtimes
- # cached, if this is the case, we should immediately notify their reverse
- # dependencies.
- element._update_ready_for_runtime_and_cached()
-
- if task:
- task.add_current_progress()
-
# check_remotes()
#
# Check if the target artifact is cached in any of the available remotes
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 27f2ed497..dfe9e1466 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -1348,7 +1348,6 @@ class Stream:
# Now move on to loading primary selection.
#
- self._pipeline.resolve_elements(self.targets)
selected = self._pipeline.get_selection(self.targets, selection, silent=False)
selected = self._pipeline.except_elements(self.targets, selected, except_elements)
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index e27de6e96..4bee57054 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -1132,6 +1132,8 @@ class Element(Plugin):
element.__preflight()
+ element._initialize_state()
+
if task:
task.add_current_progress()
@@ -2871,6 +2873,8 @@ class Element(Plugin):
self.__strict_cache_key = artifact.strict_key
self.__weak_cache_key = artifact.weak_key
+ self._initialize_state()
+
@classmethod
def __compose_default_splits(cls, project, defaults, first_pass):
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 17ad2e2a7..cd9930ecc 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -6,7 +6,6 @@ import os
import pytest
from buildstream import _yaml
-from buildstream.types import _Scope
from buildstream._project import Project
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from buildstream.testing import cli # pylint: disable=unused-import
@@ -33,12 +32,6 @@ def _push(cli, cache_dir, project_dir, config_file, target):
# Create a local artifact cache handle
artifactcache = context.artifactcache
- # Ensure the element's artifact memeber is initialised
- # This is duplicated from Pipeline.resolve_elements()
- # as this test does not use the cli frontend.
- for e in element._dependencies(_Scope.ALL):
- e._initialize_state()
-
# Manually setup the CAS remotes
artifactcache.setup_remotes(use_config=True)
artifactcache.initialize_remotes()
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index 76f5508f9..b39e16299 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -74,7 +74,7 @@ def test_source_fetch(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements([element_name])[0]
- element._initialize_state()
+ element._fetch(check_only=True)
assert not element._cached_sources()
source = list(element.sources())[0]
@@ -114,7 +114,6 @@ def test_source_fetch(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements([element_name])[0]
- element._initialize_state()
# check that we have the source in the cas now and it's not fetched
assert element._cached_sources()
@@ -134,7 +133,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements([element_name])[0]
- element._initialize_state()
+ element._fetch(check_only=True)
assert not element._cached_sources()
source = list(element.sources())[0]
@@ -152,7 +151,7 @@ def test_fetch_fallback(cli, tmpdir, datafiles):
# Check that the source in both in the source dir and the local CAS
element = project.load_elements([element_name])[0]
- element._initialize_state()
+ element._fetch(check_only=True)
assert element._cached_sources()
@@ -168,7 +167,7 @@ def test_pull_fail(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements([element_name])[0]
- element._initialize_state()
+ element._fetch(check_only=True)
assert not element._cached_sources()
source = list(element.sources())[0]
@@ -200,7 +199,7 @@ def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements([element_name])[0]
- element._initialize_state()
+ element._fetch(check_only=True)
assert not element._cached_sources()
source = list(element.sources())[0]
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index 25a4309b8..3291c4023 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -84,7 +84,7 @@ def test_source_push_split(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements(["push.bst"])[0]
- element._initialize_state()
+ element._fetch(check_only=True)
assert not element._cached_sources()
source = list(element.sources())[0]
@@ -134,7 +134,7 @@ def test_source_push(cli, tmpdir, datafiles):
project.ensure_fully_loaded()
element = project.load_elements(["push.bst"])[0]
- element._initialize_state()
+ element._fetch(check_only=True)
assert not element._cached_sources()
source = list(element.sources())[0]
diff --git a/tests/sourcecache/staging.py b/tests/sourcecache/staging.py
index 0f2f05891..940f1f1ab 100644
--- a/tests/sourcecache/staging.py
+++ b/tests/sourcecache/staging.py
@@ -64,7 +64,7 @@ def test_source_staged(tmpdir, cli, datafiles):
# now check that the source is in the refs file, this is pretty messy but
# seems to be the only way to get the sources?
element = project.load_elements(["import-bin.bst"])[0]
- element._initialize_state()
+ element._fetch(check_only=True)
source = list(element.sources())[0]
assert element._cached_sources()
assert sourcecache.contains(source)
@@ -99,7 +99,7 @@ def test_source_fetch(tmpdir, cli, datafiles):
sourcecache = context.sourcecache
element = project.load_elements(["import-dev.bst"])[0]
- element._initialize_state()
+ element._fetch(check_only=True)
source = list(element.sources())[0]
assert element._cached_sources()
@@ -133,7 +133,6 @@ def test_staged_source_build(tmpdir, datafiles, cli):
project.ensure_fully_loaded()
element = project.load_elements(["import-dev.bst"])[0]
- element._initialize_state()
# check consistency of the source
assert not element._cached_sources()