summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--  src/buildstream/_loader/loadelement.pyx  1
-rw-r--r--  src/buildstream/_loader/loader.py  1
-rw-r--r--  src/buildstream/_pipeline.py  30
-rw-r--r--  src/buildstream/_stream.py  1
-rw-r--r--  src/buildstream/element.py  4
5 files changed, 4 insertions, 33 deletions
diff --git a/src/buildstream/_loader/loadelement.pyx b/src/buildstream/_loader/loadelement.pyx
index 210869e51..f69e13857 100644
--- a/src/buildstream/_loader/loadelement.pyx
+++ b/src/buildstream/_loader/loadelement.pyx
@@ -286,7 +286,6 @@ cdef class LoadElement:
from ..element import Element
element = Element._new_from_load_element(self)
- element._initialize_state()
# Custom error for link dependencies, since we don't completely
# parse their dependencies we cannot rely on the built-in ElementError.
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index 54efd27ae..3d835a983 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -817,7 +817,6 @@ class Loader:
)
element = Element._new_from_load_element(load_element)
- element._initialize_state()
# Handle the case where a subproject has no ref
#
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index 6e41d70c8..b5e01d9a9 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -79,36 +79,6 @@ class Pipeline:
return tuple(element_groups)
- # resolve_elements()
- #
- # Resolve element state and cache keys.
- #
- # Args:
- # targets (list of Element): The list of toplevel element targets
- #
- def resolve_elements(self, targets):
- with self._context.messenger.simple_task("Resolving cached state", silent_nested=True) as task:
- # We need to go through the project to access the loader
- if task:
- task.set_maximum_progress(self._project.loader.loaded)
-
- # XXX: Now that Element._update_state() can trigger recursive update_state calls
- # it is possible that we could get a RecursionError. However, this is unlikely
- # to happen, even for large projects (tested with the Debian stack). Although,
- # if it does become a problem we may have to set the recursion limit to a
- # greater value.
- for element in self.dependencies(targets, _Scope.ALL):
- # Determine initial element state.
- element._initialize_state()
-
- # We may already have Elements which are cached and have their runtimes
- # cached, if this is the case, we should immediately notify their reverse
- # dependencies.
- element._update_ready_for_runtime_and_cached()
-
- if task:
- task.add_current_progress()
-
# check_remotes()
#
# Check if the target artifact is cached in any of the available remotes
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 27f2ed497..dfe9e1466 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -1348,7 +1348,6 @@ class Stream:
# Now move on to loading primary selection.
#
- self._pipeline.resolve_elements(self.targets)
selected = self._pipeline.get_selection(self.targets, selection, silent=False)
selected = self._pipeline.except_elements(self.targets, selected, except_elements)
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index e27de6e96..4bee57054 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -1132,6 +1132,8 @@ class Element(Plugin):
element.__preflight()
+ element._initialize_state()
+
if task:
task.add_current_progress()
@@ -2871,6 +2873,8 @@ class Element(Plugin):
self.__strict_cache_key = artifact.strict_key
self.__weak_cache_key = artifact.weak_key
+ self._initialize_state()
+
@classmethod
def __compose_default_splits(cls, project, defaults, first_pass):