diff options
author | Jürg Billeter <j@bitron.ch> | 2018-05-10 12:09:32 +0200 |
---|---|---|
committer | Jürg Billeter <j@bitron.ch> | 2018-05-11 07:57:37 +0200 |
commit | 6d71826e52049cc068ed137f17159242276560f6 (patch) | |
tree | e81b15ee1ae8cfb0b3c5f9607f320665bff612ec /buildstream/_pipeline.py | |
parent | e1420437a2d82af099bdb3d273823effbea35ce4 (diff) | |
download | buildstream-6d71826e52049cc068ed137f17159242276560f6.tar.gz |
_pipeline.py: Remove unused ignore_cache parameter from _Planner
Diffstat (limited to 'buildstream/_pipeline.py')
-rw-r--r-- | buildstream/_pipeline.py | 14 |
1 file changed, 7 insertions, 7 deletions
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py index 8861556c9..55e2c47fb 100644 --- a/buildstream/_pipeline.py +++ b/buildstream/_pipeline.py @@ -456,7 +456,7 @@ class _Planner(): # Here we want to traverse the same element more than once when # it is reachable from multiple places, with the interest of finding # the deepest occurance of every element - def plan_element(self, element, depth, ignore_cache): + def plan_element(self, element, depth): if element in self.visiting_elements: # circular dependency, already being processed return @@ -468,19 +468,19 @@ class _Planner(): self.visiting_elements.add(element) for dep in element.dependencies(Scope.RUN, recurse=False): - self.plan_element(dep, depth, ignore_cache) + self.plan_element(dep, depth) # Dont try to plan builds of elements that are cached already - if ignore_cache or (not element._cached() and not element._remotely_cached()): + if not element._cached() and not element._remotely_cached(): for dep in element.dependencies(Scope.BUILD, recurse=False): - self.plan_element(dep, depth + 1, ignore_cache) + self.plan_element(dep, depth + 1) self.depth_map[element] = depth self.visiting_elements.remove(element) - def plan(self, roots, ignore_cache=False): + def plan(self, roots): for root in roots: - self.plan_element(root, 0, ignore_cache) + self.plan_element(root, 0) depth_sorted = sorted(self.depth_map.items(), key=itemgetter(1), reverse=True) - return [item[0] for item in depth_sorted if ignore_cache or not item[0]._cached()] + return [item[0] for item in depth_sorted if not item[0]._cached()] |