summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorTristan Van Berkom <tristan.vanberkom@codethink.co.uk>2018-04-30 22:17:48 +0900
committerTristan Van Berkom <tristan.vanberkom@codethink.co.uk>2018-05-08 03:59:38 +0900
commitc81fb2dabb89a5a443fe5d9bbf8a2c19f45ac276 (patch)
tree2b0678d846a08154f912c503b1b087679a6c870f
parent541cd76022ca7e9ef8e4114495b908943fe3e8b1 (diff)
downloadbuildstream-c81fb2dabb89a5a443fe5d9bbf8a2c19f45ac276.tar.gz
Moving 'fetch_subprojects' configuration to stream <--> loader
This was previously decided in the CLI, but knowledge of what to initialize has been moved to Stream(). Now there is no longer any point in storing this configuration in the Context; instead, the Stream() decides it when asking the Pipeline() to invoke the Loader().
-rw-r--r--buildstream/_context.py5
-rw-r--r--buildstream/_frontend/app.py16
-rw-r--r--buildstream/_frontend/cli.py12
-rw-r--r--buildstream/_loader/loader.py11
-rw-r--r--buildstream/_pipeline.py12
-rw-r--r--buildstream/_stream.py41
6 files changed, 53 insertions, 44 deletions
diff --git a/buildstream/_context.py b/buildstream/_context.py
index 9ae2ec57a..4d5b2b87d 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -45,10 +45,7 @@ from ._artifactcache import artifact_cache_specs_from_config_node
#
class Context():
- def __init__(self, *, fetch_subprojects=False):
-
- # Whether to automatically fetch subprojects in this session
- self.fetch_subprojects = fetch_subprojects
+ def __init__(self):
# Filename indicating which configuration file was used, or None for the defaults
self.config_origin = None
diff --git a/buildstream/_frontend/app.py b/buildstream/_frontend/app.py
index 5b0cfc4b1..50787b1ec 100644
--- a/buildstream/_frontend/app.py
+++ b/buildstream/_frontend/app.py
@@ -134,12 +134,8 @@ class App():
# partial initialization is useful for some contexts where we dont
# want to load the pipeline, such as executing workspace commands.
#
- # Args:
- # fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
- # loading process, if they are not yet locally cached
- #
@contextmanager
- def partially_initialized(self, *, fetch_subprojects=False):
+ def partially_initialized(self):
directory = self._main_options['directory']
config = self._main_options['config']
@@ -147,7 +143,7 @@ class App():
# Load the Context
#
try:
- self.context = Context(fetch_subprojects=fetch_subprojects)
+ self.context = Context()
self.context.load(config)
except BstError as e:
self._error_exit(e, "Error loading user configuration")
@@ -241,8 +237,6 @@ class App():
#
# Args:
# session_name (str): The name of the session, or None for no session
- # fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
- # loading process, if they are not yet locally cached
#
# Note that the except_ argument may have a subtly different meaning depending
# on the activity performed on the Pipeline. In normal circumstances the except_
@@ -253,14 +247,12 @@ class App():
# the session header and summary, and time the main session from startup time.
#
@contextmanager
- def initialized(self, *,
- session_name=None,
- fetch_subprojects=False):
+ def initialized(self, *, session_name=None):
self._session_name = session_name
# Start with the early stage init, this enables logging right away
- with self.partially_initialized(fetch_subprojects=fetch_subprojects):
+ with self.partially_initialized():
# Mark the beginning of the session
if session_name:
diff --git a/buildstream/_frontend/cli.py b/buildstream/_frontend/cli.py
index 1ac4548b6..2b5c77f8a 100644
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -235,7 +235,7 @@ def build(app, elements, all_, track_, track_save, track_all, track_except, trac
if track_all:
track_ = elements
- with app.initialized(session_name="Build", fetch_subprojects=True):
+ with app.initialized(session_name="Build"):
app.stream.build(elements,
track_targets=track_,
track_except=track_except,
@@ -279,7 +279,7 @@ def fetch(app, elements, deps, track_, except_, track_cross_junctions):
click.echo("ERROR: The --track-cross-junctions option can only be used with --track", err=True)
sys.exit(-1)
- with app.initialized(session_name="Fetch", fetch_subprojects=True):
+ with app.initialized(session_name="Fetch"):
app.stream.fetch(elements,
selection=deps,
except_targets=except_,
@@ -315,7 +315,7 @@ def track(app, elements, deps, except_, cross_junctions):
none: No dependencies, just the specified elements
all: All dependencies of all specified elements
"""
- with app.initialized(session_name="Track", fetch_subprojects=True):
+ with app.initialized(session_name="Track"):
app.stream.track(elements,
selection=deps,
except_targets=except_,
@@ -347,7 +347,7 @@ def pull(app, elements, deps, remote):
none: No dependencies, just the element itself
all: All dependencies
"""
- with app.initialized(session_name="Pull", fetch_subprojects=True):
+ with app.initialized(session_name="Pull"):
app.stream.pull(elements, selection=deps, remote=remote)
@@ -375,7 +375,7 @@ def push(app, elements, deps, remote):
none: No dependencies, just the element itself
all: All dependencies
"""
- with app.initialized(session_name="Push", fetch_subprojects=True):
+ with app.initialized(session_name="Push"):
app.stream.push(elements, selection=deps, remote=remote)
@@ -716,7 +716,7 @@ def source_bundle(app, element, force, directory,
track_, compression, except_):
"""Produce a source bundle to be manually executed
"""
- with app.initialized(fetch_subprojects=True):
+ with app.initialized():
app.stream.source_bundle(element, directory,
track_first=track_,
force=force,
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index fd0f9445f..e0ceb4fb9 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -51,10 +51,11 @@ from . import MetaSource
# parent (Loader): A parent Loader object, in the case this is a junctioned Loader
# tempdir (str): A directory to cleanup with the Loader, given to the loader by a parent
# loader in the case that this loader is a subproject loader.
+# fetch_subprojects (bool): Whether to fetch subprojects while loading
#
class Loader():
- def __init__(self, context, project, filenames, *, parent=None, tempdir=None):
+ def __init__(self, context, project, filenames, *, parent=None, tempdir=None, fetch_subprojects=False):
# Ensure we have an absolute path for the base directory
basedir = project.element_path
@@ -78,6 +79,7 @@ class Loader():
#
# Private members
#
+ self._fetch_subprojects = fetch_subprojects
self._context = context
self._options = project.options # Project options (OptionPool)
self._basedir = basedir # Base project directory
@@ -475,7 +477,7 @@ class Loader():
# Handle the case where a subproject needs to be fetched
#
if source.get_consistency() == Consistency.RESOLVED:
- if self._context.fetch_subprojects:
+ if self._fetch_subprojects:
if ticker:
ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
source.fetch()
@@ -511,7 +513,10 @@ class Loader():
else:
raise
- loader = Loader(self._context, project, [], parent=self, tempdir=basedir)
+ loader = Loader(self._context, project, [],
+ parent=self,
+ tempdir=basedir,
+ fetch_subprojects=self._fetch_subprojects)
self._loaders[filename] = loader
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 7f3c657fa..010e951b5 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -69,9 +69,8 @@ class PipelineSelection():
# current source refs will not be the effective refs.
# rewritable (bool): Whether the loaded files should be rewritable
# this is a bit more expensive due to deep copies
-# use_configured_remote_caches (bool): Whether to connect to configured artifact remotes.
-# add_remote_cache (str): Adds an additional artifact remote URL, which is
-# prepended to the list of remotes (and thus given highest priority).
+# fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
+# loading process, if they are not yet locally cached
#
# The ticker methods will be called with an element name for each tick, a final
# tick with None as the argument is passed to signal that processing of this
@@ -86,7 +85,9 @@ class PipelineSelection():
#
class Pipeline():
- def __init__(self, context, project, artifacts, targets, except_, rewritable=False):
+ def __init__(self, context, project, artifacts, targets, except_, *,
+ rewritable=False,
+ fetch_subprojects=True):
self.context = context # The Context
self.project = project # The toplevel project
@@ -105,7 +106,8 @@ class Pipeline():
# Early initialization
#
- self._loader = Loader(self.context, self.project, targets + except_)
+ self._loader = Loader(self.context, self.project, targets + except_,
+ fetch_subprojects=fetch_subprojects)
with self.context.timed_activity("Loading pipeline", silent_nested=True):
meta_elements = self._loader.load(rewritable, None)
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index 09ad51d1b..09433147c 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -86,7 +86,8 @@ class Stream():
except_targets=(),
downloadable=False):
self.init_pipeline(targets, except_=except_targets,
- use_configured_remote_caches=downloadable)
+ use_configured_remote_caches=downloadable,
+ fetch_subprojects=False)
return self._pipeline.get_selection(selection)
# shell()
@@ -153,7 +154,8 @@ class Stream():
rewritable=rewritable,
use_configured_remote_caches=True,
track_elements=track_targets,
- track_cross_junctions=track_cross_junctions)
+ track_cross_junctions=track_cross_junctions,
+ fetch_subprojects=True)
if build_all:
plan = self._pipeline.dependencies(Scope.ALL)
@@ -229,7 +231,8 @@ class Stream():
except_=except_targets,
rewritable=rewritable,
track_elements=targets if track_targets else None,
- track_cross_junctions=track_cross_junctions)
+ track_cross_junctions=track_cross_junctions,
+ fetch_subprojects=True)
fetch_plan = self._pipeline.get_selection(selection)
@@ -260,7 +263,8 @@ class Stream():
rewritable=True,
track_elements=targets,
track_cross_junctions=cross_junctions,
- track_selection=selection)
+ track_selection=selection,
+ fetch_subprojects=True)
track = TrackQueue(self._scheduler)
track.enqueue(self._pipeline._track_elements)
@@ -294,7 +298,9 @@ class Stream():
self.init_pipeline(targets,
use_configured_remote_caches=use_configured_remote_caches,
- add_remote_cache=remote)
+ add_remote_cache=remote,
+ fetch_subprojects=True)
+
elements = self._pipeline.get_selection(selection)
if not self._pipeline._artifacts.has_fetch_remotes():
@@ -336,7 +342,9 @@ class Stream():
self.init_pipeline(targets,
use_configured_remote_caches=use_configured_remote_caches,
- add_remote_cache=remote)
+ add_remote_cache=remote,
+ fetch_subprojects=True)
+
elements = self._pipeline.get_selection(selection)
if not self._pipeline._artifacts.has_push_remotes():
@@ -374,7 +382,7 @@ class Stream():
integrate=True,
hardlinks=False):
- self.init_pipeline((target,))
+ self.init_pipeline((target,), fetch_subprojects=True)
# We only have one target in a checkout command
target = self._pipeline.targets[0]
@@ -428,7 +436,8 @@ class Stream():
self.init_pipeline((target,),
track_elements=[target] if track_first else None,
track_selection=PipelineSelection.NONE,
- rewritable=track_first)
+ rewritable=track_first,
+ fetch_subprojects=False)
target = self._pipeline.targets[0]
workdir = os.path.abspath(directory)
@@ -516,7 +525,8 @@ class Stream():
self.init_pipeline(targets,
track_elements=targets if track_first else None,
track_selection=PipelineSelection.NONE,
- rewritable=track_first)
+ rewritable=track_first,
+ fetch_subprojects=False)
# Do the tracking first
if track_first:
@@ -602,7 +612,8 @@ class Stream():
self.init_pipeline((target,),
track_elements=[target] if track_first else None,
track_selection=PipelineSelection.NONE,
- rewritable=track_first)
+ rewritable=track_first,
+ fetch_subprojects=True)
# source-bundle only supports one target
target = self._pipeline.targets[0]
@@ -787,8 +798,7 @@ class Stream():
# track_elements (list of elements): Elements which are to be tracked
# track_cross_junctions (bool): Whether tracking is allowed to cross junction boundaries
# track_selection (PipelineSelection): The selection algorithm for track elements
- # fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
- # loading process, if they are not yet locally cached
+ # fetch_subprojects (bool): Whether to fetch subprojects while loading
#
# Note that the except_ argument may have a subtly different meaning depending
# on the activity performed on the Pipeline. In normal circumstances the except_
@@ -802,12 +812,15 @@ class Stream():
add_remote_cache=None,
track_elements=None,
track_cross_junctions=False,
- track_selection=PipelineSelection.ALL):
+ track_selection=PipelineSelection.ALL,
+ fetch_subprojects=True):
profile_start(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, '-') for t in elements))
self._pipeline = Pipeline(self._context, self._project, self._artifacts,
- elements, except_, rewritable=rewritable)
+ elements, except_,
+ rewritable=rewritable,
+ fetch_subprojects=fetch_subprojects)
self._pipeline.initialize(use_configured_remote_caches=use_configured_remote_caches,
add_remote_cache=add_remote_cache,