author     Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>  2018-04-02 22:45:34 +0900
committer  Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>  2018-04-03 22:49:10 +0900
commit     500f4f330fde8b6001a2f8d8921bd5b8acb79960 (patch)
tree       fbde3a81f1732ba219d131438ec8134ea6b2c298
parent     70c73b93c0aca0c36c17ad4686b7e71b32a8dc1d (diff)
download   buildstream-500f4f330fde8b6001a2f8d8921bd5b8acb79960.tar.gz
_context.py: Adhere to policy on private symbols
And adjust all surrounding sources for the changed symbols. This is part of issue #285.
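
The rename follows BuildStream's convention that a module whose filename starts with an underscore is already private, so the methods it exposes to other core modules do not need their own underscore prefix, while genuinely internal state keeps it. A minimal sketch of that policy, using a hypothetical stand-in rather than the real Context class:

    # Sketch only: illustrates the naming policy, not the actual BuildStream Context.
    class Context:
        def __init__(self):
            self._projects = []             # internal state stays underscored
            self._strict_build_plan = None  # was the public `strict_build_plan` before

        # Core-internal API: no underscore needed, since callers already
        # reach through a private module (_context.py) to get here.
        def add_project(self, project):
            self._projects.append(project)

        def get_toplevel_project(self):
            return self._projects[0]

    context = Context()
    context.add_project("base-project")
    print(context.get_toplevel_project())   # -> base-project
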
-rw-r--r--  buildstream/_artifactcache/artifactcache.py  |  2
-rw-r--r--  buildstream/_artifactcache/ostreecache.py    |  2
-rw-r--r--  buildstream/_context.py                      | 87
-rw-r--r--  buildstream/_frontend/app.py                 | 12
-rw-r--r--  buildstream/_frontend/widget.py              |  2
-rw-r--r--  buildstream/_pipeline.py                     | 10
-rw-r--r--  buildstream/_project.py                      |  4
-rw-r--r--  buildstream/_scheduler/job.py                |  8
-rw-r--r--  buildstream/_scheduler/queue.py              |  2
-rw-r--r--  buildstream/element.py                       |  6
-rw-r--r--  buildstream/plugin.py                        |  8
-rw-r--r--  buildstream/source.py                        |  6
-rw-r--r--  tests/plugins/pipeline.py                    |  2
-rw-r--r--  tests/project/plugins.py                     |  2
-rw-r--r--  tests/variables/variables.py                 |  2
15 files changed, 80 insertions(+), 75 deletions(-)
diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index 8937afb49..b10b6b0b9 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -90,7 +90,7 @@ def artifact_cache_specs_from_config_node(config_node):
# A list of ArtifactCacheSpec instances describing the remote artifact caches.
#
def configured_remote_artifact_cache_specs(context, project):
- project_overrides = context._get_overrides(project.name)
+ project_overrides = context.get_overrides(project.name)
project_extra_specs = artifact_cache_specs_from_config_node(project_overrides)
return list(utils._deduplicate(
diff --git a/buildstream/_artifactcache/ostreecache.py b/buildstream/_artifactcache/ostreecache.py
index 76b4c3507..29f6c0b8c 100644
--- a/buildstream/_artifactcache/ostreecache.py
+++ b/buildstream/_artifactcache/ostreecache.py
@@ -505,7 +505,7 @@ class OSTreeCache(ArtifactCache):
remote_results[remote_spec.url] = (push_url, pull_url, remote_refs)
# Prepare push_urls, pull_urls, and remote_refs for each project
- for project in self.context._get_projects():
+ for project in self.context.get_projects():
remote_specs = self.global_remote_specs
if project in self.project_remote_specs:
remote_specs = list(utils._deduplicate(remote_specs + self.project_remote_specs[project]))
diff --git a/buildstream/_context.py b/buildstream/_context.py
index 15fadddcc..727be6924 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -50,9 +50,6 @@ class Context():
# Filename indicating which configuration file was used, or None for the defaults
self.config_origin = None
- # Whether elements must be rebuilt when their dependencies have changed
- self.strict_build_plan = None
-
# The directory where various sources are stored
self.sourcedir = None
@@ -101,6 +98,9 @@ class Context():
# What to do when a build fails in non interactive mode
self.sched_error_action = 'continue'
+ # Whether elements must be rebuilt when their dependencies have changed
+ self._strict_build_plan = None
+
# Make sure the XDG vars are set in the environment before loading anything
self._init_xdg()
@@ -209,27 +209,27 @@ class Context():
"{}: on-error should be one of: {}".format(
provenance, ", ".join(valid_actions)))
- # _add_project():
+ # add_project():
#
# Add a project to the context.
#
# Args:
# project (Project): The project to add
#
- def _add_project(self, project):
+ def add_project(self, project):
self._projects.append(project)
- # _get_projects():
+ # get_projects():
#
# Return the list of projects in the context.
#
# Returns:
# (list): The list of projects
#
- def _get_projects(self):
+ def get_projects(self):
return self._projects
- # _get_toplevel_project():
+ # get_toplevel_project():
#
# Return the toplevel project, the one which BuildStream was
# invoked with as opposed to a junctioned subproject.
@@ -237,10 +237,10 @@ class Context():
# Returns:
# (list): The list of projects
#
- def _get_toplevel_project(self):
+ def get_toplevel_project(self):
return self._projects[0]
- # _get_overrides():
+ # get_overrides():
#
# Fetch the override dictionary for the active project. This returns
# a node loaded from YAML and as such, values loaded from the returned
@@ -252,10 +252,10 @@ class Context():
# Returns:
# (Mapping): The overrides dictionary for the specified project
#
- def _get_overrides(self, project_name):
+ def get_overrides(self, project_name):
return _yaml.node_get(self._project_overrides, Mapping, project_name, default_value={})
- # _get_strict():
+ # get_strict():
#
# Fetch whether we are strict or not
#
@@ -265,23 +265,23 @@ class Context():
# Returns:
# (bool): Whether or not to use strict build plan
#
- def _get_strict(self, project_name):
+ def get_strict(self, project_name):
# If it was set by the CLI, it overrides any config
- if self.strict_build_plan is not None:
- return self.strict_build_plan
+ if self._strict_build_plan is not None:
+ return self._strict_build_plan
- overrides = self._get_overrides(project_name)
+ overrides = self.get_overrides(project_name)
return _yaml.node_get(overrides, bool, 'strict', default_value=True)
- # _get_cache_key():
+ # get_cache_key():
#
# Returns the cache key, calculating it if necessary
#
# Returns:
# (str): A hex digest cache key for the Context
#
- def _get_cache_key(self):
+ def get_cache_key(self):
if self._cache_key is None:
# Anything that alters the build goes into the unique key
@@ -289,35 +289,28 @@ class Context():
return self._cache_key
- # _set_message_handler()
+ # set_message_handler()
#
# Sets the handler for any status messages propagated through
# the context.
#
# The message handler should have the same signature as
- # the _message() method
- def _set_message_handler(self, handler):
+ # the message() method
+ def set_message_handler(self, handler):
self._message_handler = handler
- # _push_message_depth() / _pop_message_depth()
+ # silent_messages():
#
- # For status messages, send the depth of timed
- # activities inside a given task through the message
+ # Returns:
+ # (bool): Whether messages are currently being silenced
#
- def _push_message_depth(self, silent_nested):
- self._message_depth.appendleft(silent_nested)
-
- def _pop_message_depth(self):
- assert self._message_depth
- self._message_depth.popleft()
-
- def _silent_messages(self):
+ def silent_messages(self):
for silent in self._message_depth:
if silent:
return True
return False
- # _message():
+ # message():
#
# Proxies a message back to the caller, this is the central
# point through which all messages pass.
@@ -325,7 +318,7 @@ class Context():
# Args:
# message: A Message object
#
- def _message(self, message):
+ def message(self, message):
# Tag message only once
if message.depth is None:
@@ -339,7 +332,7 @@ class Context():
self._message_handler(message, context=self)
return
- # _silence()
+ # silence()
#
# A context manager to silence messages, this behaves in
# the same way as the `silent_nested` argument of the
@@ -347,14 +340,14 @@ class Context():
# important messages will not be silenced.
#
@contextmanager
- def _silence(self):
+ def silence(self):
self._push_message_depth(True)
try:
yield
finally:
self._pop_message_depth()
- # _timed_activity()
+ # timed_activity()
#
# Context manager for performing timed activities and logging those
#
@@ -365,7 +358,7 @@ class Context():
# silent_nested (bool): If specified, nested messages will be silenced
#
@contextmanager
- def _timed_activity(self, activity_name, *, unique_id=None, detail=None, silent_nested=False):
+ def timed_activity(self, activity_name, *, unique_id=None, detail=None, silent_nested=False):
starttime = datetime.datetime.now()
stopped_time = None
@@ -384,7 +377,7 @@ class Context():
try:
# Push activity depth for status messages
message = Message(unique_id, MessageType.START, activity_name, detail=detail)
- self._message(message)
+ self.message(message)
self._push_message_depth(silent_nested)
yield
@@ -394,13 +387,25 @@ class Context():
elapsed = datetime.datetime.now() - starttime
message = Message(unique_id, MessageType.FAIL, activity_name, elapsed=elapsed)
self._pop_message_depth()
- self._message(message)
+ self.message(message)
raise
elapsed = datetime.datetime.now() - starttime
message = Message(unique_id, MessageType.SUCCESS, activity_name, elapsed=elapsed)
self._pop_message_depth()
- self._message(message)
+ self.message(message)
+
+ # _push_message_depth() / _pop_message_depth()
+ #
+ # For status messages, send the depth of timed
+ # activities inside a given task through the message
+ #
+ def _push_message_depth(self, silent_nested):
+ self._message_depth.appendleft(silent_nested)
+
+ def _pop_message_depth(self):
+ assert self._message_depth
+ self._message_depth.popleft()
# Force the resolved XDG variables into the environment,
# this is so that they can be used directly to specify
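
The renamed get_strict() above resolves strictness in a fixed order: an explicit CLI setting wins, then the per-project 'strict' override from user configuration, then a default of True. A rough sketch of that order, with a plain dict standing in for the _yaml overrides node (hypothetical helper, not the real code):

    def resolve_strict(cli_strict, project_overrides):
        # 1. A strictness flag given on the command line always wins
        if cli_strict is not None:
            return cli_strict
        # 2. Otherwise use the per-project 'strict' key from user config,
        # 3. defaulting to strict builds when nothing is configured.
        return project_overrides.get('strict', True)

    assert resolve_strict(None, {}) is True
    assert resolve_strict(None, {'strict': False}) is False
    assert resolve_strict(True, {'strict': False}) is True
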
diff --git a/buildstream/_frontend/app.py b/buildstream/_frontend/app.py
index c9c006b7d..ffe12d1db 100644
--- a/buildstream/_frontend/app.py
+++ b/buildstream/_frontend/app.py
@@ -138,7 +138,7 @@ class App():
# the command line when used, trumps the config files.
#
override_map = {
- 'strict': 'strict_build_plan',
+ 'strict': '_strict_build_plan',
'debug': 'log_debug',
'verbose': 'log_verbose',
'error_lines': 'log_error_lines',
@@ -178,7 +178,7 @@ class App():
message_format=self.context.log_message_format)
# Propagate pipeline feedback to the user
- self.context._set_message_handler(self.message_handler)
+ self.context.set_message_handler(self.message_handler)
try:
self.project = Project(directory, self.context, cli_options=self.main_options['option'])
@@ -375,8 +375,8 @@ class App():
# Remove workspace directory if prompted
if remove_dir:
- with self.context._timed_activity("Removing workspace directory {}"
- .format(workspace.path)):
+ with self.context.timed_activity("Removing workspace directory {}"
+ .format(workspace.path)):
try:
shutil.rmtree(workspace.path)
except OSError as e:
@@ -417,7 +417,7 @@ class App():
#
def message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self.context._message(
+ self.context.message(
Message(None, message_type, message, **args))
#
@@ -639,7 +639,7 @@ class App():
self.fail_messages[message.unique_id] = message
# Send to frontend if appropriate
- if self.context._silent_messages() and (message.message_type not in unconditional_messages):
+ if self.context.silent_messages() and (message.message_type not in unconditional_messages):
return
if self.status:
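
The override_map change above only swaps the attribute the 'strict' CLI option lands on; the map itself is the usual pattern of copying explicitly-given CLI options onto Context attributes. A hypothetical sketch of how such a map is typically applied (the real App code differs):

    override_map = {
        'strict': '_strict_build_plan',   # now targets the private attribute
        'debug': 'log_debug',
    }

    def apply_cli_overrides(context, cli_options):
        for cli_name, attr in override_map.items():
            value = cli_options.get(cli_name)
            if value is not None:         # only flags the user actually passed win
                setattr(context, attr, value)

    class _Ctx:                           # stand-in for the real Context
        _strict_build_plan = None
        log_debug = False

    ctx = _Ctx()
    apply_cli_overrides(ctx, {'strict': False})
    assert ctx._strict_build_plan is False
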
diff --git a/buildstream/_frontend/widget.py b/buildstream/_frontend/widget.py
index 90b573636..dd934182c 100644
--- a/buildstream/_frontend/widget.py
+++ b/buildstream/_frontend/widget.py
@@ -520,7 +520,7 @@ class LogLine(Widget):
values["Source Mirrors"] = context.sourcedir
values["Build Area"] = context.builddir
values["Artifact Cache"] = context.artifactdir
- values["Strict Build Plan"] = "Yes" if context.strict_build_plan else "No"
+ values["Strict Build Plan"] = "Yes" if context.get_strict(project.name) else "No"
values["Maximum Fetch Tasks"] = context.sched_fetchers
values["Maximum Build Tasks"] = context.sched_builders
values["Maximum Push Tasks"] = context.sched_pushers
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 676b068a7..22c3a0b2b 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -163,7 +163,7 @@ class Pipeline():
self.artifacts.set_remotes([ArtifactCacheSpec(add_remote_cache, push=True)])
has_remote_caches = True
if use_configured_remote_caches:
- for project in self.context._get_projects():
+ for project in self.context.get_projects():
artifact_caches = configured_remote_artifact_cache_specs(self.context, project)
if artifact_caches: # artifact_caches is a list of ArtifactCacheSpec instances
self.artifacts.set_remotes(artifact_caches, project=project)
@@ -307,16 +307,16 @@ class Pipeline():
#
def message(self, message_type, message, **kwargs):
args = dict(kwargs)
- self.context._message(
+ self.context.message(
Message(None, message_type, message, **args))
# Local timed activities, announces the jobs as well
#
@contextmanager
def timed_activity(self, activity_name, *, detail=None, silent_nested=False):
- with self.context._timed_activity(activity_name,
- detail=detail,
- silent_nested=silent_nested):
+ with self.context.timed_activity(activity_name,
+ detail=detail,
+ silent_nested=silent_nested):
yield
# Internal: Instantiates plugin-provided Element and Source instances
diff --git a/buildstream/_project.py b/buildstream/_project.py
index ce23452f1..b8ee61394 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -117,7 +117,7 @@ class Project():
self._load()
profile_end(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
- self._context._add_project(self)
+ self._context.add_project(self)
# translate_url():
#
@@ -204,7 +204,7 @@ class Project():
self._options.load_yaml_values(self._junction.options, transform=self._junction._subst_string)
# Collect option values specified in the user configuration
- overrides = self._context._get_overrides(self.name)
+ overrides = self._context.get_overrides(self.name)
override_options = _yaml.node_get(overrides, Mapping, 'options', default_value={})
self._options.load_yaml_values(override_options)
if self._cli_options:
diff --git a/buildstream/_scheduler/job.py b/buildstream/_scheduler/job.py
index 707b78ec0..d4f5238c5 100644
--- a/buildstream/_scheduler/job.py
+++ b/buildstream/_scheduler/job.py
@@ -210,7 +210,7 @@ class Job():
def message(self, plugin, message_type, message, **kwargs):
args = dict(kwargs)
args['scheduler'] = True
- self.scheduler.context._message(
+ self.scheduler.context.message(
Message(plugin._get_unique_id(),
message_type,
message,
@@ -230,7 +230,7 @@ class Job():
# Set the global message handler in this child
# process to forward messages to the parent process
self.queue = queue
- self.scheduler.context._set_message_handler(self.child_message_handler)
+ self.scheduler.context.set_message_handler(self.child_message_handler)
starttime = datetime.datetime.now()
stopped_time = None
@@ -378,7 +378,7 @@ class Job():
message.message_type = MessageType.WARN
# Send to frontend if appropriate
- if context._silent_messages() and (message.message_type not in unconditional_messages):
+ if context.silent_messages() and (message.message_type not in unconditional_messages):
return
if message.message_type == MessageType.LOG:
@@ -396,7 +396,7 @@ class Job():
if envelope.message_type == 'message':
# Propagate received messages from children
# back through the context.
- self.scheduler.context._message(envelope.message)
+ self.scheduler.context.message(envelope.message)
elif envelope.message_type == 'error':
# For regression tests only, save the last error domain / reason
# reported from a child task in the main process, this global state
diff --git a/buildstream/_scheduler/queue.py b/buildstream/_scheduler/queue.py
index f3e0bd7f3..0cea1005f 100644
--- a/buildstream/_scheduler/queue.py
+++ b/buildstream/_scheduler/queue.py
@@ -266,4 +266,4 @@ class Queue():
def message(self, element, message_type, brief, **kwargs):
context = element._get_context()
message = Message(element._get_unique_id(), message_type, brief, **kwargs)
- context._message(message)
+ context.message(message)
diff --git a/buildstream/element.py b/buildstream/element.py
index 418096c58..b127a0d8e 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1009,7 +1009,7 @@ class Element(Plugin):
self.__cache_key_dict = {
'artifact-version': "{}.{}".format(_BST_CORE_ARTIFACT_VERSION,
self.BST_ARTIFACT_VERSION),
- 'context': context._get_cache_key(),
+ 'context': context.get_cache_key(),
'project': project._get_cache_key(),
'element': self.get_unique_key(),
'execution-environment': self.__sandbox_config.get_unique_key(),
@@ -1500,7 +1500,7 @@ class Element(Plugin):
# Fetch the main toplevel project, in case this is a junctioned
# subproject, we want to use the rules defined by the main one.
context = self._get_context()
- project = context._get_toplevel_project()
+ project = context.get_toplevel_project()
if prompt is not None:
environment['PS1'] = prompt
@@ -1609,7 +1609,7 @@ class Element(Plugin):
def _get_strict(self):
project = self._get_project()
context = self._get_context()
- return context._get_strict(project.name)
+ return context.get_strict(project.name)
# _pull_pending()
#
diff --git a/buildstream/plugin.py b/buildstream/plugin.py
index 28a04600e..c67e177bf 100644
--- a/buildstream/plugin.py
+++ b/buildstream/plugin.py
@@ -439,9 +439,9 @@ class Plugin():
# This will raise SourceError on its own
self.call(... command which takes time ...)
"""
- with self.__context._timed_activity(activity_name,
- detail=detail,
- silent_nested=silent_nested):
+ with self.__context.timed_activity(activity_name,
+ detail=detail,
+ silent_nested=silent_nested):
yield
def call(self, *popenargs, fail=None, **kwargs):
@@ -615,7 +615,7 @@ class Plugin():
def __message(self, message_type, brief, **kwargs):
message = Message(self.__unique_id, message_type, brief, **kwargs)
- self.__context._message(message)
+ self.__context.message(message)
def __note_command(self, output, *popenargs, **kwargs):
workdir = os.getcwd()
diff --git a/buildstream/source.py b/buildstream/source.py
index cf362efc2..7de1780fb 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -318,7 +318,7 @@ class Source(Plugin):
# Source consistency interrogations are silent.
context = self._get_context()
- with context._silence():
+ with context.silence():
self.__consistency = self.get_consistency()
if self._has_workspace() and \
@@ -478,7 +478,7 @@ class Source(Plugin):
def _load_ref(self):
context = self._get_context()
project = self._get_project()
- toplevel = context._get_toplevel_project()
+ toplevel = context.get_toplevel_project()
redundant_ref = None
element_name = self.__element_name
@@ -536,7 +536,7 @@ class Source(Plugin):
context = self._get_context()
project = self._get_project()
- toplevel = context._get_toplevel_project()
+ toplevel = context.get_toplevel_project()
provenance = self._get_provenance()
element_name = self.__element_name
diff --git a/tests/plugins/pipeline.py b/tests/plugins/pipeline.py
index 805155731..012b48c28 100644
--- a/tests/plugins/pipeline.py
+++ b/tests/plugins/pipeline.py
@@ -21,7 +21,7 @@ def create_pipeline(tmpdir, basedir, target):
def dummy_handler(message, context):
pass
- context._set_message_handler(dummy_handler)
+ context.set_message_handler(dummy_handler)
return Pipeline(context, project, [target], [])
diff --git a/tests/project/plugins.py b/tests/project/plugins.py
index 8907c7eaf..432983dc5 100644
--- a/tests/project/plugins.py
+++ b/tests/project/plugins.py
@@ -19,7 +19,7 @@ def create_pipeline(tmpdir, basedir, target):
def dummy_handler(message, context):
pass
- context._set_message_handler(dummy_handler)
+ context.set_message_handler(dummy_handler)
return Pipeline(context, project, [target], [])
diff --git a/tests/variables/variables.py b/tests/variables/variables.py
index 944d19818..233ea5c1f 100644
--- a/tests/variables/variables.py
+++ b/tests/variables/variables.py
@@ -19,7 +19,7 @@ def create_pipeline(tmpdir, basedir, target):
def dummy_handler(message, context):
pass
- context._set_message_handler(dummy_handler)
+ context.set_message_handler(dummy_handler)
return Pipeline(context, project, [target], [])