author     Darius Makovsky <traveltissues@protonmail.com>  2019-12-09 09:43:10 +0000
committer  Darius Makovsky <traveltissues@protonmail.com>  2019-12-09 09:43:10 +0000
commit     89aecef1d5e32ce90c254824dbd4ac54bbd141c0 (patch)
tree       43ccd8409d4698ef7887153c96aca74743e5aa29
parent     f47bc0d2831359372ccab2c038c604257a7165ab (diff)
parent     88e16b6bc8b55fd8a669e9f81c40af4fc95a0712 (diff)
download   buildstream-89aecef1d5e32ce90c254824dbd4ac54bbd141c0.tar.gz
Merge branch 'traveltissues/remove-unused-functions' into 'master'
remove unused functions 1/2

See merge request BuildStream/buildstream!1753
-rw-r--r--  src/buildstream/_artifactcache.py        52
-rw-r--r--  src/buildstream/_basecache.py             12
-rw-r--r--  src/buildstream/_cas/cascache.py           4
-rw-r--r--  src/buildstream/_cas/casserver.py         49
-rw-r--r--  src/buildstream/_context.py               10
-rw-r--r--  src/buildstream/_pipeline.py              32
-rw-r--r--  src/buildstream/_profile.py                2
-rw-r--r--  src/buildstream/_project.py               13
-rw-r--r--  src/buildstream/_remote.py                 2
-rw-r--r--  src/buildstream/_scheduler/resources.py   50
-rw-r--r--  src/buildstream/_workspaces.py            46
-rw-r--r--  src/buildstream/element.py                26
12 files changed, 2 insertions, 296 deletions
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index 02dd21d41..69a65833c 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -26,7 +26,6 @@ from ._exceptions import ArtifactError, CASError, CacheError, CASRemoteError, Re
from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, artifact_pb2, artifact_pb2_grpc
from ._remote import BaseRemote
-from .storage._casbaseddirectory import CasBasedDirectory
from ._artifact import Artifact
from . import utils
@@ -413,21 +412,6 @@ class ArtifactCache(BaseCache):
if not os.path.exists(os.path.join(self._basedir, newref)):
os.link(os.path.join(self._basedir, oldref), os.path.join(self._basedir, newref))
- # get_artifact_logs():
- #
- # Get the logs of an existing artifact
- #
- # Args:
- # ref (str): The ref of the artifact
- #
- # Returns:
- # logsdir (CasBasedDirectory): A CasBasedDirectory containing the artifact's logs
- #
- def get_artifact_logs(self, ref):
- cache_id = self.cas.resolve_ref(ref, update_mtime=True)
- vdir = CasBasedDirectory(self.cas, digest=cache_id).descend("logs")
- return vdir
-
# fetch_missing_blobs():
#
# Fetch missing blobs from configured remote repositories.
@@ -508,42 +492,6 @@ class ArtifactCache(BaseCache):
# Local Private Methods #
################################################
- # _reachable_directories()
- #
- # Returns:
- # (iter): Iterator over directories digests available from artifacts.
- #
- def _reachable_directories(self):
- for root, _, files in os.walk(self._basedir):
- for artifact_file in files:
- artifact = artifact_pb2.Artifact()
- with open(os.path.join(root, artifact_file), "r+b") as f:
- artifact.ParseFromString(f.read())
-
- if str(artifact.files):
- yield artifact.files
-
- if str(artifact.buildtree):
- yield artifact.buildtree
-
- # _reachable_digests()
- #
- # Returns:
- # (iter): Iterator over single file digests in artifacts
- #
- def _reachable_digests(self):
- for root, _, files in os.walk(self._basedir):
- for artifact_file in files:
- artifact = artifact_pb2.Artifact()
- with open(os.path.join(root, artifact_file), "r+b") as f:
- artifact.ParseFromString(f.read())
-
- if str(artifact.public_data):
- yield artifact.public_data
-
- for log_file in artifact.logs:
- yield log_file.digest
-
# _push_artifact_blobs()
#
# Push the blobs that make up an artifact to the remote server.
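Note: the two traversals deleted above share one pattern: deserialize every Artifact proto under the cache directory and yield the digests it references. A condensed sketch of that pattern (the proto import path is assumed from the diff header above):

    import os
    from buildstream._protos.buildstream.v2 import artifact_pb2  # assumed import path

    def reachable_directories(basedir):
        # Yield the files/buildtree directory digests of every artifact proto.
        for root, _, files in os.walk(basedir):
            for artifact_file in files:
                artifact = artifact_pb2.Artifact()
                with open(os.path.join(root, artifact_file), "rb") as f:
                    artifact.ParseFromString(f.read())
                if str(artifact.files):  # an empty message stringifies to ""
                    yield artifact.files
                if str(artifact.buildtree):
                    yield artifact.buildtree
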
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index dff7742e7..91eae6ae4 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -64,18 +64,6 @@ class BaseCache:
self._basedir = None
- # has_open_grpc_channels():
- #
- # Return whether there are gRPC channel instances. This is used to safeguard
- # against fork() with open gRPC channels.
- #
- def has_open_grpc_channels(self):
- for project_remotes in chain(self._index_remotes.values(), self._storage_remotes.values()):
- for remote in project_remotes:
- if remote.channel:
- return True
- return False
-
# close_grpc_channels():
#
# Close open gRPC channels.
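Note: the deleted check guarded fork() against open gRPC channels. A minimal standalone sketch of the same probe, assuming index/storage remote mappings shaped as in the original class:

    from itertools import chain

    def has_open_grpc_channels(index_remotes, storage_remotes):
        # True if any configured remote still holds an open gRPC channel;
        # such channels must be closed before calling os.fork().
        for project_remotes in chain(index_remotes.values(), storage_remotes.values()):
            for remote in project_remotes:
                if remote.channel:
                    return True
        return False
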
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index c45a199fe..9c0b46d1e 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -368,10 +368,6 @@ class CASCache:
return utils._message_digest(root_directory)
- def update_tree_mtime(self, tree):
- reachable = set()
- self._reachable_refs_dir(reachable, tree, update_mtime=True)
-
# remote_missing_blobs_for_directory():
#
# Determine which blobs of a directory tree are missing on the remote.
diff --git a/src/buildstream/_cas/casserver.py b/src/buildstream/_cas/casserver.py
index e4acbde55..882e7e6d4 100644
--- a/src/buildstream/_cas/casserver.py
+++ b/src/buildstream/_cas/casserver.py
@@ -24,7 +24,6 @@ import logging
import os
import signal
import sys
-import uuid
import grpc
from google.protobuf.message import DecodeError
@@ -514,10 +513,6 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
return artifact
- def ArtifactStatus(self, request, context):
- self.logger.info("Retrieving status")
- return artifact_pb2.ArtifactStatusResponse()
-
def _check_directory(self, name, digest, context):
try:
self.resolve_digest(digest)
@@ -581,47 +576,3 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
os.makedirs(os.path.dirname(path), exist_ok=True)
with save_file_atomic(path, "w+b") as f:
f.write(source_proto.SerializeToString())
-
-
-def _digest_from_download_resource_name(resource_name):
- parts = resource_name.split("/")
-
- # Accept requests from non-conforming BuildStream 1.1.x clients
- if len(parts) == 2:
- parts.insert(0, "blobs")
-
- if len(parts) != 3 or parts[0] != "blobs":
- return None
-
- try:
- digest = remote_execution_pb2.Digest()
- digest.hash = parts[1]
- digest.size_bytes = int(parts[2])
- return digest
- except ValueError:
- return None
-
-
-def _digest_from_upload_resource_name(resource_name):
- parts = resource_name.split("/")
-
- # Accept requests from non-conforming BuildStream 1.1.x clients
- if len(parts) == 2:
- parts.insert(0, "uploads")
- parts.insert(1, str(uuid.uuid4()))
- parts.insert(2, "blobs")
-
- if len(parts) < 5 or parts[0] != "uploads" or parts[2] != "blobs":
- return None
-
- try:
- uuid_ = uuid.UUID(hex=parts[1])
- if uuid_.version != 4:
- return None
-
- digest = remote_execution_pb2.Digest()
- digest.hash = parts[3]
- digest.size_bytes = int(parts[4])
- return digest
- except ValueError:
- return None
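Note: both deleted helpers parsed ByteStream resource names, `blobs/{hash}/{size}` for downloads and `uploads/{uuid}/blobs/{hash}/{size}` for uploads, with a compatibility shim for the short names sent by non-conforming BuildStream 1.1.x clients. A reduced sketch of the download case (assuming the `remote_execution_pb2` proto module used elsewhere in this file):

    # Assumed proto import path; BuildStream vendors these protos.
    from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    def digest_from_download_resource_name(resource_name):
        # "blobs/{hash}/{size}" -> Digest, or None if the name is malformed.
        parts = resource_name.split("/")
        if len(parts) == 2:  # tolerate short 1.1.x-style names
            parts.insert(0, "blobs")
        if len(parts) != 3 or parts[0] != "blobs":
            return None
        try:
            digest = remote_execution_pb2.Digest()
            digest.hash = parts[1]
            digest.size_bytes = int(parts[2])
            return digest
        except ValueError:
            return None
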
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index f426f4bb0..47001440a 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -488,16 +488,6 @@ class Context:
# value which we cache here too.
return self._strict_build_plan
- # set_artifact_directories_optional()
- #
- # This indicates that the current context (command or configuration)
- # does not require directory trees of all artifacts to be available in the
- # local cache.
- #
- def set_artifact_directories_optional(self):
- self.require_artifact_directories = False
- self.require_artifact_files = False
-
# set_artifact_files_optional()
#
# This indicates that the current context (command or configuration)
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index a40d89861..aee0c55da 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -317,38 +317,6 @@ class Pipeline:
# in before.
return [element for element in elements if element in visited]
- # targets_include()
- #
- # Checks whether the given targets are, or depend on some elements
- #
- # Args:
- # targets (list of Element): A list of targets
- # elements (list of Element): List of elements to check
- #
- # Returns:
- # (bool): True if all of `elements` are among the `targets`, or are
- # somehow depended on by `targets`.
- #
- def targets_include(self, targets, elements):
- target_element_set = set(self.dependencies(targets, Scope.ALL))
- element_set = set(elements)
- return element_set.issubset(target_element_set)
-
- # subtract_elements()
- #
- # Subtract a subset of elements
- #
- # Args:
- # elements (list of Element): The element list
- # subtract (list of Element): List of elements to subtract from elements
- #
- # Returns:
- # (list): The original elements list, with elements in subtract removed
- #
- def subtract_elements(self, elements, subtract):
- subtract_set = set(subtract)
- return [e for e in elements if e not in subtract_set]
-
# add_elements()
#
# Add to a list of elements all elements that are not already in it
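Note: both removed helpers are thin wrappers over set algebra, which is why call sites can inline them. A sketch of the equivalent standalone functions (`resolve_dependencies` is a hypothetical stand-in for `Pipeline.dependencies()`):

    def targets_include(resolve_dependencies, targets, elements):
        # True if every element is a target or a transitive dependency of one.
        target_element_set = set(resolve_dependencies(targets))
        return set(elements).issubset(target_element_set)

    def subtract_elements(elements, subtract):
        # Order-preserving removal of `subtract` from `elements`.
        subtract_set = set(subtract)
        return [e for e in elements if e not in subtract_set]
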
diff --git a/src/buildstream/_profile.py b/src/buildstream/_profile.py
index b3182b630..0219e8374 100644
--- a/src/buildstream/_profile.py
+++ b/src/buildstream/_profile.py
@@ -73,7 +73,7 @@ class _Profile:
def __enter__(self):
self.start()
- def __exit__(self, exc_type, exc_value, traceback):
+ def __exit__(self, _exc_type, _exc_value, traceback):
self.stop()
self.save()
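Note: the only change in this file renames the unused `__exit__` arguments with a leading underscore, the conventional way to mark required-but-unused parameters for linters. A self-contained sketch of the convention:

    import time

    class Timed:
        def __enter__(self):
            self.begin = time.monotonic()
            return self

        def __exit__(self, _exc_type, _exc_value, _traceback):
            # The protocol requires all three arguments even when unused.
            self.elapsed = time.monotonic() - self.begin
            return False  # never suppress exceptions
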
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 67d41a6b5..fb2ae1a88 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -378,19 +378,6 @@ class Project:
else:
return self.config.element_factory.create(self._context, self, meta)
- # create_artifact_element()
- #
- # Instantiate and return an ArtifactElement
- #
- # Args:
- # ref (str): A string of the artifact ref
- #
- # Returns:
- # (ArtifactElement): A newly created ArtifactElement object of the appropriate kind
- #
- def create_artifact_element(self, ref):
- return ArtifactElement(self._context, ref)
-
# create_source()
#
# Instantiate and return a Source
diff --git a/src/buildstream/_remote.py b/src/buildstream/_remote.py
index 78f67726a..f8edd5192 100644
--- a/src/buildstream/_remote.py
+++ b/src/buildstream/_remote.py
@@ -184,7 +184,7 @@ class BaseRemote:
def __enter__(self):
return self
- def __exit__(self, exc_type, exc_value, traceback):
+ def __exit__(self, _exc_type, _exc_value, traceback):
self.close()
return False
diff --git a/src/buildstream/_scheduler/resources.py b/src/buildstream/_scheduler/resources.py
index e76158779..a11555c53 100644
--- a/src/buildstream/_scheduler/resources.py
+++ b/src/buildstream/_scheduler/resources.py
@@ -113,53 +113,3 @@ class Resources:
for resource in resources:
assert self._used_resources[resource] > 0, "Scheduler resource imbalance"
self._used_resources[resource] -= 1
-
- # register_exclusive_interest()
- #
- # Inform the resources pool that `source` has an interest in
- # reserving this resource exclusively.
- #
- # The source parameter is used to identify the caller, it
- # must be ensured to be unique for the time that the
- # interest is registered.
- #
- # This function may be called multiple times, and subsequent
- # calls will simply have no effect until clear_exclusive_interest()
- # is used to clear the interest.
- #
- # This must be called in advance of reserve()
- #
- # Args:
- # resources (set): Set of resources to reserve exclusively
- # source (any): Source identifier, to be used again when unregistering
- # the interest.
- #
- def register_exclusive_interest(self, resources, source):
-
- # The very first thing we do is to register any exclusive
- # resources this job may want. Even if the job is not yet
- # allowed to run (because another job is holding the resource
- # it wants), we can still set this - it just means that any
- # job *currently* using these resources has to finish first,
- # and no new jobs wanting these can be launched (except other
- # exclusive-access jobs).
- #
- for resource in resources:
- self._exclusive_resources[resource].add(source)
-
- # unregister_exclusive_interest()
- #
- # Clear the exclusive interest in these resources.
- #
- # This should be called by the given source which registered
- # an exclusive interest.
- #
- # Args:
- # resources (set): Set of resources to reserve exclusively
- # source (str): Source identifier, to be used again when unregistering
- # the interest.
- #
- def unregister_exclusive_interest(self, resources, source):
-
- for resource in resources:
- self._exclusive_resources[resource].discard(source)
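Note: the removed pair kept, per resource, the set of sources holding an exclusive interest; registration is idempotent, and clearing uses discard() so unregistering twice is harmless. A minimal sketch of that bookkeeping with a defaultdict:

    from collections import defaultdict

    class ExclusiveInterests:
        def __init__(self):
            # resource name -> set of source identifiers wanting exclusivity
            self._exclusive_resources = defaultdict(set)

        def register(self, resources, source):
            # Idempotent: registering the same source twice has no effect.
            for resource in resources:
                self._exclusive_resources[resource].add(source)

        def unregister(self, resources, source):
            # discard() (not remove()) keeps this safe to call repeatedly.
            for resource in resources:
                self._exclusive_resources[resource].discard(source)
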
diff --git a/src/buildstream/_workspaces.py b/src/buildstream/_workspaces.py
index 3d50fd9c0..49b76a7b9 100644
--- a/src/buildstream/_workspaces.py
+++ b/src/buildstream/_workspaces.py
@@ -299,14 +299,6 @@ class Workspace:
def differs(self, other):
return self.to_dict() != other.to_dict()
- # invalidate_key()
- #
- # Invalidate the workspace key, forcing a recalculation next time
- # it is accessed.
- #
- def invalidate_key(self):
- self._key = None
-
# stage()
#
# Stage the workspace to the given directory.
@@ -346,44 +338,6 @@ class Workspace:
def clear_running_files(self):
self.running_files = {}
- # get_key()
- #
- # Get a unique key for this workspace.
- #
- # Args:
- # recalculate (bool) - Whether to recalculate the key
- #
- # Returns:
- # (str) A unique key for this workspace
- #
- def get_key(self):
- def unique_key(filename):
- try:
- stat = os.lstat(filename)
- except OSError as e:
- raise LoadError("Failed to stat file in workspace: {}".format(e), LoadErrorReason.MISSING_FILE)
-
- # Use the mtime of any file with sub second precision
- return stat.st_mtime_ns
-
- if self._key is None:
- fullpath = self.get_absolute_path()
-
- excluded_files = (WORKSPACE_PROJECT_FILE,)
-
- # Get a list of tuples of the project relative paths and fullpaths
- if os.path.isdir(fullpath):
- filelist = utils.list_relative_paths(fullpath)
- filelist = [
- (relpath, os.path.join(fullpath, relpath)) for relpath in filelist if relpath not in excluded_files
- ]
- else:
- filelist = [(self.get_absolute_path(), fullpath)]
-
- self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]
-
- return self._key
-
# get_absolute_path():
#
# Returns: The absolute path of the element's workspace.
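Note: the deleted get_key() keyed a workspace by (relative path, mtime) pairs, using st_mtime_ns for sub-second precision and caching the result in self._key. A reduced sketch of the same idea as a hypothetical free function, without the caching or the WORKSPACE_PROJECT_FILE exclusion:

    import os

    def workspace_key(fullpath):
        # Key a directory tree by (relative path, lstat mtime in ns) pairs.
        entries = []
        for root, _, files in os.walk(fullpath):
            for name in files:
                path = os.path.join(root, name)
                relpath = os.path.relpath(path, fullpath)
                entries.append((relpath, os.lstat(path).st_mtime_ns))
        return sorted(entries)
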
diff --git a/src/buildstream/element.py b/src/buildstream/element.py
index 7b96e8a8b..106460b9a 100644
--- a/src/buildstream/element.py
+++ b/src/buildstream/element.py
@@ -268,7 +268,6 @@ class Element(Plugin):
# Location where Element.stage_sources() was called
self.__staged_sources_directory = None # type: Optional[str]
self.__tainted = None # Whether the artifact is tainted and should not be shared
- self.__required = False # Whether the artifact is required in the current session
self.__artifact_files_required = False # Whether artifact files are required in the local cache
self.__build_result = None # The result of assembling this Element (success, description, detail)
self._build_log_path = None # The path of the build log for this Element
@@ -1483,13 +1482,6 @@ class Element(Plugin):
# Ensure deterministic owners of sources at build time
vdirectory.set_deterministic_user()
- # _is_required():
- #
- # Returns whether this element has been marked as required.
- #
- def _is_required(self):
- return self.__required
-
# _set_artifact_files_required():
#
# Mark artifact files for this element and its runtime dependencies as
@@ -3008,24 +3000,6 @@ class Element(Plugin):
self.__last_source_requires_previous_ix = last_requires_previous
return self.__last_source_requires_previous_ix
- # __reset_cache_data()
- #
- # Resets all data related to cache key calculation and whether an artifact
- # is cached.
- #
- # This is useful because we need to know whether a workspace is cached
- # before we know whether to assemble it, and doing that would generate a
- # different cache key to the initial one.
- #
- def __reset_cache_data(self):
- self.__build_result = None
- self.__cache_key_dict = None
- self.__cache_key = None
- self.__weak_cache_key = None
- self.__strict_cache_key = None
- self.__artifact = None
- self.__strict_artifact = None
-
# __update_cache_keys()
#
# Updates weak and strict cache keys
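Note: __reset_cache_data() cleared every memoized key and artifact field so the next access would recompute them, which matters when a workspace's cached state must be re-evaluated mid-session. The underlying pattern is memoization plus an explicit invalidation hook; a generic sketch with hypothetical names:

    class KeyedObject:
        def __init__(self):
            self._cache_key = None

        @property
        def cache_key(self):
            # Compute once, reuse until explicitly reset.
            if self._cache_key is None:
                self._cache_key = self._calculate_key()
            return self._cache_key

        def reset_cache_data(self):
            # Force recalculation on the next access.
            self._cache_key = None

        def _calculate_key(self):
            return "..."  # stand-in for the real key derivation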