commit    c285f24489b073013373b4eb1978516112e3281d
tree      f8ef0f8e18e1ab9380a188dec259f6ee1827595c
parent    c2af0d5108c333cafd877670e86f61c84238c8f9
parent    2fa927163cdbfb23832357017ae83740cfcd8cd5
author    Tiago Gomes <tiago.avv@gmail.com>  2018-09-14 16:08:07 +0000
committer Tiago Gomes <tiago.avv@gmail.com>  2018-09-14 16:08:07 +0000
Merge branch 'tiagogomes/some-cleanups' into 'master'
Bunch of cleanups

See merge request BuildStream/buildstream!798
 buildstream/_artifactcache/artifactcache.py |  6
 buildstream/_artifactcache/cascache.py      | 50
 buildstream/_context.py                     |  1
 buildstream/_scheduler/jobs/__init__.py     | 19
 buildstream/_scheduler/jobs/cleanupjob.py   |  9
 buildstream/_scheduler/queues/buildqueue.py |  2
 buildstream/_scheduler/scheduler.py         |  5

 7 files changed, 65 insertions(+), 27 deletions(-)
diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index 45293e345..eb7759d83 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -267,7 +267,7 @@ class ArtifactCache():
                       "Please increase the cache-quota in {}."
                       .format(self.context.config_origin or default_conf))
 
-            if self.get_quota_exceeded():
+            if self.has_quota_exceeded():
                 raise ArtifactError("Cache too full. Aborting.",
                                     detail=detail,
                                     reason="cache-too-full")
@@ -354,14 +354,14 @@ class ArtifactCache():
         self._cache_size = cache_size
         self._write_cache_size(self._cache_size)
 
-    # get_quota_exceeded()
+    # has_quota_exceeded()
     #
     # Checks if the current artifact cache size exceeds the quota.
    #
     # Returns:
     #     (bool): True if the quota is exceeded
     #
-    def get_quota_exceeded(self):
+    def has_quota_exceeded(self):
         return self.get_cache_size() > self._cache_quota
 
 ################################################
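The hunks above rename a boolean accessor into a predicate. A minimal sketch of the convention at work, with hypothetical names rather than BuildStream code: get_* returns a quantity, has_* answers a yes/no question.

    class CacheSketch:
        # Hypothetical illustration of the get_* / has_* naming split.
        def __init__(self, quota_bytes):
            self._cache_quota = quota_bytes
            self._cache_size = 0

        def get_cache_size(self):
            # A getter: returns a value.
            return self._cache_size

        def has_quota_exceeded(self):
            # A predicate: reads naturally as a question at call sites,
            # e.g. `if cache.has_quota_exceeded(): ...`
            return self.get_cache_size() > self._cache_quota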
diff --git a/buildstream/_artifactcache/cascache.py b/buildstream/_artifactcache/cascache.py
index 78191ccf8..9cf83a222 100644
--- a/buildstream/_artifactcache/cascache.py
+++ b/buildstream/_artifactcache/cascache.py
@@ -117,7 +117,7 @@ class CASCache(ArtifactCache):
     def commit(self, element, content, keys):
         refs = [self.get_artifact_fullname(element, key) for key in keys]
 
-        tree = self._create_tree(content)
+        tree = self._commit_directory(content)
 
         for ref in refs:
             self.set_ref(ref, tree)
@@ -665,7 +665,21 @@ class CASCache(ArtifactCache):
     def _refpath(self, ref):
         return os.path.join(self.casdir, 'refs', 'heads', ref)
 
-    def _create_tree(self, path, *, digest=None):
+    # _commit_directory():
+    #
+    # Adds local directory to content addressable store.
+    #
+    # Adds files, symbolic links and recursively other directories in
+    # a local directory to the content addressable store.
+    #
+    # Args:
+    #     path (str): Path to the directory to add.
+    #     dir_digest (Digest): An optional Digest object to use.
+    #
+    # Returns:
+    #     (Digest): Digest object for the directory added.
+    #
+    def _commit_directory(self, path, *, dir_digest=None):
         directory = remote_execution_pb2.Directory()
 
         for name in sorted(os.listdir(path)):
@@ -674,7 +688,7 @@ class CASCache(ArtifactCache):
             if stat.S_ISDIR(mode):
                 dirnode = directory.directories.add()
                 dirnode.name = name
-                self._create_tree(full_path, digest=dirnode.digest)
+                self._commit_directory(full_path, dir_digest=dirnode.digest)
             elif stat.S_ISREG(mode):
                 filenode = directory.files.add()
                 filenode.name = name
@@ -690,7 +704,8 @@ class CASCache(ArtifactCache):
             else:
                 raise ArtifactError("Unsupported file type for {}".format(full_path))
 
-        return self.add_object(digest=digest, buffer=directory.SerializeToString())
+        return self.add_object(digest=dir_digest,
+                               buffer=directory.SerializeToString())
 
     def _get_subdir(self, tree, subdir):
         head, name = os.path.split(subdir)
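The new docstring describes a recursive, Merkle-tree style commit: children are stored first and their digests embedded in the parent's listing, so a single digest pins an entire tree. A runnable sketch of the same idea against a toy dict-based store; the repr()-based serialization and all names here are illustrative stand-ins for the protobuf Directory messages, not BuildStream's API.

    import hashlib
    import os
    import stat

    def add_object(store, data):
        # Store a blob under its content digest and return the digest.
        digest = hashlib.sha256(data).hexdigest()
        store[digest] = data
        return digest

    def commit_directory(store, path):
        # Recursively add a local directory to a toy content-addressable
        # store (dict: hex digest -> bytes). Children are committed first,
        # so the parent's digest transitively pins the whole subtree.
        entries = []
        for name in sorted(os.listdir(path)):
            full_path = os.path.join(path, name)
            mode = os.lstat(full_path).st_mode
            if stat.S_ISDIR(mode):
                entries.append(('dir', name, commit_directory(store, full_path)))
            elif stat.S_ISREG(mode):
                with open(full_path, 'rb') as f:
                    entries.append(('file', name, add_object(store, f.read())))
            elif stat.S_ISLNK(mode):
                entries.append(('symlink', name, os.readlink(full_path)))
            else:
                raise ValueError("Unsupported file type for {}".format(full_path))
        # A deterministic serialization of the listing becomes the
        # directory object itself; its digest identifies the directory.
        return add_object(store, repr(entries).encode('utf-8'))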
@@ -833,14 +848,26 @@ class CASCache(ArtifactCache):
         assert digest.size_bytes == os.fstat(stream.fileno()).st_size
 
-    def _fetch_directory(self, remote, tree):
-        objpath = self.objpath(tree)
+    # _fetch_directory():
+    #
+    # Fetches remote directory and adds it to content addressable store.
+    #
+    # Fetches files, symbolic links and recursively other directories in
+    # the remote directory and adds them to the content addressable
+    # store.
+    #
+    # Args:
+    #     remote (Remote): The remote to use.
+    #     dir_digest (Digest): Digest object for the directory to fetch.
+    #
+    def _fetch_directory(self, remote, dir_digest):
+        objpath = self.objpath(dir_digest)
         if os.path.exists(objpath):
             # already in local cache
             return
 
         with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
-            self._fetch_blob(remote, tree, out)
+            self._fetch_blob(remote, dir_digest, out)
 
             directory = remote_execution_pb2.Directory()
@@ -848,7 +875,7 @@ class CASCache(ArtifactCache):
                 directory.ParseFromString(f.read())
 
             for filenode in directory.files:
-                fileobjpath = self.objpath(tree)
+                fileobjpath = self.objpath(filenode.digest)
                 if os.path.exists(fileobjpath):
                     # already in local cache
                     continue
@@ -862,10 +889,11 @@ class CASCache(ArtifactCache):
             for dirnode in directory.directories:
                 self._fetch_directory(remote, dirnode.digest)
 
-            # place directory blob only in final location when we've downloaded
-            # all referenced blobs to avoid dangling references in the repository
+            # Place directory blob only in final location when we've
+            # downloaded all referenced blobs to avoid dangling
+            # references in the repository.
             digest = self.add_object(path=out.name)
-            assert digest.hash == tree.hash
+            assert digest.hash == dir_digest.hash
 
     def _fetch_tree(self, remote, digest):
         # download but do not store the Tree object
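The fetch side mirrors the commit side, with the ordering constraint called out in the last hunk: the directory object is placed only after every blob it references exists locally, so the store never contains dangling references. A sketch against the same toy store as above; remote.fetch_blob() is a hypothetical stand-in for the gRPC calls in cascache.py.

    import ast

    def fetch_directory(store, remote, dir_digest):
        # Recursively mirror a remote directory into the local toy store.
        # `remote` is assumed to expose fetch_blob(digest) -> bytes.
        if dir_digest in store:
            return  # already in local cache
        data = remote.fetch_blob(dir_digest)
        # Inverse of the repr()-based serialization in the commit sketch.
        entries = ast.literal_eval(data.decode('utf-8'))
        for kind, _name, ref in entries:
            if kind == 'file' and ref not in store:
                store[ref] = remote.fetch_blob(ref)
            elif kind == 'dir':
                fetch_directory(store, remote, ref)
            # symlinks carry their target inline; nothing to fetch
        # Store the directory object only after every blob it references
        # is present, so the store never holds dangling references.
        store[dir_digest] = data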
diff --git a/buildstream/_context.py b/buildstream/_context.py
index a94d374cf..8b8b01b17 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -119,7 +119,6 @@ class Context():
         self._log_handle = None
         self._log_filename = None
         self.config_cache_quota = 'infinity'
-        self.artifactdir_volume = None
 
     # load()
     #
diff --git a/buildstream/_scheduler/jobs/__init__.py b/buildstream/_scheduler/jobs/__init__.py
index 185d8258a..4b0b11dac 100644
--- a/buildstream/_scheduler/jobs/__init__.py
+++ b/buildstream/_scheduler/jobs/__init__.py
@@ -1,3 +1,22 @@
+#
+# Copyright (C) 2018 Codethink Limited
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+#        Tristan Maat <tristan.maat@codethink.co.uk>
+
 from .elementjob import ElementJob
 from .cachesizejob import CacheSizeJob
 from .cleanupjob import CleanupJob
diff --git a/buildstream/_scheduler/jobs/cleanupjob.py b/buildstream/_scheduler/jobs/cleanupjob.py
index c22ce3b98..399435ad9 100644
--- a/buildstream/_scheduler/jobs/cleanupjob.py
+++ b/buildstream/_scheduler/jobs/cleanupjob.py
@@ -21,9 +21,8 @@ from ..._platform import Platform
 
 
 class CleanupJob(Job):
-    def __init__(self, *args, complete_cb, **kwargs):
+    def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self._complete_cb = complete_cb
 
         platform = Platform.get_platform()
         self._artifacts = platform.artifactcache
@@ -34,9 +33,3 @@ class CleanupJob(Job):
     def parent_complete(self, success, result):
         if success:
             self._artifacts.set_cache_size(result)
-
-        if self._complete_cb:
-            self._complete_cb()
-
-    def child_process_data(self):
-        return {}
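With the unused callback and the empty child_process_data() gone, the class reduces to the parent/child hand-off. A minimal sketch of that flow with a hypothetical stripped-down Job; the real child-side work and process plumbing live in BuildStream's Job machinery and are not shown.

    class ToyJob:
        # Hypothetical, minimal stand-in for the scheduler's Job class.
        def child_process(self):
            raise NotImplementedError

        def parent_complete(self, success, result):
            raise NotImplementedError

    class ToyCleanupJob(ToyJob):
        def __init__(self, artifacts):
            self._artifacts = artifacts

        def child_process(self):
            # Runs in the child: prune the cache, return the new size.
            return self._artifacts.clean()

        def parent_complete(self, success, result):
            # Runs in the parent: record the size only on success,
            # the same logic kept by the hunk above.
            if success:
                self._artifacts.set_cache_size(result)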
diff --git a/buildstream/_scheduler/queues/buildqueue.py b/buildstream/_scheduler/queues/buildqueue.py
index 6e7ce04aa..0c74b3698 100644
--- a/buildstream/_scheduler/queues/buildqueue.py
+++ b/buildstream/_scheduler/queues/buildqueue.py
@@ -101,7 +101,7 @@ class BuildQueue(Queue):
         # If the estimated size outgrows the quota, ask the scheduler
         # to queue a job to actually check the real cache size.
         #
-        if artifacts.get_quota_exceeded():
+        if artifacts.has_quota_exceeded():
             self._scheduler.check_cache_size()
 
     def done(self, job, element, result, success):
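The comment in this hunk points at a two-phase design: build jobs maintain a cheap running estimate of the cache size, and only when the estimate crosses the quota does the scheduler spend a job measuring the real size. A toy illustration of estimate-then-verify, with hypothetical names.

    class ToySizeTracker:
        # Hypothetical illustration of the estimate-then-verify pattern.
        def __init__(self, quota_bytes):
            self._quota = quota_bytes
            self._estimated_size = 0

        def add_artifact_estimate(self, size_bytes):
            # Cheap bookkeeping on every build; no disk walk needed.
            self._estimated_size += size_bytes
            return self._estimated_size > self._quota  # real check due?

        def real_check(self, measure_disk_usage):
            # Expensive: measure actual usage, then adopt it as the
            # new baseline estimate.
            self._estimated_size = measure_disk_usage()
            return self._estimated_size > self._quota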
diff --git a/buildstream/_scheduler/scheduler.py b/buildstream/_scheduler/scheduler.py
index 635b0628c..8faf58e65 100644
--- a/buildstream/_scheduler/scheduler.py
+++ b/buildstream/_scheduler/scheduler.py
@@ -351,14 +351,13 @@ class Scheduler():
         platform = Platform.get_platform()
         artifacts = platform.artifactcache
 
-        if not artifacts.get_quota_exceeded():
+        if not artifacts.has_quota_exceeded():
             return
 
         job = CleanupJob(self, 'cleanup', 'cleanup/cleanup',
                          resources=[ResourceType.CACHE,
                                     ResourceType.PROCESS],
-                         exclusive_resources=[ResourceType.CACHE],
-                         complete_cb=None)
+                         exclusive_resources=[ResourceType.CACHE])
         self.schedule_jobs([job])
 
     # _suspend_jobs()
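The CleanupJob here claims ResourceType.CACHE exclusively, so pruning cannot run concurrently with jobs that are writing to the cache. A toy sketch of exclusive-versus-shared resource admission, with hypothetical names; BuildStream's real resource accounting is richer than this.

    class ToyResource:
        # Hypothetical admission check: shared holders block an
        # exclusive request and vice versa, which is why cleanup takes
        # the cache resource exclusively while other users share it.
        def __init__(self):
            self._shared = 0
            self._exclusive = False

        def try_acquire(self, exclusive):
            if exclusive:
                if self._shared or self._exclusive:
                    return False
                self._exclusive = True
            else:
                if self._exclusive:
                    return False
                self._shared += 1
            return True

        def release(self, exclusive):
            if exclusive:
                self._exclusive = False
            else:
                self._shared -= 1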