diff options
author | Jürg Billeter <j@bitron.ch> | 2019-06-12 11:54:36 +0200 |
---|---|---|
committer | Jürg Billeter <j@bitron.ch> | 2019-08-20 08:09:52 +0200 |
commit | 4bb791e1f4c7840058cacef858d7eb99e408b0bb (patch) | |
tree | 800792388cd382067b514ca0c235ca7d4f4ec4c4 | |
parent | 2f34f1907be7f0c663af14e1412ff52ad5d5bce8 (diff) | |
download | buildstream-4bb791e1f4c7840058cacef858d7eb99e408b0bb.tar.gz |
cascache.py: Use buildbox-casd in add_object()
buildbox-casd clients are not allowed to directly write into the local
cache directory. Use LocalCas.CaptureFiles() instead.
-rw-r--r-- | src/buildstream/_cas/cascache.py | 53 |
1 file changed, 20 insertions, 33 deletions
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py index 5280615b3..8d2ed02fb 100644 --- a/src/buildstream/_cas/cascache.py +++ b/src/buildstream/_cas/cascache.py @@ -17,7 +17,6 @@ # Authors: # Jürg Billeter <juerg.billeter@codethink.co.uk> -import hashlib import itertools import os import stat @@ -31,8 +30,9 @@ import time import grpc +from .._protos.google.rpc import code_pb2 from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc -from .._protos.build.buildgrid import local_cas_pb2_grpc +from .._protos.build.buildgrid import local_cas_pb2, local_cas_pb2_grpc from .._protos.buildstream.v2 import buildstream_pb2 from .. import utils @@ -442,42 +442,29 @@ class CASCache(): if digest is None: digest = remote_execution_pb2.Digest() - try: - h = hashlib.sha256() - # Always write out new file to avoid corruption if input file is modified - with contextlib.ExitStack() as stack: - if path is not None and link_directly: - tmp = stack.enter_context(open(path, 'rb')) - for chunk in iter(lambda: tmp.read(_BUFFER_SIZE), b""): - h.update(chunk) - else: - tmp = stack.enter_context(self._temporary_object()) + with contextlib.ExitStack() as stack: + if path is None: + tmp = stack.enter_context(self._temporary_object()) + tmp.write(buffer) + tmp.flush() + path = tmp.name - if path: - with open(path, 'rb') as f: - for chunk in iter(lambda: f.read(_BUFFER_SIZE), b""): - h.update(chunk) - tmp.write(chunk) - else: - h.update(buffer) - tmp.write(buffer) + request = local_cas_pb2.CaptureFilesRequest() + request.path.append(path) - tmp.flush() + local_cas = self._get_local_cas() - digest.hash = h.hexdigest() - digest.size_bytes = os.fstat(tmp.fileno()).st_size + response = local_cas.CaptureFiles(request) - # Place file at final location - objpath = self.objpath(digest) - os.makedirs(os.path.dirname(objpath), exist_ok=True) - os.link(tmp.name, objpath) - - except FileExistsError: - # We can ignore the failed 
link() if the object is already in the repo. - pass + if len(response.responses) != 1: + raise CASCacheError("Expected 1 response from CaptureFiles, got {}".format(len(response.responses))) - except OSError as e: - raise CASCacheError("Failed to hash object: {}".format(e)) from e + blob_response = response.responses[0] + if blob_response.status.code == code_pb2.RESOURCE_EXHAUSTED: + raise CASCacheError("Cache too full", reason="cache-too-full") + elif blob_response.status.code != code_pb2.OK: + raise CASCacheError("Failed to capture blob {}: {}".format(path, blob_response.status.code)) + digest.CopyFrom(blob_response.digest) return digest |