Jürg Billeter pushed to branch master at BuildStream / buildstream
Commits:
- 2683f98a by Raoul Hidalgo Charman at 2019-01-16T11:55:07Z
- d2cc4798 by Raoul Hidalgo Charman at 2019-01-16T11:55:07Z
- 76f67483 by Raoul Hidalgo Charman at 2019-01-16T11:55:07Z
- 6c428bc9 by Jürg Billeter at 2019-01-16T12:56:38Z
15 changed files:
- buildstream/_artifactcache/artifactcache.py → buildstream/_artifactcache.py
- buildstream/_artifactcache/__init__.py → buildstream/_cas/__init__.py
- buildstream/_artifactcache/cascache.py → buildstream/_cas/cascache.py
- + buildstream/_cas/casremote.py
- buildstream/_artifactcache/casserver.py → buildstream/_cas/casserver.py
- buildstream/_context.py
- buildstream/_exceptions.py
- buildstream/sandbox/_sandboxremote.py
- doc/source/using_configuring_artifact_server.rst
- tests/artifactcache/config.py
- tests/artifactcache/expiry.py
- tests/sandboxes/storage-tests.py
- tests/storage/virtual_directory_import.py
- tests/testutils/artifactshare.py
- tests/utils/misc.py
Changes:
| ... | ... | @@ -19,18 +19,16 @@ | 
| 19 | 19 |  | 
| 20 | 20 |  import multiprocessing
 | 
| 21 | 21 |  import os
 | 
| 22 | -import signal
 | |
| 23 | 22 |  import string
 | 
| 24 | 23 |  from collections.abc import Mapping
 | 
| 25 | 24 |  | 
| 26 | -from ..types import _KeyStrength
 | |
| 27 | -from .._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
 | |
| 28 | -from .._message import Message, MessageType
 | |
| 29 | -from .. import _signals
 | |
| 30 | -from .. import utils
 | |
| 31 | -from .. import _yaml
 | |
| 25 | +from .types import _KeyStrength
 | |
| 26 | +from ._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
 | |
| 27 | +from ._message import Message, MessageType
 | |
| 28 | +from . import utils
 | |
| 29 | +from . import _yaml
 | |
| 32 | 30 |  | 
| 33 | -from .cascache import CASRemote, CASRemoteSpec
 | |
| 31 | +from ._cas import CASRemote, CASRemoteSpec
 | |
| 34 | 32 |  | 
| 35 | 33 |  | 
| 36 | 34 |  CACHE_SIZE_FILE = "cache_size"
 | 
| ... | ... | @@ -375,20 +373,8 @@ class ArtifactCache(): | 
| 375 | 373 |          remotes = {}
 | 
| 376 | 374 |          q = multiprocessing.Queue()
 | 
| 377 | 375 |          for remote_spec in remote_specs:
 | 
| 378 | -            # Use subprocess to avoid creation of gRPC threads in main BuildStream process
 | |
| 379 | -            # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
 | |
| 380 | -            p = multiprocessing.Process(target=self.cas.initialize_remote, args=(remote_spec, q))
 | |
| 381 | 376 |  | 
| 382 | -            try:
 | |
| 383 | -                # Keep SIGINT blocked in the child process
 | |
| 384 | -                with _signals.blocked([signal.SIGINT], ignore=False):
 | |
| 385 | -                    p.start()
 | |
| 386 | - | |
| 387 | -                error = q.get()
 | |
| 388 | -                p.join()
 | |
| 389 | -            except KeyboardInterrupt:
 | |
| 390 | -                utils._kill_process_tree(p.pid)
 | |
| 391 | -                raise
 | |
| 377 | +            error = CASRemote.check_remote(remote_spec, q)
 | |
| 392 | 378 |  | 
| 393 | 379 |              if error and on_failure:
 | 
| 394 | 380 |                  on_failure(remote_spec.url, error)
 | 
| ... | ... | @@ -747,7 +733,7 @@ class ArtifactCache(): | 
| 747 | 733 |                                  "servers are configured as push remotes.")
 | 
| 748 | 734 |  | 
| 749 | 735 |          for remote in push_remotes:
 | 
| 750 | -            message_digest = self.cas.push_message(remote, message)
 | |
| 736 | +            message_digest = remote.push_message(message)
 | |
| 751 | 737 |  | 
| 752 | 738 |          return message_digest
 | 
| 753 | 739 |  | 
| ... | ... | @@ -17,4 +17,5 @@ | 
| 17 | 17 |  #  Authors:
 | 
| 18 | 18 |  #        Tristan Van Berkom <tristan vanberkom codethink co uk>
 | 
| 19 | 19 |  | 
| 20 | -from .artifactcache import ArtifactCache, ArtifactCacheSpec, CACHE_SIZE_FILE | |
| 20 | +from .cascache import CASCache
 | |
| 21 | +from .casremote import CASRemote, CASRemoteSpec | 
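With this rename, BuildStream code imports its CAS primitives from the new buildstream._cas package, as the import fix-ups in the later hunks of this push show. A minimal sketch of the new import surface (the server entry point keeps its full module path):

    from buildstream._cas import CASCache, CASRemote, CASRemoteSpec
    from buildstream._cas.casserver import create_server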
| ... | ... | @@ -17,85 +17,23 @@ | 
| 17 | 17 |  #  Authors:
 | 
| 18 | 18 |  #        Jürg Billeter <juerg billeter codethink co uk>
 | 
| 19 | 19 |  | 
| 20 | -from collections import namedtuple
 | |
| 21 | 20 |  import hashlib
 | 
| 22 | 21 |  import itertools
 | 
| 23 | -import io
 | |
| 24 | 22 |  import os
 | 
| 25 | 23 |  import stat
 | 
| 26 | 24 |  import tempfile
 | 
| 27 | 25 |  import uuid
 | 
| 28 | 26 |  import contextlib
 | 
| 29 | -from urllib.parse import urlparse
 | |
| 30 | 27 |  | 
| 31 | 28 |  import grpc
 | 
| 32 | 29 |  | 
| 33 | -from .._protos.google.rpc import code_pb2
 | |
| 34 | -from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
 | |
| 35 | -from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 | |
| 36 | -from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
 | |
| 30 | +from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 | |
| 31 | +from .._protos.buildstream.v2 import buildstream_pb2
 | |
| 37 | 32 |  | 
| 38 | 33 |  from .. import utils
 | 
| 39 | -from .._exceptions import CASError, LoadError, LoadErrorReason
 | |
| 40 | -from .. import _yaml
 | |
| 34 | +from .._exceptions import CASCacheError
 | |
| 41 | 35 |  | 
| 42 | - | |
| 43 | -# The default limit for gRPC messages is 4 MiB.
 | |
| 44 | -# Limit payload to 1 MiB to leave sufficient headroom for metadata.
 | |
| 45 | -_MAX_PAYLOAD_BYTES = 1024 * 1024
 | |
| 46 | - | |
| 47 | - | |
| 48 | -class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
 | |
| 49 | - | |
| 50 | -    # _new_from_config_node
 | |
| 51 | -    #
 | |
| 52 | -    # Creates an CASRemoteSpec() from a YAML loaded node
 | |
| 53 | -    #
 | |
| 54 | -    @staticmethod
 | |
| 55 | -    def _new_from_config_node(spec_node, basedir=None):
 | |
| 56 | -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
 | |
| 57 | -        url = _yaml.node_get(spec_node, str, 'url')
 | |
| 58 | -        push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
 | |
| 59 | -        if not url:
 | |
| 60 | -            provenance = _yaml.node_get_provenance(spec_node, 'url')
 | |
| 61 | -            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 62 | -                            "{}: empty artifact cache URL".format(provenance))
 | |
| 63 | - | |
| 64 | -        instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)
 | |
| 65 | - | |
| 66 | -        server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
 | |
| 67 | -        if server_cert and basedir:
 | |
| 68 | -            server_cert = os.path.join(basedir, server_cert)
 | |
| 69 | - | |
| 70 | -        client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
 | |
| 71 | -        if client_key and basedir:
 | |
| 72 | -            client_key = os.path.join(basedir, client_key)
 | |
| 73 | - | |
| 74 | -        client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
 | |
| 75 | -        if client_cert and basedir:
 | |
| 76 | -            client_cert = os.path.join(basedir, client_cert)
 | |
| 77 | - | |
| 78 | -        if client_key and not client_cert:
 | |
| 79 | -            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
 | |
| 80 | -            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 81 | -                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
 | |
| 82 | - | |
| 83 | -        if client_cert and not client_key:
 | |
| 84 | -            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
 | |
| 85 | -            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 86 | -                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
 | |
| 87 | - | |
| 88 | -        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
 | |
| 89 | - | |
| 90 | - | |
| 91 | -CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
 | |
| 92 | - | |
| 93 | - | |
| 94 | -class BlobNotFound(CASError):
 | |
| 95 | - | |
| 96 | -    def __init__(self, blob, msg):
 | |
| 97 | -        self.blob = blob
 | |
| 98 | -        super().__init__(msg)
 | |
| 36 | +from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate
 | |
| 99 | 37 |  | 
| 100 | 38 |  | 
| 101 | 39 |  # A CASCache manages a CAS repository as specified in the Remote Execution API.
 | 
| ... | ... | @@ -120,7 +58,7 @@ class CASCache(): | 
| 120 | 58 |          headdir = os.path.join(self.casdir, 'refs', 'heads')
 | 
| 121 | 59 |          objdir = os.path.join(self.casdir, 'objects')
 | 
| 122 | 60 |          if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
 | 
| 123 | -            raise CASError("CAS repository check failed for '{}'".format(self.casdir))
 | |
| 61 | +            raise CASCacheError("CAS repository check failed for '{}'".format(self.casdir))
 | |
| 124 | 62 |  | 
| 125 | 63 |      # contains():
 | 
| 126 | 64 |      #
 | 
| ... | ... | @@ -169,7 +107,7 @@ class CASCache(): | 
| 169 | 107 |      #     subdir (str): Optional specific dir to extract
 | 
| 170 | 108 |      #
 | 
| 171 | 109 |      # Raises:
 | 
| 172 | -    #     CASError: In cases there was an OSError, or if the ref did not exist.
 | |
| 110 | +    #     CASCacheError: In case there was an OSError, or if the ref did not exist.
 | |
| 173 | 111 |      #
 | 
| 174 | 112 |      # Returns: path to extracted directory
 | 
| 175 | 113 |      #
 | 
| ... | ... | @@ -201,7 +139,7 @@ class CASCache(): | 
| 201 | 139 |                  # Another process beat us to rename
 | 
| 202 | 140 |                  pass
 | 
| 203 | 141 |              except OSError as e:
 | 
| 204 | -                raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
 | |
| 142 | +                raise CASCacheError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
 | |
| 205 | 143 |  | 
| 206 | 144 |          return originaldest
 | 
| 207 | 145 |  | 
| ... | ... | @@ -245,29 +183,6 @@ class CASCache(): | 
| 245 | 183 |  | 
| 246 | 184 |          return modified, removed, added
 | 
| 247 | 185 |  | 
| 248 | -    def initialize_remote(self, remote_spec, q):
 | |
| 249 | -        try:
 | |
| 250 | -            remote = CASRemote(remote_spec)
 | |
| 251 | -            remote.init()
 | |
| 252 | - | |
| 253 | -            request = buildstream_pb2.StatusRequest(instance_name=remote_spec.instance_name)
 | |
| 254 | -            response = remote.ref_storage.Status(request)
 | |
| 255 | - | |
| 256 | -            if remote_spec.push and not response.allow_updates:
 | |
| 257 | -                q.put('CAS server does not allow push')
 | |
| 258 | -            else:
 | |
| 259 | -                # No error
 | |
| 260 | -                q.put(None)
 | |
| 261 | - | |
| 262 | -        except grpc.RpcError as e:
 | |
| 263 | -            # str(e) is too verbose for errors reported to the user
 | |
| 264 | -            q.put(e.details())
 | |
| 265 | - | |
| 266 | -        except Exception as e:               # pylint: disable=broad-except
 | |
| 267 | -            # Whatever happens, we need to return it to the calling process
 | |
| 268 | -            #
 | |
| 269 | -            q.put(str(e))
 | |
| 270 | - | |
| 271 | 186 |      # pull():
 | 
| 272 | 187 |      #
 | 
| 273 | 188 |      # Pull a ref from a remote repository.
 | 
| ... | ... | @@ -306,7 +221,7 @@ class CASCache(): | 
| 306 | 221 |              return True
 | 
| 307 | 222 |          except grpc.RpcError as e:
 | 
| 308 | 223 |              if e.code() != grpc.StatusCode.NOT_FOUND:
 | 
| 309 | -                raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
 | |
| 224 | +                raise CASCacheError("Failed to pull ref {}: {}".format(ref, e)) from e
 | |
| 310 | 225 |              else:
 | 
| 311 | 226 |                  return False
 | 
| 312 | 227 |          except BlobNotFound as e:
 | 
| ... | ... | @@ -360,7 +275,7 @@ class CASCache(): | 
| 360 | 275 |      #   (bool): True if any remote was updated, False if no pushes were required
 | 
| 361 | 276 |      #
 | 
| 362 | 277 |      # Raises:
 | 
| 363 | -    #   (CASError): if there was an error
 | |
| 278 | +    #   (CASCacheError): if there was an error
 | |
| 364 | 279 |      #
 | 
| 365 | 280 |      def push(self, refs, remote):
 | 
| 366 | 281 |          skipped_remote = True
 | 
| ... | ... | @@ -395,7 +310,7 @@ class CASCache(): | 
| 395 | 310 |                  skipped_remote = False
 | 
| 396 | 311 |          except grpc.RpcError as e:
 | 
| 397 | 312 |              if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
 | 
| 398 | -                raise CASError("Failed to push ref {}: {}".format(refs, e), temporary=True) from e
 | |
| 313 | +                raise CASCacheError("Failed to push ref {}: {}".format(refs, e), temporary=True) from e
 | |
| 399 | 314 |  | 
| 400 | 315 |          return not skipped_remote
 | 
| 401 | 316 |  | 
| ... | ... | @@ -408,57 +323,13 @@ class CASCache(): | 
| 408 | 323 |      #     directory (Directory): A virtual directory object to push.
 | 
| 409 | 324 |      #
 | 
| 410 | 325 |      # Raises:
 | 
| 411 | -    #     (CASError): if there was an error
 | |
| 326 | +    #     (CASCacheError): if there was an error
 | |
| 412 | 327 |      #
 | 
| 413 | 328 |      def push_directory(self, remote, directory):
 | 
| 414 | 329 |          remote.init()
 | 
| 415 | 330 |  | 
| 416 | 331 |          self._send_directory(remote, directory.ref)
 | 
| 417 | 332 |  | 
| 418 | -    # push_message():
 | |
| 419 | -    #
 | |
| 420 | -    # Push the given protobuf message to a remote.
 | |
| 421 | -    #
 | |
| 422 | -    # Args:
 | |
| 423 | -    #     remote (CASRemote): The remote to push to
 | |
| 424 | -    #     message (Message): A protobuf message to push.
 | |
| 425 | -    #
 | |
| 426 | -    # Raises:
 | |
| 427 | -    #     (CASError): if there was an error
 | |
| 428 | -    #
 | |
| 429 | -    def push_message(self, remote, message):
 | |
| 430 | - | |
| 431 | -        message_buffer = message.SerializeToString()
 | |
| 432 | -        message_digest = utils._message_digest(message_buffer)
 | |
| 433 | - | |
| 434 | -        remote.init()
 | |
| 435 | - | |
| 436 | -        with io.BytesIO(message_buffer) as b:
 | |
| 437 | -            self._send_blob(remote, message_digest, b)
 | |
| 438 | - | |
| 439 | -        return message_digest
 | |
| 440 | - | |
| 441 | -    # verify_digest_on_remote():
 | |
| 442 | -    #
 | |
| 443 | -    # Check whether the object is already on the server in which case
 | |
| 444 | -    # there is no need to upload it.
 | |
| 445 | -    #
 | |
| 446 | -    # Args:
 | |
| 447 | -    #     remote (CASRemote): The remote to check
 | |
| 448 | -    #     digest (Digest): The object digest.
 | |
| 449 | -    #
 | |
| 450 | -    def verify_digest_on_remote(self, remote, digest):
 | |
| 451 | -        remote.init()
 | |
| 452 | - | |
| 453 | -        request = remote_execution_pb2.FindMissingBlobsRequest(instance_name=remote.spec.instance_name)
 | |
| 454 | -        request.blob_digests.extend([digest])
 | |
| 455 | - | |
| 456 | -        response = remote.cas.FindMissingBlobs(request)
 | |
| 457 | -        if digest in response.missing_blob_digests:
 | |
| 458 | -            return False
 | |
| 459 | - | |
| 460 | -        return True
 | |
| 461 | - | |
| 462 | 333 |      # objpath():
 | 
| 463 | 334 |      #
 | 
| 464 | 335 |      # Return the path of an object based on its digest.
 | 
| ... | ... | @@ -531,7 +402,7 @@ class CASCache(): | 
| 531 | 402 |              pass
 | 
| 532 | 403 |  | 
| 533 | 404 |          except OSError as e:
 | 
| 534 | -            raise CASError("Failed to hash object: {}".format(e)) from e
 | |
| 405 | +            raise CASCacheError("Failed to hash object: {}".format(e)) from e
 | |
| 535 | 406 |  | 
| 536 | 407 |          return digest
 | 
| 537 | 408 |  | 
| ... | ... | @@ -572,7 +443,7 @@ class CASCache(): | 
| 572 | 443 |                  return digest
 | 
| 573 | 444 |  | 
| 574 | 445 |          except FileNotFoundError as e:
 | 
| 575 | -            raise CASError("Attempt to access unavailable ref: {}".format(e)) from e
 | |
| 446 | +            raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
 | |
| 576 | 447 |  | 
| 577 | 448 |      # update_mtime()
 | 
| 578 | 449 |      #
 | 
| ... | ... | @@ -585,7 +456,7 @@ class CASCache(): | 
| 585 | 456 |          try:
 | 
| 586 | 457 |              os.utime(self._refpath(ref))
 | 
| 587 | 458 |          except FileNotFoundError as e:
 | 
| 588 | -            raise CASError("Attempt to access unavailable ref: {}".format(e)) from e
 | |
| 459 | +            raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
 | |
| 589 | 460 |  | 
| 590 | 461 |      # calculate_cache_size()
 | 
| 591 | 462 |      #
 | 
| ... | ... | @@ -676,7 +547,7 @@ class CASCache(): | 
| 676 | 547 |          # Remove cache ref
 | 
| 677 | 548 |          refpath = self._refpath(ref)
 | 
| 678 | 549 |          if not os.path.exists(refpath):
 | 
| 679 | -            raise CASError("Could not find ref '{}'".format(ref))
 | |
| 550 | +            raise CASCacheError("Could not find ref '{}'".format(ref))
 | |
| 680 | 551 |  | 
| 681 | 552 |          os.unlink(refpath)
 | 
| 682 | 553 |  | 
| ... | ... | @@ -792,7 +663,7 @@ class CASCache(): | 
| 792 | 663 |                  # The process serving the socket can't be cached anyway
 | 
| 793 | 664 |                  pass
 | 
| 794 | 665 |              else:
 | 
| 795 | -                raise CASError("Unsupported file type for {}".format(full_path))
 | |
| 666 | +                raise CASCacheError("Unsupported file type for {}".format(full_path))
 | |
| 796 | 667 |  | 
| 797 | 668 |          return self.add_object(digest=dir_digest,
 | 
| 798 | 669 |                                 buffer=directory.SerializeToString())
 | 
| ... | ... | @@ -811,7 +682,7 @@ class CASCache(): | 
| 811 | 682 |              if dirnode.name == name:
 | 
| 812 | 683 |                  return dirnode.digest
 | 
| 813 | 684 |  | 
| 814 | -        raise CASError("Subdirectory {} not found".format(name))
 | |
| 685 | +        raise CASCacheError("Subdirectory {} not found".format(name))
 | |
| 815 | 686 |  | 
| 816 | 687 |      def _diff_trees(self, tree_a, tree_b, *, added, removed, modified, path=""):
 | 
| 817 | 688 |          dir_a = remote_execution_pb2.Directory()
 | 
| ... | ... | @@ -909,23 +780,6 @@ class CASCache(): | 
| 909 | 780 |          for dirnode in directory.directories:
 | 
| 910 | 781 |              yield from self._required_blobs(dirnode.digest)
 | 
| 911 | 782 |  | 
| 912 | -    def _fetch_blob(self, remote, digest, stream):
 | |
| 913 | -        resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
 | |
| 914 | - | |
| 915 | -        if remote.spec.instance_name:
 | |
| 916 | -            resource_name_components.insert(0, remote.spec.instance_name)
 | |
| 917 | - | |
| 918 | -        resource_name = '/'.join(resource_name_components)
 | |
| 919 | - | |
| 920 | -        request = bytestream_pb2.ReadRequest()
 | |
| 921 | -        request.resource_name = resource_name
 | |
| 922 | -        request.read_offset = 0
 | |
| 923 | -        for response in remote.bytestream.Read(request):
 | |
| 924 | -            stream.write(response.data)
 | |
| 925 | -        stream.flush()
 | |
| 926 | - | |
| 927 | -        assert digest.size_bytes == os.fstat(stream.fileno()).st_size
 | |
| 928 | - | |
| 929 | 783 |      # _ensure_blob():
 | 
| 930 | 784 |      #
 | 
| 931 | 785 |      # Fetch and add blob if it's not already local.
 | 
| ... | ... | @@ -944,7 +798,7 @@ class CASCache(): | 
| 944 | 798 |              return objpath
 | 
| 945 | 799 |  | 
| 946 | 800 |          with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
 | 
| 947 | -            self._fetch_blob(remote, digest, f)
 | |
| 801 | +            remote._fetch_blob(digest, f)
 | |
| 948 | 802 |  | 
| 949 | 803 |              added_digest = self.add_object(path=f.name, link_directly=True)
 | 
| 950 | 804 |              assert added_digest.hash == digest.hash
 | 
| ... | ... | @@ -1051,7 +905,7 @@ class CASCache(): | 
| 1051 | 905 |      def _fetch_tree(self, remote, digest):
 | 
| 1052 | 906 |          # download but do not store the Tree object
 | 
| 1053 | 907 |          with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
 | 
| 1054 | -            self._fetch_blob(remote, digest, out)
 | |
| 908 | +            remote._fetch_blob(digest, out)
 | |
| 1055 | 909 |  | 
| 1056 | 910 |              tree = remote_execution_pb2.Tree()
 | 
| 1057 | 911 |  | 
| ... | ... | @@ -1071,39 +925,6 @@ class CASCache(): | 
| 1071 | 925 |  | 
| 1072 | 926 |          return dirdigest
 | 
| 1073 | 927 |  | 
| 1074 | -    def _send_blob(self, remote, digest, stream, u_uid=uuid.uuid4()):
 | |
| 1075 | -        resource_name_components = ['uploads', str(u_uid), 'blobs',
 | |
| 1076 | -                                    digest.hash, str(digest.size_bytes)]
 | |
| 1077 | - | |
| 1078 | -        if remote.spec.instance_name:
 | |
| 1079 | -            resource_name_components.insert(0, remote.spec.instance_name)
 | |
| 1080 | - | |
| 1081 | -        resource_name = '/'.join(resource_name_components)
 | |
| 1082 | - | |
| 1083 | -        def request_stream(resname, instream):
 | |
| 1084 | -            offset = 0
 | |
| 1085 | -            finished = False
 | |
| 1086 | -            remaining = digest.size_bytes
 | |
| 1087 | -            while not finished:
 | |
| 1088 | -                chunk_size = min(remaining, _MAX_PAYLOAD_BYTES)
 | |
| 1089 | -                remaining -= chunk_size
 | |
| 1090 | - | |
| 1091 | -                request = bytestream_pb2.WriteRequest()
 | |
| 1092 | -                request.write_offset = offset
 | |
| 1093 | -                # max. _MAX_PAYLOAD_BYTES chunks
 | |
| 1094 | -                request.data = instream.read(chunk_size)
 | |
| 1095 | -                request.resource_name = resname
 | |
| 1096 | -                request.finish_write = remaining <= 0
 | |
| 1097 | - | |
| 1098 | -                yield request
 | |
| 1099 | - | |
| 1100 | -                offset += chunk_size
 | |
| 1101 | -                finished = request.finish_write
 | |
| 1102 | - | |
| 1103 | -        response = remote.bytestream.Write(request_stream(resource_name, stream))
 | |
| 1104 | - | |
| 1105 | -        assert response.committed_size == digest.size_bytes
 | |
| 1106 | - | |
| 1107 | 928 |      def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
 | 
| 1108 | 929 |          required_blobs = self._required_blobs(digest)
 | 
| 1109 | 930 |  | 
| ... | ... | @@ -1137,7 +958,7 @@ class CASCache(): | 
| 1137 | 958 |                  if (digest.size_bytes >= remote.max_batch_total_size_bytes or
 | 
| 1138 | 959 |                          not remote.batch_update_supported):
 | 
| 1139 | 960 |                      # Too large for batch request, upload in independent request.
 | 
| 1140 | -                    self._send_blob(remote, digest, f, u_uid=u_uid)
 | |
| 961 | +                    remote._send_blob(digest, f, u_uid=u_uid)
 | |
| 1141 | 962 |                  else:
 | 
| 1142 | 963 |                      if not batch.add(digest, f):
 | 
| 1143 | 964 |                          # Not enough space left in batch request.
 | 
| ... | ... | @@ -1150,183 +971,6 @@ class CASCache(): | 
| 1150 | 971 |          batch.send()
 | 
| 1151 | 972 |  | 
| 1152 | 973 |  | 
| 1153 | -# Represents a single remote CAS cache.
 | |
| 1154 | -#
 | |
| 1155 | -class CASRemote():
 | |
| 1156 | -    def __init__(self, spec):
 | |
| 1157 | -        self.spec = spec
 | |
| 1158 | -        self._initialized = False
 | |
| 1159 | -        self.channel = None
 | |
| 1160 | -        self.bytestream = None
 | |
| 1161 | -        self.cas = None
 | |
| 1162 | -        self.ref_storage = None
 | |
| 1163 | -        self.batch_update_supported = None
 | |
| 1164 | -        self.batch_read_supported = None
 | |
| 1165 | -        self.capabilities = None
 | |
| 1166 | -        self.max_batch_total_size_bytes = None
 | |
| 1167 | - | |
| 1168 | -    def init(self):
 | |
| 1169 | -        if not self._initialized:
 | |
| 1170 | -            url = urlparse(self.spec.url)
 | |
| 1171 | -            if url.scheme == 'http':
 | |
| 1172 | -                port = url.port or 80
 | |
| 1173 | -                self.channel = grpc.insecure_channel('{}:{}'.format(url.hostname, port))
 | |
| 1174 | -            elif url.scheme == 'https':
 | |
| 1175 | -                port = url.port or 443
 | |
| 1176 | - | |
| 1177 | -                if self.spec.server_cert:
 | |
| 1178 | -                    with open(self.spec.server_cert, 'rb') as f:
 | |
| 1179 | -                        server_cert_bytes = f.read()
 | |
| 1180 | -                else:
 | |
| 1181 | -                    server_cert_bytes = None
 | |
| 1182 | - | |
| 1183 | -                if self.spec.client_key:
 | |
| 1184 | -                    with open(self.spec.client_key, 'rb') as f:
 | |
| 1185 | -                        client_key_bytes = f.read()
 | |
| 1186 | -                else:
 | |
| 1187 | -                    client_key_bytes = None
 | |
| 1188 | - | |
| 1189 | -                if self.spec.client_cert:
 | |
| 1190 | -                    with open(self.spec.client_cert, 'rb') as f:
 | |
| 1191 | -                        client_cert_bytes = f.read()
 | |
| 1192 | -                else:
 | |
| 1193 | -                    client_cert_bytes = None
 | |
| 1194 | - | |
| 1195 | -                credentials = grpc.ssl_channel_credentials(root_certificates=server_cert_bytes,
 | |
| 1196 | -                                                           private_key=client_key_bytes,
 | |
| 1197 | -                                                           certificate_chain=client_cert_bytes)
 | |
| 1198 | -                self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
 | |
| 1199 | -            else:
 | |
| 1200 | -                raise CASError("Unsupported URL: {}".format(self.spec.url))
 | |
| 1201 | - | |
| 1202 | -            self.bytestream = bytestream_pb2_grpc.ByteStreamStub(self.channel)
 | |
| 1203 | -            self.cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self.channel)
 | |
| 1204 | -            self.capabilities = remote_execution_pb2_grpc.CapabilitiesStub(self.channel)
 | |
| 1205 | -            self.ref_storage = buildstream_pb2_grpc.ReferenceStorageStub(self.channel)
 | |
| 1206 | - | |
| 1207 | -            self.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
 | |
| 1208 | -            try:
 | |
| 1209 | -                request = remote_execution_pb2.GetCapabilitiesRequest(instance_name=self.spec.instance_name)
 | |
| 1210 | -                response = self.capabilities.GetCapabilities(request)
 | |
| 1211 | -                server_max_batch_total_size_bytes = response.cache_capabilities.max_batch_total_size_bytes
 | |
| 1212 | -                if 0 < server_max_batch_total_size_bytes < self.max_batch_total_size_bytes:
 | |
| 1213 | -                    self.max_batch_total_size_bytes = server_max_batch_total_size_bytes
 | |
| 1214 | -            except grpc.RpcError as e:
 | |
| 1215 | -                # Simply use the defaults for servers that don't implement GetCapabilities()
 | |
| 1216 | -                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
 | |
| 1217 | -                    raise
 | |
| 1218 | - | |
| 1219 | -            # Check whether the server supports BatchReadBlobs()
 | |
| 1220 | -            self.batch_read_supported = False
 | |
| 1221 | -            try:
 | |
| 1222 | -                request = remote_execution_pb2.BatchReadBlobsRequest(instance_name=self.spec.instance_name)
 | |
| 1223 | -                response = self.cas.BatchReadBlobs(request)
 | |
| 1224 | -                self.batch_read_supported = True
 | |
| 1225 | -            except grpc.RpcError as e:
 | |
| 1226 | -                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
 | |
| 1227 | -                    raise
 | |
| 1228 | - | |
| 1229 | -            # Check whether the server supports BatchUpdateBlobs()
 | |
| 1230 | -            self.batch_update_supported = False
 | |
| 1231 | -            try:
 | |
| 1232 | -                request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=self.spec.instance_name)
 | |
| 1233 | -                response = self.cas.BatchUpdateBlobs(request)
 | |
| 1234 | -                self.batch_update_supported = True
 | |
| 1235 | -            except grpc.RpcError as e:
 | |
| 1236 | -                if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
 | |
| 1237 | -                        e.code() != grpc.StatusCode.PERMISSION_DENIED):
 | |
| 1238 | -                    raise
 | |
| 1239 | - | |
| 1240 | -            self._initialized = True
 | |
| 1241 | - | |
| 1242 | - | |
| 1243 | -# Represents a batch of blobs queued for fetching.
 | |
| 1244 | -#
 | |
| 1245 | -class _CASBatchRead():
 | |
| 1246 | -    def __init__(self, remote):
 | |
| 1247 | -        self._remote = remote
 | |
| 1248 | -        self._max_total_size_bytes = remote.max_batch_total_size_bytes
 | |
| 1249 | -        self._request = remote_execution_pb2.BatchReadBlobsRequest(instance_name=remote.spec.instance_name)
 | |
| 1250 | -        self._size = 0
 | |
| 1251 | -        self._sent = False
 | |
| 1252 | - | |
| 1253 | -    def add(self, digest):
 | |
| 1254 | -        assert not self._sent
 | |
| 1255 | - | |
| 1256 | -        new_batch_size = self._size + digest.size_bytes
 | |
| 1257 | -        if new_batch_size > self._max_total_size_bytes:
 | |
| 1258 | -            # Not enough space left in current batch
 | |
| 1259 | -            return False
 | |
| 1260 | - | |
| 1261 | -        request_digest = self._request.digests.add()
 | |
| 1262 | -        request_digest.hash = digest.hash
 | |
| 1263 | -        request_digest.size_bytes = digest.size_bytes
 | |
| 1264 | -        self._size = new_batch_size
 | |
| 1265 | -        return True
 | |
| 1266 | - | |
| 1267 | -    def send(self):
 | |
| 1268 | -        assert not self._sent
 | |
| 1269 | -        self._sent = True
 | |
| 1270 | - | |
| 1271 | -        if not self._request.digests:
 | |
| 1272 | -            return
 | |
| 1273 | - | |
| 1274 | -        batch_response = self._remote.cas.BatchReadBlobs(self._request)
 | |
| 1275 | - | |
| 1276 | -        for response in batch_response.responses:
 | |
| 1277 | -            if response.status.code == code_pb2.NOT_FOUND:
 | |
| 1278 | -                raise BlobNotFound(response.digest.hash, "Failed to download blob {}: {}".format(
 | |
| 1279 | -                    response.digest.hash, response.status.code))
 | |
| 1280 | -            if response.status.code != code_pb2.OK:
 | |
| 1281 | -                raise CASError("Failed to download blob {}: {}".format(
 | |
| 1282 | -                    response.digest.hash, response.status.code))
 | |
| 1283 | -            if response.digest.size_bytes != len(response.data):
 | |
| 1284 | -                raise CASError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
 | |
| 1285 | -                    response.digest.hash, response.digest.size_bytes, len(response.data)))
 | |
| 1286 | - | |
| 1287 | -            yield (response.digest, response.data)
 | |
| 1288 | - | |
| 1289 | - | |
| 1290 | -# Represents a batch of blobs queued for upload.
 | |
| 1291 | -#
 | |
| 1292 | -class _CASBatchUpdate():
 | |
| 1293 | -    def __init__(self, remote):
 | |
| 1294 | -        self._remote = remote
 | |
| 1295 | -        self._max_total_size_bytes = remote.max_batch_total_size_bytes
 | |
| 1296 | -        self._request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=remote.spec.instance_name)
 | |
| 1297 | -        self._size = 0
 | |
| 1298 | -        self._sent = False
 | |
| 1299 | - | |
| 1300 | -    def add(self, digest, stream):
 | |
| 1301 | -        assert not self._sent
 | |
| 1302 | - | |
| 1303 | -        new_batch_size = self._size + digest.size_bytes
 | |
| 1304 | -        if new_batch_size > self._max_total_size_bytes:
 | |
| 1305 | -            # Not enough space left in current batch
 | |
| 1306 | -            return False
 | |
| 1307 | - | |
| 1308 | -        blob_request = self._request.requests.add()
 | |
| 1309 | -        blob_request.digest.hash = digest.hash
 | |
| 1310 | -        blob_request.digest.size_bytes = digest.size_bytes
 | |
| 1311 | -        blob_request.data = stream.read(digest.size_bytes)
 | |
| 1312 | -        self._size = new_batch_size
 | |
| 1313 | -        return True
 | |
| 1314 | - | |
| 1315 | -    def send(self):
 | |
| 1316 | -        assert not self._sent
 | |
| 1317 | -        self._sent = True
 | |
| 1318 | - | |
| 1319 | -        if not self._request.requests:
 | |
| 1320 | -            return
 | |
| 1321 | - | |
| 1322 | -        batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
 | |
| 1323 | - | |
| 1324 | -        for response in batch_response.responses:
 | |
| 1325 | -            if response.status.code != code_pb2.OK:
 | |
| 1326 | -                raise CASError("Failed to upload blob {}: {}".format(
 | |
| 1327 | -                    response.digest.hash, response.status.code))
 | |
| 1328 | - | |
| 1329 | - | |
| 1330 | 974 |  def _grouper(iterable, n):
 | 
| 1331 | 975 |      while True:
 | 
| 1332 | 976 |          try:
 | 
| 1 | +from collections import namedtuple
 | |
| 2 | +import io
 | |
| 3 | +import os
 | |
| 4 | +import multiprocessing
 | |
| 5 | +import signal
 | |
| 6 | +from urllib.parse import urlparse
 | |
| 7 | +import uuid
 | |
| 8 | + | |
| 9 | +import grpc
 | |
| 10 | + | |
| 11 | +from .. import _yaml
 | |
| 12 | +from .._protos.google.rpc import code_pb2
 | |
| 13 | +from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
 | |
| 14 | +from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 | |
| 15 | +from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
 | |
| 16 | + | |
| 17 | +from .._exceptions import CASRemoteError, LoadError, LoadErrorReason
 | |
| 18 | +from .. import _signals
 | |
| 19 | +from .. import utils
 | |
| 20 | + | |
| 21 | +# The default limit for gRPC messages is 4 MiB.
 | |
| 22 | +# Limit payload to 1 MiB to leave sufficient headroom for metadata.
 | |
| 23 | +_MAX_PAYLOAD_BYTES = 1024 * 1024
 | |
| 24 | + | |
| 25 | + | |
| 26 | +class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
 | |
| 27 | + | |
| 28 | +    # _new_from_config_node
 | |
| 29 | +    #
 | |
| 30 | +    # Creates a CASRemoteSpec() from a YAML-loaded node
 | |
| 31 | +    #
 | |
| 32 | +    @staticmethod
 | |
| 33 | +    def _new_from_config_node(spec_node, basedir=None):
 | |
| 34 | +        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
 | |
| 35 | +        url = _yaml.node_get(spec_node, str, 'url')
 | |
| 36 | +        push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
 | |
| 37 | +        if not url:
 | |
| 38 | +            provenance = _yaml.node_get_provenance(spec_node, 'url')
 | |
| 39 | +            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 40 | +                            "{}: empty artifact cache URL".format(provenance))
 | |
| 41 | + | |
| 42 | +        instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)
 | |
| 43 | + | |
| 44 | +        server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
 | |
| 45 | +        if server_cert and basedir:
 | |
| 46 | +            server_cert = os.path.join(basedir, server_cert)
 | |
| 47 | + | |
| 48 | +        client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
 | |
| 49 | +        if client_key and basedir:
 | |
| 50 | +            client_key = os.path.join(basedir, client_key)
 | |
| 51 | + | |
| 52 | +        client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
 | |
| 53 | +        if client_cert and basedir:
 | |
| 54 | +            client_cert = os.path.join(basedir, client_cert)
 | |
| 55 | + | |
| 56 | +        if client_key and not client_cert:
 | |
| 57 | +            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
 | |
| 58 | +            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 59 | +                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
 | |
| 60 | + | |
| 61 | +        if client_cert and not client_key:
 | |
| 62 | +            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
 | |
| 63 | +            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 64 | +                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
 | |
| 65 | + | |
| 66 | +        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
 | |
| 67 | + | |
| 68 | + | |
| 69 | +CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
 | |
| 70 | + | |
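For reference, a minimal sketch of the YAML node this parser accepts. The surrounding artifacts: key, the URL, and the file names are illustrative assumptions; the three certificate paths are resolved relative to basedir when one is supplied:

    artifacts:
      url: https://cache.example.com:11002
      push: true
      server-cert: server.crt
      client-key: client.key
      client-cert: client.crt
      instance-name: main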
| 71 | + | |
| 72 | +class BlobNotFound(CASRemoteError):
 | |
| 73 | + | |
| 74 | +    def __init__(self, blob, msg):
 | |
| 75 | +        self.blob = blob
 | |
| 76 | +        super().__init__(msg)
 | |
| 77 | + | |
| 78 | + | |
| 79 | +# Represents a single remote CAS cache.
 | |
| 80 | +#
 | |
| 81 | +class CASRemote():
 | |
| 82 | +    def __init__(self, spec):
 | |
| 83 | +        self.spec = spec
 | |
| 84 | +        self._initialized = False
 | |
| 85 | +        self.channel = None
 | |
| 86 | +        self.bytestream = None
 | |
| 87 | +        self.cas = None
 | |
| 88 | +        self.ref_storage = None
 | |
| 89 | +        self.batch_update_supported = None
 | |
| 90 | +        self.batch_read_supported = None
 | |
| 91 | +        self.capabilities = None
 | |
| 92 | +        self.max_batch_total_size_bytes = None
 | |
| 93 | + | |
| 94 | +    def init(self):
 | |
| 95 | +        if not self._initialized:
 | |
| 96 | +            url = urlparse(self.spec.url)
 | |
| 97 | +            if url.scheme == 'http':
 | |
| 98 | +                port = url.port or 80
 | |
| 99 | +                self.channel = grpc.insecure_channel('{}:{}'.format(url.hostname, port))
 | |
| 100 | +            elif url.scheme == 'https':
 | |
| 101 | +                port = url.port or 443
 | |
| 102 | + | |
| 103 | +                if self.spec.server_cert:
 | |
| 104 | +                    with open(self.spec.server_cert, 'rb') as f:
 | |
| 105 | +                        server_cert_bytes = f.read()
 | |
| 106 | +                else:
 | |
| 107 | +                    server_cert_bytes = None
 | |
| 108 | + | |
| 109 | +                if self.spec.client_key:
 | |
| 110 | +                    with open(self.spec.client_key, 'rb') as f:
 | |
| 111 | +                        client_key_bytes = f.read()
 | |
| 112 | +                else:
 | |
| 113 | +                    client_key_bytes = None
 | |
| 114 | + | |
| 115 | +                if self.spec.client_cert:
 | |
| 116 | +                    with open(self.spec.client_cert, 'rb') as f:
 | |
| 117 | +                        client_cert_bytes = f.read()
 | |
| 118 | +                else:
 | |
| 119 | +                    client_cert_bytes = None
 | |
| 120 | + | |
| 121 | +                credentials = grpc.ssl_channel_credentials(root_certificates=server_cert_bytes,
 | |
| 122 | +                                                           private_key=client_key_bytes,
 | |
| 123 | +                                                           certificate_chain=client_cert_bytes)
 | |
| 124 | +                self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
 | |
| 125 | +            else:
 | |
| 126 | +                raise CASRemoteError("Unsupported URL: {}".format(self.spec.url))
 | |
| 127 | + | |
| 128 | +            self.bytestream = bytestream_pb2_grpc.ByteStreamStub(self.channel)
 | |
| 129 | +            self.cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self.channel)
 | |
| 130 | +            self.capabilities = remote_execution_pb2_grpc.CapabilitiesStub(self.channel)
 | |
| 131 | +            self.ref_storage = buildstream_pb2_grpc.ReferenceStorageStub(self.channel)
 | |
| 132 | + | |
| 133 | +            self.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
 | |
| 134 | +            try:
 | |
| 135 | +                request = remote_execution_pb2.GetCapabilitiesRequest()
 | |
| 136 | +                response = self.capabilities.GetCapabilities(request)
 | |
| 137 | +                server_max_batch_total_size_bytes = response.cache_capabilities.max_batch_total_size_bytes
 | |
| 138 | +                if 0 < server_max_batch_total_size_bytes < self.max_batch_total_size_bytes:
 | |
| 139 | +                    self.max_batch_total_size_bytes = server_max_batch_total_size_bytes
 | |
| 140 | +            except grpc.RpcError as e:
 | |
| 141 | +                # Simply use the defaults for servers that don't implement GetCapabilities()
 | |
| 142 | +                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
 | |
| 143 | +                    raise
 | |
| 144 | + | |
| 145 | +            # Check whether the server supports BatchReadBlobs()
 | |
| 146 | +            self.batch_read_supported = False
 | |
| 147 | +            try:
 | |
| 148 | +                request = remote_execution_pb2.BatchReadBlobsRequest()
 | |
| 149 | +                response = self.cas.BatchReadBlobs(request)
 | |
| 150 | +                self.batch_read_supported = True
 | |
| 151 | +            except grpc.RpcError as e:
 | |
| 152 | +                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
 | |
| 153 | +                    raise
 | |
| 154 | + | |
| 155 | +            # Check whether the server supports BatchUpdateBlobs()
 | |
| 156 | +            self.batch_update_supported = False
 | |
| 157 | +            try:
 | |
| 158 | +                request = remote_execution_pb2.BatchUpdateBlobsRequest()
 | |
| 159 | +                response = self.cas.BatchUpdateBlobs(request)
 | |
| 160 | +                self.batch_update_supported = True
 | |
| 161 | +            except grpc.RpcError as e:
 | |
| 162 | +                if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
 | |
| 163 | +                        e.code() != grpc.StatusCode.PERMISSION_DENIED):
 | |
| 164 | +                    raise
 | |
| 165 | + | |
| 166 | +            self._initialized = True
 | |
| 167 | + | |
| 168 | +    # check_remote
 | |
| 169 | +    #
 | |
| 170 | +    # Used when checking whether remote specs work from the main BuildStream
 | |
| 171 | +    # thread; runs the check in a separate process to avoid creating gRPC
 | |
| 172 | +    # threads in the main BuildStream process.
 | |
| 173 | +    # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
 | |
| 174 | +    @classmethod
 | |
| 175 | +    def check_remote(cls, remote_spec, q):
 | |
| 176 | + | |
| 177 | +        def __check_remote():
 | |
| 178 | +            try:
 | |
| 179 | +                remote = cls(remote_spec)
 | |
| 180 | +                remote.init()
 | |
| 181 | + | |
| 182 | +                request = buildstream_pb2.StatusRequest()
 | |
| 183 | +                response = remote.ref_storage.Status(request)
 | |
| 184 | + | |
| 185 | +                if remote_spec.push and not response.allow_updates:
 | |
| 186 | +                    q.put('CAS server does not allow push')
 | |
| 187 | +                else:
 | |
| 188 | +                    # No error
 | |
| 189 | +                    q.put(None)
 | |
| 190 | + | |
| 191 | +            except grpc.RpcError as e:
 | |
| 192 | +                # str(e) is too verbose for errors reported to the user
 | |
| 193 | +                q.put(e.details())
 | |
| 194 | + | |
| 195 | +            except Exception as e:               # pylint: disable=broad-except
 | |
| 196 | +                # Whatever happens, we need to return it to the calling process
 | |
| 197 | +                #
 | |
| 198 | +                q.put(str(e))
 | |
| 199 | + | |
| 200 | +        p = multiprocessing.Process(target=__check_remote)
 | |
| 201 | + | |
| 202 | +        try:
 | |
| 203 | +            # Keep SIGINT blocked in the child process
 | |
| 204 | +            with _signals.blocked([signal.SIGINT], ignore=False):
 | |
| 205 | +                p.start()
 | |
| 206 | + | |
| 207 | +            error = q.get()
 | |
| 208 | +            p.join()
 | |
| 209 | +        except KeyboardInterrupt:
 | |
| 210 | +            utils._kill_process_tree(p.pid)
 | |
| 211 | +            raise
 | |
| 212 | + | |
| 213 | +        return error
 | |
| 214 | + | |
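check_remote() returns None on success or a short error string, which is exactly what the artifactcache hunk above consumes. A minimal usage sketch, with an illustrative URL:

    import multiprocessing

    from buildstream._cas import CASRemote, CASRemoteSpec

    spec = CASRemoteSpec('https://cache.example.com:11002', push=True)
    q = multiprocessing.Queue()
    # Spawns a child process, joins it, and returns the child's report
    error = CASRemote.check_remote(spec, q)
    if error:
        print('CAS remote unusable: {}'.format(error))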
| 215 | +    # verify_digest_on_remote():
 | |
| 216 | +    #
 | |
| 217 | +    # Check whether the object is already on the server in which case
 | |
| 218 | +    # there is no need to upload it.
 | |
| 219 | +    #
 | |
| 220 | +    # Args:
 | |
| 221 | +    #     digest (Digest): The object digest.
 | |
| 222 | +    #
 | |
| 223 | +    def verify_digest_on_remote(self, digest):
 | |
| 224 | +        self.init()
 | |
| 225 | + | |
| 226 | +        request = remote_execution_pb2.FindMissingBlobsRequest()
 | |
| 227 | +        request.blob_digests.extend([digest])
 | |
| 228 | + | |
| 229 | +        response = self.cas.FindMissingBlobs(request)
 | |
| 230 | +        if digest in response.missing_blob_digests:
 | |
| 231 | +            return False
 | |
| 232 | + | |
| 233 | +        return True
 | |
| 234 | + | |
| 235 | +    # push_message():
 | |
| 236 | +    #
 | |
| 237 | +    # Push the given protobuf message to a remote.
 | |
| 238 | +    #
 | |
| 239 | +    # Args:
 | |
| 240 | +    #     message (Message): A protobuf message to push.
 | |
| 241 | +    #
 | |
| 242 | +    # Raises:
 | |
| 243 | +    #     (CASRemoteError): if there was an error
 | |
| 244 | +    #
 | |
| 245 | +    def push_message(self, message):
 | |
| 246 | + | |
| 247 | +        message_buffer = message.SerializeToString()
 | |
| 248 | +        message_digest = utils._message_digest(message_buffer)
 | |
| 249 | + | |
| 250 | +        self.init()
 | |
| 251 | + | |
| 252 | +        with io.BytesIO(message_buffer) as b:
 | |
| 253 | +            self._send_blob(message_digest, b)
 | |
| 254 | + | |
| 255 | +        return message_digest
 | |
| 256 | + | |
| 257 | +    ################################################
 | |
| 258 | +    #             Local Private Methods            #
 | |
| 259 | +    ################################################
 | |
| 260 | +    def _fetch_blob(self, digest, stream):
 | |
| 261 | +        resource_name = '/'.join(['blobs', digest.hash, str(digest.size_bytes)])
 | |
| 262 | +        request = bytestream_pb2.ReadRequest()
 | |
| 263 | +        request.resource_name = resource_name
 | |
| 264 | +        request.read_offset = 0
 | |
| 265 | +        for response in self.bytestream.Read(request):
 | |
| 266 | +            stream.write(response.data)
 | |
| 267 | +        stream.flush()
 | |
| 268 | + | |
| 269 | +        assert digest.size_bytes == os.fstat(stream.fileno()).st_size
 | |
| 270 | + | |
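Note that the closing assert in _fetch_blob() calls os.fstat(stream.fileno()), so the stream must be backed by a real file descriptor; the CASCache hunks above pass a tempfile.NamedTemporaryFile for exactly this reason.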
| 271 | +    def _send_blob(self, digest, stream, u_uid=uuid.uuid4()):
 | |
| 272 | +        resource_name = '/'.join(['uploads', str(u_uid), 'blobs',
 | |
| 273 | +                                  digest.hash, str(digest.size_bytes)])
 | |
| 274 | + | |
| 275 | +        def request_stream(resname, instream):
 | |
| 276 | +            offset = 0
 | |
| 277 | +            finished = False
 | |
| 278 | +            remaining = digest.size_bytes
 | |
| 279 | +            while not finished:
 | |
| 280 | +                chunk_size = min(remaining, _MAX_PAYLOAD_BYTES)
 | |
| 281 | +                remaining -= chunk_size
 | |
| 282 | + | |
| 283 | +                request = bytestream_pb2.WriteRequest()
 | |
| 284 | +                request.write_offset = offset
 | |
| 285 | +                # max. _MAX_PAYLOAD_BYTES chunks
 | |
| 286 | +                request.data = instream.read(chunk_size)
 | |
| 287 | +                request.resource_name = resname
 | |
| 288 | +                request.finish_write = remaining <= 0
 | |
| 289 | + | |
| 290 | +                yield request
 | |
| 291 | + | |
| 292 | +                offset += chunk_size
 | |
| 293 | +                finished = request.finish_write
 | |
| 294 | + | |
| 295 | +        response = self.bytestream.Write(request_stream(resource_name, stream))
 | |
| 296 | + | |
| 297 | +        assert response.committed_size == digest.size_bytes
 | |
| 298 | + | |
| 299 | + | |
| 300 | +# Represents a batch of blobs queued for fetching.
 | |
| 301 | +#
 | |
| 302 | +class _CASBatchRead():
 | |
| 303 | +    def __init__(self, remote):
 | |
| 304 | +        self._remote = remote
 | |
| 305 | +        self._max_total_size_bytes = remote.max_batch_total_size_bytes
 | |
| 306 | +        self._request = remote_execution_pb2.BatchReadBlobsRequest()
 | |
| 307 | +        self._size = 0
 | |
| 308 | +        self._sent = False
 | |
| 309 | + | |
| 310 | +    def add(self, digest):
 | |
| 311 | +        assert not self._sent
 | |
| 312 | + | |
| 313 | +        new_batch_size = self._size + digest.size_bytes
 | |
| 314 | +        if new_batch_size > self._max_total_size_bytes:
 | |
| 315 | +            # Not enough space left in current batch
 | |
| 316 | +            return False
 | |
| 317 | + | |
| 318 | +        request_digest = self._request.digests.add()
 | |
| 319 | +        request_digest.hash = digest.hash
 | |
| 320 | +        request_digest.size_bytes = digest.size_bytes
 | |
| 321 | +        self._size = new_batch_size
 | |
| 322 | +        return True
 | |
| 323 | + | |
| 324 | +    def send(self):
 | |
| 325 | +        assert not self._sent
 | |
| 326 | +        self._sent = True
 | |
| 327 | + | |
| 328 | +        if not self._request.digests:
 | |
| 329 | +            return
 | |
| 330 | + | |
| 331 | +        batch_response = self._remote.cas.BatchReadBlobs(self._request)
 | |
| 332 | + | |
| 333 | +        for response in batch_response.responses:
 | |
| 334 | +            if response.status.code == code_pb2.NOT_FOUND:
 | |
| 335 | +                raise BlobNotFound(response.digest.hash, "Failed to download blob {}: {}".format(
 | |
| 336 | +                    response.digest.hash, response.status.code))
 | |
| 337 | +            if response.status.code != code_pb2.OK:
 | |
| 338 | +                raise CASRemoteError("Failed to download blob {}: {}".format(
 | |
| 339 | +                    response.digest.hash, response.status.code))
 | |
| 340 | +            if response.digest.size_bytes != len(response.data):
 | |
| 341 | +                raise CASRemoteError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
 | |
| 342 | +                    response.digest.hash, response.digest.size_bytes, len(response.data)))
 | |
| 343 | + | |
| 344 | +            yield (response.digest, response.data)
 | |
| 345 | + | |
| 346 | + | |
| 347 | +# Represents a batch of blobs queued for upload.
 | |
| 348 | +#
 | |
| 349 | +class _CASBatchUpdate():
 | |
| 350 | +    def __init__(self, remote):
 | |
| 351 | +        self._remote = remote
 | |
| 352 | +        self._max_total_size_bytes = remote.max_batch_total_size_bytes
 | |
| 353 | +        self._request = remote_execution_pb2.BatchUpdateBlobsRequest()
 | |
| 354 | +        self._size = 0
 | |
| 355 | +        self._sent = False
 | |
| 356 | + | |
| 357 | +    def add(self, digest, stream):
 | |
| 358 | +        assert not self._sent
 | |
| 359 | + | |
| 360 | +        new_batch_size = self._size + digest.size_bytes
 | |
| 361 | +        if new_batch_size > self._max_total_size_bytes:
 | |
| 362 | +            # Not enough space left in current batch
 | |
| 363 | +            return False
 | |
| 364 | + | |
| 365 | +        blob_request = self._request.requests.add()
 | |
| 366 | +        blob_request.digest.hash = digest.hash
 | |
| 367 | +        blob_request.digest.size_bytes = digest.size_bytes
 | |
| 368 | +        blob_request.data = stream.read(digest.size_bytes)
 | |
| 369 | +        self._size = new_batch_size
 | |
| 370 | +        return True
 | |
| 371 | + | |
| 372 | +    def send(self):
 | |
| 373 | +        assert not self._sent
 | |
| 374 | +        self._sent = True
 | |
| 375 | + | |
| 376 | +        if not self._request.requests:
 | |
| 377 | +            return
 | |
| 378 | + | |
| 379 | +        batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
 | |
| 380 | + | |
| 381 | +        for response in batch_response.responses:
 | |
| 382 | +            if response.status.code != code_pb2.OK:
 | |
| 383 | +                raise CASRemoteError("Failed to upload blob {}: {}".format(
 | |
| 384 | +                    response.digest.hash, response.status.code)) | 
| ... | ... | @@ -31,7 +31,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError | 
| 31 | 31 |  from ._message import Message, MessageType
 | 
| 32 | 32 |  from ._profile import Topics, profile_start, profile_end
 | 
| 33 | 33 |  from ._artifactcache import ArtifactCache
 | 
| 34 | -from ._artifactcache.cascache import CASCache
 | |
| 34 | +from ._cas import CASCache
 | |
| 35 | 35 |  from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
 | 
| 36 | 36 |  from .plugin import _plugin_lookup
 | 
| 37 | 37 |  from .sandbox import SandboxRemote
 | 
| ... | ... | @@ -284,6 +284,21 @@ class CASError(BstError): | 
| 284 | 284 |          super().__init__(message, detail=detail, domain=ErrorDomain.CAS, reason=reason, temporary=True)
 | 
| 285 | 285 |  | 
| 286 | 286 |  | 
| 287 | +# CASRemoteError
 | |
| 288 | +#
 | |
| 289 | +# Raised when errors are encountered in the remote CAS
 | |
| 290 | +class CASRemoteError(CASError):
 | |
| 291 | +    pass
 | |
| 292 | + | |
| 293 | + | |
| 294 | +# CASCacheError
 | |
| 295 | +#
 | |
| 296 | +# Raised when errors are encountered in the local CASCache
 | |
| 297 | +#
 | |
| 298 | +class CASCacheError(CASError):
 | |
| 299 | +    pass
 | |
| 300 | + | |
| 301 | + | |
| 287 | 302 |  # PipelineError
 | 
| 288 | 303 |  #
 | 
| 289 | 304 |  # Raised from pipeline operations
 | 
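Since both new classes derive from CASError, existing except CASError handlers keep working, while new call sites can distinguish remote failures from local repository problems. A minimal sketch, with do_cas_work() and log() as hypothetical stand-ins:

    from buildstream._exceptions import CASError, CASCacheError, CASRemoteError

    try:
        do_cas_work()
    except CASRemoteError as e:
        log('remote CAS failed: {}'.format(e))
    except CASCacheError as e:
        log('local CAS repository failed: {}'.format(e))
    except CASError as e:
        log('other CAS failure: {}'.format(e))  # would also catch both subclasses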
| ... | ... | @@ -38,7 +38,7 @@ from .._protos.google.rpc import code_pb2 | 
| 38 | 38 |  from .._exceptions import SandboxError
 | 
| 39 | 39 |  from .. import _yaml
 | 
| 40 | 40 |  from .._protos.google.longrunning import operations_pb2, operations_pb2_grpc
 | 
| 41 | -from .._artifactcache.cascache import CASRemote, CASRemoteSpec
 | |
| 41 | +from .._cas import CASRemote, CASRemoteSpec
 | |
| 42 | 42 |  | 
| 43 | 43 |  | 
| 44 | 44 |  class RemoteExecutionSpec(namedtuple('RemoteExecutionSpec', 'exec_service storage_service action_service')):
 | 
| ... | ... | @@ -348,17 +348,17 @@ class SandboxRemote(Sandbox): | 
| 348 | 348 |              except grpc.RpcError as e:
 | 
| 349 | 349 |                  raise SandboxError("Failed to push source directory to remote: {}".format(e)) from e
 | 
| 350 | 350 |  | 
| 351 | -            if not cascache.verify_digest_on_remote(casremote, upload_vdir.ref):
 | |
| 351 | +            if not casremote.verify_digest_on_remote(upload_vdir.ref):
 | |
| 352 | 352 |                  raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")
 | 
| 353 | 353 |  | 
| 354 | 354 |              # Push command and action
 | 
| 355 | 355 |              try:
 | 
| 356 | -                cascache.push_message(casremote, command_proto)
 | |
| 356 | +                casremote.push_message(command_proto)
 | |
| 357 | 357 |              except grpc.RpcError as e:
 | 
| 358 | 358 |                  raise SandboxError("Failed to push command to remote: {}".format(e))
 | 
| 359 | 359 |  | 
| 360 | 360 |              try:
 | 
| 361 | -                cascache.push_message(casremote, action)
 | |
| 361 | +                casremote.push_message(action)
 | |
| 362 | 362 |              except grpc.RpcError as e:
 | 
| 363 | 363 |                  raise SandboxError("Failed to push action to remote: {}".format(e))
 | 
| 364 | 364 |  | 
| ... | ... | @@ -94,7 +94,7 @@ requiring BuildStream's more exigent dependencies by setting the | 
| 94 | 94 |  Command reference
 | 
| 95 | 95 |  ~~~~~~~~~~~~~~~~~
 | 
| 96 | 96 |  | 
| 97 | -.. click:: buildstream._artifactcache.casserver:server_main
 | |
| 97 | +.. click:: buildstream._cas.casserver:server_main
 | |
| 98 | 98 |     :prog: bst-artifact-server
 | 
| 99 | 99 |  | 
| 100 | 100 |  | 
| ... | ... | @@ -3,8 +3,7 @@ import pytest | 
| 3 | 3 |  import itertools
 | 
| 4 | 4 |  import os
 | 
| 5 | 5 |  | 
| 6 | -from buildstream._artifactcache import ArtifactCacheSpec
 | |
| 7 | -from buildstream._artifactcache.artifactcache import _configured_remote_artifact_cache_specs
 | |
| 6 | +from buildstream._artifactcache import ArtifactCacheSpec, _configured_remote_artifact_cache_specs
 | |
| 8 | 7 |  from buildstream._context import Context
 | 
| 9 | 8 |  from buildstream._project import Project
 | 
| 10 | 9 |  from buildstream.utils import _deduplicate
 | 
| ... | ... | @@ -342,13 +342,13 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success): | 
| 342 | 342 |          total_space = 10000
 | 
| 343 | 343 |  | 
| 344 | 344 |      volume_space_patch = mock.patch(
 | 
| 345 | -        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
 | |
| 345 | +        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
 | |
| 346 | 346 |          autospec=True,
 | 
| 347 | 347 |          return_value=(free_space, total_space),
 | 
| 348 | 348 |      )
 | 
| 349 | 349 |  | 
| 350 | 350 |      cache_size_patch = mock.patch(
 | 
| 351 | -        "buildstream._artifactcache.artifactcache.ArtifactCache.get_cache_size",
 | |
| 351 | +        "buildstream._artifactcache.ArtifactCache.get_cache_size",
 | |
| 352 | 352 |          autospec=True,
 | 
| 353 | 353 |          return_value=0,
 | 
| 354 | 354 |      )
 | 
| ... | ... | @@ -3,7 +3,7 @@ import pytest | 
| 3 | 3 |  | 
| 4 | 4 |  from buildstream._exceptions import ErrorDomain
 | 
| 5 | 5 |  | 
| 6 | -from buildstream._artifactcache.cascache import CASCache
 | |
| 6 | +from buildstream._cas import CASCache
 | |
| 7 | 7 |  from buildstream.storage._casbaseddirectory import CasBasedDirectory
 | 
| 8 | 8 |  from buildstream.storage._filebaseddirectory import FileBasedDirectory
 | 
| 9 | 9 |  | 
| ... | ... | @@ -8,7 +8,7 @@ from tests.testutils import cli | 
| 8 | 8 |  from buildstream.storage._casbaseddirectory import CasBasedDirectory
 | 
| 9 | 9 |  from buildstream.storage._filebaseddirectory import FileBasedDirectory
 | 
| 10 | 10 |  from buildstream._artifactcache import ArtifactCache
 | 
| 11 | -from buildstream._artifactcache.cascache import CASCache
 | |
| 11 | +from buildstream._cas import CASCache
 | |
| 12 | 12 |  from buildstream import utils
 | 
| 13 | 13 |  | 
| 14 | 14 |  | 
| ... | ... | @@ -11,8 +11,8 @@ from multiprocessing import Process, Queue | 
| 11 | 11 |  import pytest_cov
 | 
| 12 | 12 |  | 
| 13 | 13 |  from buildstream import _yaml
 | 
| 14 | -from buildstream._artifactcache.cascache import CASCache
 | |
| 15 | -from buildstream._artifactcache.casserver import create_server
 | |
| 14 | +from buildstream._cas import CASCache
 | |
| 15 | +from buildstream._cas.casserver import create_server
 | |
| 16 | 16 |  from buildstream._exceptions import CASError
 | 
| 17 | 17 |  from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 | 
| 18 | 18 |  | 
| ... | ... | @@ -23,7 +23,7 @@ def test_parse_size_over_1024T(cli, tmpdir): | 
| 23 | 23 |      _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
 | 
| 24 | 24 |  | 
| 25 | 25 |      volume_space_patch = mock.patch(
 | 
| 26 | -        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
 | |
| 26 | +        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
 | |
| 27 | 27 |          autospec=True,
 | 
| 28 | 28 |          return_value=(1025 * TiB, 1025 * TiB)
 | 
| 29 | 29 |      )
 | 
