[Notes] [Git][BuildStream/buildstream][raoul/cas-refactor] 19 commits: dev-requirements: pytest-cov==2.6.1 for '--no-cov'

Raoul Hidalgo Charman pushed to branch raoul/cas-refactor at BuildStream / buildstream

Commits:

21 changed files:

Changes:

  • .gitlab-ci.yml

    @@ -61,7 +61,7 @@ tests-ubuntu-18.04:
       <<: *tests
     
     overnight-fedora-28-aarch64:
    -  image: buildstream/testsuite-fedora:aarch64-28-06bab030-32a101f6
    +  image: buildstream/testsuite-fedora:aarch64-28-5da27168-32c47d1c
       tags:
       - aarch64
       <<: *tests
    @@ -70,6 +70,12 @@ overnight-fedora-28-aarch64:
       except: []
       only:
       - schedules
    +  before_script:
    +    # grpcio needs to be compiled from source on aarch64 so we additionally
    +    # need a C++ compiler here.
    +    # FIXME: Ideally this would be provided by the base image. This will be
    +    # unblocked by https://gitlab.com/BuildStream/buildstream-docker-images/issues/34
    +    - dnf install -y gcc-c++
     
     tests-unix:
       # Use fedora here, to a) run a test on fedora and b) ensure that we
    @@ -90,7 +96,6 @@ tests-unix:
         # Since the unix platform is required to run as root, no user change required
         - ${TEST_COMMAND}
     
    -
     tests-fedora-missing-deps:
       # Ensure that tests behave nicely while missing bwrap and ostree
       image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
    @@ -108,6 +113,22 @@ tests-fedora-missing-deps:
     
         - ${TEST_COMMAND}
     
    +tests-fedora-update-deps:
    +  # Check if the tests pass after updating requirements to their latest
    +  # allowed version.
    +  allow_failure: true
    +  image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
    +  <<: *tests
    +
    +  script:
    +    - useradd -Um buildstream
    +    - chown -R buildstream:buildstream .
    +
    +    - make --always-make --directory requirements
    +    - cat requirements/*.txt
    +
    +    - su buildstream -c "${TEST_COMMAND}"
    +
     # Lint separately from testing
     lint:
       stage: test
    @@ -140,8 +161,8 @@ docs:
       stage: test
       variables:
         BST_EXT_URL: git+https://gitlab.com/BuildStream/bst-external.git
    -    BST_EXT_REF: 573843768f4d297f85dc3067465b3c7519a8dcc3 # 0.7.0
    -    FD_SDK_REF: 612f66e218445eee2b1a9d7dd27c9caba571612e # freedesktop-sdk-18.08.19-54-g612f66e2
    +    BST_EXT_REF: 0.9.0-0-g63a19e8068bd777bd9cd59b1a9442f9749ea5a85
    +    FD_SDK_REF: freedesktop-sdk-18.08.25-0-g250939d465d6dd7768a215f1fa59c4a3412fc337
       before_script:
       - |
         mkdir -p "${HOME}/.config"
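
    The new tests-fedora-update-deps job refreshes the pinned requirements
    before running the suite. Assuming the repository's requirements/ directory
    provides the Makefile the job invokes, the same refresh can be reproduced
    locally with:

        make --always-make --directory requirements
        cat requirements/*.txt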
    

  • CONTRIBUTING.rst

    @@ -1534,6 +1534,10 @@ You can always abort on the first failure by running::
     
       tox -- -x
     
    +Similarly, you may also be interested in the ``--last-failed`` and
    +``--failed-first`` options as per the
    +`pytest cache <https://docs.pytest.org/en/latest/cache.html>`_ documentation.
    +
     If you want to run a specific test or a group of tests, you
     can specify a prefix to match. E.g. if you want to run all of
     the frontend tests you can do::
    @@ -1545,6 +1549,12 @@ If you wanted to run the test_build_track test within frontend/buildtrack.py you
     
       tox -- tests/frontend/buildtrack.py::test_build_track
     
    +When running only a few tests, you may find the coverage and timing output
    +excessive, there are options to trim them. Note that coverage step will fail.
    +Here is an example::
    +
    +  tox -- --no-cov --durations=1 tests/frontend/buildtrack.py::test_build_track
    +
     We also have a set of slow integration tests that are disabled by
     default - you will notice most of them marked with SKIP in the pytest
     output. To run them, you can use::
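
    The ``--last-failed`` and ``--failed-first`` options referenced in the
    first hunk are standard pytest flags and pass straight through tox's ``--``
    separator, for example:

        tox -- --last-failed     # re-run only the tests that failed last time
        tox -- --failed-first    # run previous failures first, then the rest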
    

  • buildstream/_artifactcache/artifactcache.py → buildstream/_artifactcache.py

    @@ -19,18 +19,16 @@
     
     import multiprocessing
     import os
    -import signal
     import string
     from collections.abc import Mapping
     
    -from ..types import _KeyStrength
    -from .._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
    -from .._message import Message, MessageType
    -from .. import _signals
    -from .. import utils
    -from .. import _yaml
    +from .types import _KeyStrength
    +from ._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
    +from ._message import Message, MessageType
    +from . import utils
    +from . import _yaml
     
    -from .cascache import CASRemote, CASRemoteSpec
    +from ._cas import CASRemote, CASRemoteSpec
     
     
     CACHE_SIZE_FILE = "cache_size"
    @@ -375,20 +373,8 @@ class ArtifactCache():
             remotes = {}
             q = multiprocessing.Queue()
             for remote_spec in remote_specs:
    -            # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    -            # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    -            p = multiprocessing.Process(target=self.cas.initialize_remote, args=(remote_spec, q))
     
    -            try:
    -                # Keep SIGINT blocked in the child process
    -                with _signals.blocked([signal.SIGINT], ignore=False):
    -                    p.start()
    -
    -                error = q.get()
    -                p.join()
    -            except KeyboardInterrupt:
    -                utils._kill_process_tree(p.pid)
    -                raise
    +            error = CASRemote.check_remote(remote_spec, q)
     
                 if error and on_failure:
                     on_failure(remote_spec.url, error)
    @@ -747,7 +733,7 @@ class ArtifactCache():
                                     "servers are configured as push remotes.")
     
             for remote in push_remotes:
    -            message_digest = self.cas.push_message(remote, message)
    +            message_digest = remote.push_message(message)
     
             return message_digest
     
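    The second hunk is the heart of the refactor: the fork-and-check dance
    moves behind CASRemote.check_remote(), so the remote initialization loop in
    the artifact cache reduces to roughly the following (a minimal sketch
    assembled from the hunks above, not the complete method):

        q = multiprocessing.Queue()
        for remote_spec in remote_specs:
            # check_remote() forks a subprocess internally, since gRPC threads
            # must not be created in the main BuildStream process, and returns
            # the error string posted to the queue (None on success).
            error = CASRemote.check_remote(remote_spec, q)

            if error and on_failure:
                on_failure(remote_spec.url, error)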
    

  • buildstream/_artifactcache/__init__.py → buildstream/_cas/__init__.py

    @@ -17,4 +17,5 @@
     #  Authors:
     #        Tristan Van Berkom <tristan vanberkom codethink co uk>
     
    -from .artifactcache import ArtifactCache, ArtifactCacheSpec, CACHE_SIZE_FILE
    +from .cascache import CASCache
    +from .casremote import CASRemote, CASRemoteSpec
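
    With the package split, both halves come from the new buildstream._cas
    package; the _artifactcache.py hunk above uses the relative form, which
    from outside the package would read:

        from buildstream._cas import CASCache, CASRemote, CASRemoteSpec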

  • buildstream/_artifactcache/cascache.py → buildstream/_cas/cascache.py

    @@ -17,85 +17,23 @@
     #  Authors:
     #        Jürg Billeter <juerg billeter codethink co uk>
     
    -from collections import namedtuple
     import hashlib
     import itertools
    -import io
     import os
     import stat
     import tempfile
     import uuid
     import contextlib
    -from urllib.parse import urlparse
     
     import grpc
     
    -from .._protos.google.rpc import code_pb2
    -from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
    -from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
    -from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
    +from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    +from .._protos.buildstream.v2 import buildstream_pb2
     
     from .. import utils
    -from .._exceptions import CASError, LoadError, LoadErrorReason
    -from .. import _yaml
    +from .._exceptions import CASCacheError
     
    -
    -# The default limit for gRPC messages is 4 MiB.
    -# Limit payload to 1 MiB to leave sufficient headroom for metadata.
    -_MAX_PAYLOAD_BYTES = 1024 * 1024
    -
    -
    -class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
    -
    -    # _new_from_config_node
    -    #
    -    # Creates an CASRemoteSpec() from a YAML loaded node
    -    #
    -    @staticmethod
    -    def _new_from_config_node(spec_node, basedir=None):
    -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
    -        url = _yaml.node_get(spec_node, str, 'url')
    -        push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
    -        if not url:
    -            provenance = _yaml.node_get_provenance(spec_node, 'url')
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    -                            "{}: empty artifact cache URL".format(provenance))
    -
    -        instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)
    -
    -        server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
    -        if server_cert and basedir:
    -            server_cert = os.path.join(basedir, server_cert)
    -
    -        client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
    -        if client_key and basedir:
    -            client_key = os.path.join(basedir, client_key)
    -
    -        client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
    -        if client_cert and basedir:
    -            client_cert = os.path.join(basedir, client_cert)
    -
    -        if client_key and not client_cert:
    -            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    -                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
    -
    -        if client_cert and not client_key:
    -            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    -                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
    -
    -        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
    -
    -
    -CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
    -
    -
    -class BlobNotFound(CASError):
    -
    -    def __init__(self, blob, msg):
    -        self.blob = blob
    -        super().__init__(msg)
    +from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate
     
     
     # A CASCache manages a CAS repository as specified in the Remote Execution API.
    @@ -120,7 +58,7 @@ class CASCache():
             headdir = os.path.join(self.casdir, 'refs', 'heads')
             objdir = os.path.join(self.casdir, 'objects')
             if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
    -            raise CASError("CAS repository check failed for '{}'".format(self.casdir))
    +            raise CASCacheError("CAS repository check failed for '{}'".format(self.casdir))
     
         # contains():
         #
    @@ -169,7 +107,7 @@ class CASCache():
         #     subdir (str): Optional specific dir to extract
         #
         # Raises:
    -    #     CASError: In cases there was an OSError, or if the ref did not exist.
    +    #     CASCacheError: In cases there was an OSError, or if the ref did not exist.
         #
         # Returns: path to extracted directory
         #
    @@ -201,7 +139,7 @@ class CASCache():
                     # Another process beat us to rename
                     pass
                 except OSError as e:
    -                raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
    +                raise CASCacheError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
     
             return originaldest
     
    @@ -245,29 +183,6 @@ class CASCache():
     
             return modified, removed, added
     
    -    def initialize_remote(self, remote_spec, q):
    -        try:
    -            remote = CASRemote(remote_spec)
    -            remote.init()
    -
    -            request = buildstream_pb2.StatusRequest(instance_name=remote_spec.instance_name)
    -            response = remote.ref_storage.Status(request)
    -
    -            if remote_spec.push and not response.allow_updates:
    -                q.put('CAS server does not allow push')
    -            else:
    -                # No error
    -                q.put(None)
    -
    -        except grpc.RpcError as e:
    -            # str(e) is too verbose for errors reported to the user
    -            q.put(e.details())
    -
    -        except Exception as e:               # pylint: disable=broad-except
    -            # Whatever happens, we need to return it to the calling process
    -            #
    -            q.put(str(e))
    -
         # pull():
         #
         # Pull a ref from a remote repository.
    @@ -306,7 +221,7 @@ class CASCache():
                 return True
             except grpc.RpcError as e:
                 if e.code() != grpc.StatusCode.NOT_FOUND:
    -                raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
    +                raise CASCacheError("Failed to pull ref {}: {}".format(ref, e)) from e
                 else:
                     return False
             except BlobNotFound as e:
    @@ -360,7 +275,7 @@ class CASCache():
         #   (bool): True if any remote was updated, False if no pushes were required
         #
         # Raises:
    -    #   (CASError): if there was an error
    +    #   (CASCacheError): if there was an error
         #
         def push(self, refs, remote):
             skipped_remote = True
    @@ -395,7 +310,7 @@ class CASCache():
                     skipped_remote = False
             except grpc.RpcError as e:
                 if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
    -                raise CASError("Failed to push ref {}: {}".format(refs, e), temporary=True) from e
    +                raise CASCacheError("Failed to push ref {}: {}".format(refs, e), temporary=True) from e
     
             return not skipped_remote
     
    @@ -408,57 +323,13 @@ class CASCache():
         #     directory (Directory): A virtual directory object to push.
         #
         # Raises:
    -    #     (CASError): if there was an error
    +    #     (CASCacheError): if there was an error
         #
         def push_directory(self, remote, directory):
             remote.init()
     
             self._send_directory(remote, directory.ref)
     
    -    # push_message():
    -    #
    -    # Push the given protobuf message to a remote.
    -    #
    -    # Args:
    -    #     remote (CASRemote): The remote to push to
    -    #     message (Message): A protobuf message to push.
    -    #
    -    # Raises:
    -    #     (CASError): if there was an error
    -    #
    -    def push_message(self, remote, message):
    -
    -        message_buffer = message.SerializeToString()
    -        message_digest = utils._message_digest(message_buffer)
    -
    -        remote.init()
    -
    -        with io.BytesIO(message_buffer) as b:
    -            self._send_blob(remote, message_digest, b)
    -
    -        return message_digest
    -
    -    # verify_digest_on_remote():
    -    #
    -    # Check whether the object is already on the server in which case
    -    # there is no need to upload it.
    -    #
    -    # Args:
    -    #     remote (CASRemote): The remote to check
    -    #     digest (Digest): The object digest.
    -    #
    -    def verify_digest_on_remote(self, remote, digest):
    -        remote.init()
    -
    -        request = remote_execution_pb2.FindMissingBlobsRequest(instance_name=remote.spec.instance_name)
    -        request.blob_digests.extend([digest])
    -
    -        response = remote.cas.FindMissingBlobs(request)
    -        if digest in response.missing_blob_digests:
    -            return False
    -
    -        return True
    -
         # objpath():
         #
         # Return the path of an object based on its digest.
    @@ -531,7 +402,7 @@ class CASCache():
                 pass
     
             except OSError as e:
    -            raise CASError("Failed to hash object: {}".format(e)) from e
    +            raise CASCacheError("Failed to hash object: {}".format(e)) from e
     
             return digest
     
    @@ -572,7 +443,7 @@ class CASCache():
                     return digest
     
             except FileNotFoundError as e:
    -            raise CASError("Attempt to access unavailable ref: {}".format(e)) from e
    +            raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
     
         # update_mtime()
         #
    @@ -585,7 +456,7 @@ class CASCache():
             try:
                 os.utime(self._refpath(ref))
             except FileNotFoundError as e:
    -            raise CASError("Attempt to access unavailable ref: {}".format(e)) from e
    +            raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
     
         # calculate_cache_size()
         #
    @@ -676,7 +547,7 @@ class CASCache():
             # Remove cache ref
             refpath = self._refpath(ref)
             if not os.path.exists(refpath):
    -            raise CASError("Could not find ref '{}'".format(ref))
    +            raise CASCacheError("Could not find ref '{}'".format(ref))
     
             os.unlink(refpath)
     
    @@ -792,7 +663,7 @@ class CASCache():
                     # The process serving the socket can't be cached anyway
                     pass
                 else:
    -                raise CASError("Unsupported file type for {}".format(full_path))
    +                raise CASCacheError("Unsupported file type for {}".format(full_path))
     
             return self.add_object(digest=dir_digest,
                                    buffer=directory.SerializeToString())
    @@ -811,7 +682,7 @@ class CASCache():
                 if dirnode.name == name:
                     return dirnode.digest
     
    -        raise CASError("Subdirectory {} not found".format(name))
    +        raise CASCacheError("Subdirectory {} not found".format(name))
     
         def _diff_trees(self, tree_a, tree_b, *, added, removed, modified, path=""):
             dir_a = remote_execution_pb2.Directory()
    @@ -909,23 +780,6 @@ class CASCache():
             for dirnode in directory.directories:
                 yield from self._required_blobs(dirnode.digest)
     
    -    def _fetch_blob(self, remote, digest, stream):
    -        resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
    -
    -        if remote.spec.instance_name:
    -            resource_name_components.insert(0, remote.spec.instance_name)
    -
    -        resource_name = '/'.join(resource_name_components)
    -
    -        request = bytestream_pb2.ReadRequest()
    -        request.resource_name = resource_name
    -        request.read_offset = 0
    -        for response in remote.bytestream.Read(request):
    -            stream.write(response.data)
    -        stream.flush()
    -
    -        assert digest.size_bytes == os.fstat(stream.fileno()).st_size
    -
         # _ensure_blob():
         #
         # Fetch and add blob if it's not already local.
    @@ -944,7 +798,7 @@ class CASCache():
                 return objpath
     
             with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
    -            self._fetch_blob(remote, digest, f)
    +            remote._fetch_blob(digest, f)
     
                 added_digest = self.add_object(path=f.name, link_directly=True)
                 assert added_digest.hash == digest.hash
    @@ -1051,7 +905,7 @@ class CASCache():
         def _fetch_tree(self, remote, digest):
             # download but do not store the Tree object
             with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
    -            self._fetch_blob(remote, digest, out)
    +            remote._fetch_blob(digest, out)
     
                 tree = remote_execution_pb2.Tree()
     
    @@ -1071,39 +925,6 @@ class CASCache():
     
             return dirdigest
     
    -    def _send_blob(self, remote, digest, stream, u_uid=uuid.uuid4()):
    -        resource_name_components = ['uploads', str(u_uid), 'blobs',
    -                                    digest.hash, str(digest.size_bytes)]
    -
    -        if remote.spec.instance_name:
    -            resource_name_components.insert(0, remote.spec.instance_name)
    -
    -        resource_name = '/'.join(resource_name_components)
    -
    -        def request_stream(resname, instream):
    -            offset = 0
    -            finished = False
    -            remaining = digest.size_bytes
    -            while not finished:
    -                chunk_size = min(remaining, _MAX_PAYLOAD_BYTES)
    -                remaining -= chunk_size
    -
    -                request = bytestream_pb2.WriteRequest()
    -                request.write_offset = offset
    -                # max. _MAX_PAYLOAD_BYTES chunks
    -                request.data = instream.read(chunk_size)
    -                request.resource_name = resname
    -                request.finish_write = remaining <= 0
    -
    -                yield request
    -
    -                offset += chunk_size
    -                finished = request.finish_write
    -
    -        response = remote.bytestream.Write(request_stream(resource_name, stream))
    -
    -        assert response.committed_size == digest.size_bytes
    -
         def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
             required_blobs = self._required_blobs(digest)
     
    @@ -1137,7 +958,7 @@ class CASCache():
                     if (digest.size_bytes >= remote.max_batch_total_size_bytes or
                             not remote.batch_update_supported):
                         # Too large for batch request, upload in independent request.
    -                    self._send_blob(remote, digest, f, u_uid=u_uid)
    +                    remote._send_blob(digest, f, u_uid=u_uid)
                     else:
                         if not batch.add(digest, f):
                             # Not enough space left in batch request.
    @@ -1150,183 +971,6 @@ class CASCache():
             batch.send()
     
     
    -# Represents a single remote CAS cache.
    -#
    -class CASRemote():
    -    def __init__(self, spec):
    -        self.spec = spec
    -        self._initialized = False
    -        self.channel = None
    -        self.bytestream = None
    -        self.cas = None
    -        self.ref_storage = None
    -        self.batch_update_supported = None
    -        self.batch_read_supported = None
    -        self.capabilities = None
    -        self.max_batch_total_size_bytes = None
    -
    -    def init(self):
    -        if not self._initialized:
    -            url = urlparse(self.spec.url)
    -            if url.scheme == 'http':
    -                port = url.port or 80
    -                self.channel = grpc.insecure_channel('{}:{}'.format(url.hostname, port))
    -            elif url.scheme == 'https':
    -                port = url.port or 443
    -
    -                if self.spec.server_cert:
    -                    with open(self.spec.server_cert, 'rb') as f:
    -                        server_cert_bytes = f.read()
    -                else:
    -                    server_cert_bytes = None
    -
    -                if self.spec.client_key:
    -                    with open(self.spec.client_key, 'rb') as f:
    -                        client_key_bytes = f.read()
    -                else:
    -                    client_key_bytes = None
    -
    -                if self.spec.client_cert:
    -                    with open(self.spec.client_cert, 'rb') as f:
    -                        client_cert_bytes = f.read()
    -                else:
    -                    client_cert_bytes = None
    -
    -                credentials = grpc.ssl_channel_credentials(root_certificates=server_cert_bytes,
    -                                                           private_key=client_key_bytes,
    -                                                           certificate_chain=client_cert_bytes)
    -                self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
    -            else:
    -                raise CASError("Unsupported URL: {}".format(self.spec.url))
    -
    -            self.bytestream = bytestream_pb2_grpc.ByteStreamStub(self.channel)
    -            self.cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self.channel)
    -            self.capabilities = remote_execution_pb2_grpc.CapabilitiesStub(self.channel)
    -            self.ref_storage = buildstream_pb2_grpc.ReferenceStorageStub(self.channel)
    -
    -            self.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
    -            try:
    -                request = remote_execution_pb2.GetCapabilitiesRequest(instance_name=self.spec.instance_name)
    -                response = self.capabilities.GetCapabilities(request)
    -                server_max_batch_total_size_bytes = response.cache_capabilities.max_batch_total_size_bytes
    -                if 0 < server_max_batch_total_size_bytes < self.max_batch_total_size_bytes:
    -                    self.max_batch_total_size_bytes = server_max_batch_total_size_bytes
    -            except grpc.RpcError as e:
    -                # Simply use the defaults for servers that don't implement GetCapabilities()
    -                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
    -                    raise
    -
    -            # Check whether the server supports BatchReadBlobs()
    -            self.batch_read_supported = False
    -            try:
    -                request = remote_execution_pb2.BatchReadBlobsRequest(instance_name=self.spec.instance_name)
    -                response = self.cas.BatchReadBlobs(request)
    -                self.batch_read_supported = True
    -            except grpc.RpcError as e:
    -                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
    -                    raise
    -
    -            # Check whether the server supports BatchUpdateBlobs()
    -            self.batch_update_supported = False
    -            try:
    -                request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=self.spec.instance_name)
    -                response = self.cas.BatchUpdateBlobs(request)
    -                self.batch_update_supported = True
    -            except grpc.RpcError as e:
    -                if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
    -                        e.code() != grpc.StatusCode.PERMISSION_DENIED):
    -                    raise
    -
    -            self._initialized = True
    -
    -
    -# Represents a batch of blobs queued for fetching.
    -#
    -class _CASBatchRead():
    -    def __init__(self, remote):
    -        self._remote = remote
    -        self._max_total_size_bytes = remote.max_batch_total_size_bytes
    -        self._request = remote_execution_pb2.BatchReadBlobsRequest(instance_name=remote.spec.instance_name)
    -        self._size = 0
    -        self._sent = False
    -
    -    def add(self, digest):
    -        assert not self._sent
    -
    -        new_batch_size = self._size + digest.size_bytes
    -        if new_batch_size > self._max_total_size_bytes:
    -            # Not enough space left in current batch
    -            return False
    -
    -        request_digest = self._request.digests.add()
    -        request_digest.hash = digest.hash
    -        request_digest.size_bytes = digest.size_bytes
    -        self._size = new_batch_size
    -        return True
    -
    -    def send(self):
    -        assert not self._sent
    -        self._sent = True
    -
    -        if not self._request.digests:
    -            return
    -
    -        batch_response = self._remote.cas.BatchReadBlobs(self._request)
    -
    -        for response in batch_response.responses:
    -            if response.status.code == code_pb2.NOT_FOUND:
    -                raise BlobNotFound(response.digest.hash, "Failed to download blob {}: {}".format(
    -                    response.digest.hash, response.status.code))
    -            if response.status.code != code_pb2.OK:
    -                raise CASError("Failed to download blob {}: {}".format(
    -                    response.digest.hash, response.status.code))
    -            if response.digest.size_bytes != len(response.data):
    -                raise CASError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
    -                    response.digest.hash, response.digest.size_bytes, len(response.data)))
    -
    -            yield (response.digest, response.data)
    -
    -
    -# Represents a batch of blobs queued for upload.
    -#
    -class _CASBatchUpdate():
    -    def __init__(self, remote):
    -        self._remote = remote
    -        self._max_total_size_bytes = remote.max_batch_total_size_bytes
    -        self._request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=remote.spec.instance_name)
    -        self._size = 0
    -        self._sent = False
    -
    -    def add(self, digest, stream):
    -        assert not self._sent
    -
    -        new_batch_size = self._size + digest.size_bytes
    -        if new_batch_size > self._max_total_size_bytes:
    -            # Not enough space left in current batch
    -            return False
    -
    -        blob_request = self._request.requests.add()
    -        blob_request.digest.hash = digest.hash
    -        blob_request.digest.size_bytes = digest.size_bytes
    -        blob_request.data = stream.read(digest.size_bytes)
    -        self._size = new_batch_size
    -        return True
    -
    -    def send(self):
    -        assert not self._sent
    -        self._sent = True
    -
    -        if not self._request.requests:
    -            return
    -
    -        batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
    -
    -        for response in batch_response.responses:
    -            if response.status.code != code_pb2.OK:
    -                raise CASError("Failed to upload blob {}: {}".format(
    -                    response.digest.hash, response.status.code))
    -
    -
     def _grouper(iterable, n):
         while True:
             try:
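
    The pattern throughout these hunks: CASCache keeps the local object store
    and ref handling, while anything that touches the wire now hangs off the
    remote object. _ensure_blob() is representative; after this change the
    transfer reads roughly as follows (a sketch assembled from the hunks above):

        with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
            # The remote owns the ByteStream download; CASCache only
            # verifies and stores the result in the local object store.
            remote._fetch_blob(digest, f)

            added_digest = self.add_object(path=f.name, link_directly=True)
            assert added_digest.hash == digest.hash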
    

  • buildstream/_cas/casremote.py
    1
    +from collections import namedtuple
    
    2
    +import io
    
    3
    +import os
    
    4
    +import multiprocessing
    
    5
    +import signal
    
    6
    +from urllib.parse import urlparse
    
    7
    +import uuid
    
    8
    +
    
    9
    +import grpc
    
    10
    +
    
    11
    +from .. import _yaml
    
    12
    +from .._protos.google.rpc import code_pb2
    
    13
    +from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
    
    14
    +from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
    
    15
    +from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
    
    16
    +
    
    17
    +from .._exceptions import CASRemoteError, LoadError, LoadErrorReason
    
    18
    +from .. import _signals
    
    19
    +from .. import utils
    
    20
    +
    
    21
    +# The default limit for gRPC messages is 4 MiB.
    
    22
    +# Limit payload to 1 MiB to leave sufficient headroom for metadata.
    
    23
    +_MAX_PAYLOAD_BYTES = 1024 * 1024
    
    24
    +
    
    25
    +
    
    26
    +class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
    
    27
    +
    
    28
    +    # _new_from_config_node
    
    29
    +    #
    
    30
    +    # Creates an CASRemoteSpec() from a YAML loaded node
    
    31
    +    #
    
    32
    +    @staticmethod
    
    33
    +    def _new_from_config_node(spec_node, basedir=None):
    
    34
    +        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
    
    35
    +        url = _yaml.node_get(spec_node, str, 'url')
    
    36
    +        push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
    
    37
    +        if not url:
    
    38
    +            provenance = _yaml.node_get_provenance(spec_node, 'url')
    
    39
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    40
    +                            "{}: empty artifact cache URL".format(provenance))
    
    41
    +
    
    42
    +        instance_name = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
    
    43
    +
    
    44
    +        server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
    
    45
    +        if server_cert and basedir:
    
    46
    +            server_cert = os.path.join(basedir, server_cert)
    
    47
    +
    
    48
    +        client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
    
    49
    +        if client_key and basedir:
    
    50
    +            client_key = os.path.join(basedir, client_key)
    
    51
    +
    
    52
    +        client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
    
    53
    +        if client_cert and basedir:
    
    54
    +            client_cert = os.path.join(basedir, client_cert)
    
    55
    +
    
    56
    +        if client_key and not client_cert:
    
    57
    +            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
    
    58
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    59
    +                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
    
    60
    +
    
    61
    +        if client_cert and not client_key:
    
    62
    +            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
    
    63
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    64
    +                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
    
    65
    +
    
    66
    +        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
    
    67
    +
    
    68
    +
    
    69
    +CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
    
    70
    +
    
    71
    +
    
    72
    +class BlobNotFound(CASRemoteError):
    
    73
    +
    
    74
    +    def __init__(self, blob, msg):
    
    75
    +        self.blob = blob
    
    76
    +        super().__init__(msg)
    
    77
    +
    
    78
    +
    
    79
    +# Represents a single remote CAS cache.
    
    80
    +#
    
    81
    +class CASRemote():
    
    82
    +    def __init__(self, spec):
    
    83
    +        self.spec = spec
    
    84
    +        self._initialized = False
    
    85
    +        self.channel = None
    
    86
    +        self.bytestream = None
    
    87
    +        self.cas = None
    
    88
    +        self.ref_storage = None
    
    89
    +        self.batch_update_supported = None
    
    90
    +        self.batch_read_supported = None
    
    91
    +        self.capabilities = None
    
    92
    +        self.max_batch_total_size_bytes = None
    
    93
    +
    
    94
    +    def init(self):
    
    95
    +        if not self._initialized:
    
    96
    +            url = urlparse(self.spec.url)
    
    97
    +            if url.scheme == 'http':
    
    98
    +                port = url.port or 80
    
    99
    +                self.channel = grpc.insecure_channel('{}:{}'.format(url.hostname, port))
    
    100
    +            elif url.scheme == 'https':
    
    101
    +                port = url.port or 443
    
    102
    +
    
    103
    +                if self.spec.server_cert:
    
    104
    +                    with open(self.spec.server_cert, 'rb') as f:
    
    105
    +                        server_cert_bytes = f.read()
    
    106
    +                else:
    
    107
    +                    server_cert_bytes = None
    
    108
    +
    
    109
    +                if self.spec.client_key:
    
    110
    +                    with open(self.spec.client_key, 'rb') as f:
    
    111
    +                        client_key_bytes = f.read()
    
    112
    +                else:
    
    113
    +                    client_key_bytes = None
    
    114
    +
    
    115
    +                if self.spec.client_cert:
    
    116
    +                    with open(self.spec.client_cert, 'rb') as f:
    
    117
    +                        client_cert_bytes = f.read()
    
    118
    +                else:
    
    119
    +                    client_cert_bytes = None
    
    120
    +
    
    121
    +                credentials = grpc.ssl_channel_credentials(root_certificates=server_cert_bytes,
    
    122
    +                                                           private_key=client_key_bytes,
    
    123
    +                                                           certificate_chain=client_cert_bytes)
    
    124
    +                self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
    
    125
    +            else:
    
    126
    +                raise CASRemoteError("Unsupported URL: {}".format(self.spec.url))
    
    127
    +
    
    128
    +            self.bytestream = bytestream_pb2_grpc.ByteStreamStub(self.channel)
    
    129
    +            self.cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self.channel)
    
    130
    +            self.capabilities = remote_execution_pb2_grpc.CapabilitiesStub(self.channel)
    
    131
    +            self.ref_storage = buildstream_pb2_grpc.ReferenceStorageStub(self.channel)
    
    132
    +
    
    133
    +            self.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
    
    134
    +            try:
    
    135
    +                request = remote_execution_pb2.GetCapabilitiesRequest()
    
    136
    +                response = self.capabilities.GetCapabilities(request)
    
    137
    +                server_max_batch_total_size_bytes = response.cache_capabilities.max_batch_total_size_bytes
    
    138
    +                if 0 < server_max_batch_total_size_bytes < self.max_batch_total_size_bytes:
    
    139
    +                    self.max_batch_total_size_bytes = server_max_batch_total_size_bytes
    
    140
    +            except grpc.RpcError as e:
    
    141
    +                # Simply use the defaults for servers that don't implement GetCapabilities()
    
    142
    +                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
    
    143
    +                    raise
    
    144
    +
    
    145
    +            # Check whether the server supports BatchReadBlobs()
    
    146
    +            self.batch_read_supported = False
    
    147
    +            try:
    
    148
    +                request = remote_execution_pb2.BatchReadBlobsRequest()
    
    149
    +                response = self.cas.BatchReadBlobs(request)
    
    150
    +                self.batch_read_supported = True
    
    151
    +            except grpc.RpcError as e:
    
    152
    +                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
    
    153
    +                    raise
    
    154
    +
    
    155
    +            # Check whether the server supports BatchUpdateBlobs()
    
    156
    +            self.batch_update_supported = False
    
    157
    +            try:
    
    158
    +                request = remote_execution_pb2.BatchUpdateBlobsRequest()
    
    159
    +                response = self.cas.BatchUpdateBlobs(request)
    
    160
    +                self.batch_update_supported = True
    
    161
    +            except grpc.RpcError as e:
    
    162
    +                if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
    
    163
    +                        e.code() != grpc.StatusCode.PERMISSION_DENIED):
    
    164
    +                    raise
    
    165
    +
    
    166
    +            self._initialized = True
    
    167
    +
    
    168
    +    # check_remote
    
    169
    +    #
    
    170
    +    # Used when checking whether remote_specs work in the buildstream main
    
    171
    +    # thread, runs this in a seperate process to avoid creation of gRPC threads
    
    172
    +    # in the main BuildStream process
    
    173
    +    # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    174
    +    @classmethod
    
    175
    +    def check_remote(cls, remote_spec, q):
    
    176
    +
    
    177
    +        def __check_remote():
    
    178
    +            try:
    
    179
    +                remote = cls(remote_spec)
    
    180
    +                remote.init()
    
    181
    +
    
    182
    +                request = buildstream_pb2.StatusRequest()
    
    183
    +                response = remote.ref_storage.Status(request)
    
    184
    +
    
    185
    +                if remote_spec.push and not response.allow_updates:
    
    186
    +                    q.put('CAS server does not allow push')
    
    187
    +                else:
    
    188
    +                    # No error
    
    189
    +                    q.put(None)
    
    190
    +
    
    191
    +            except grpc.RpcError as e:
    
    192
    +                # str(e) is too verbose for errors reported to the user
    
    193
    +                q.put(e.details())
    
    194
    +
    
    195
    +            except Exception as e:               # pylint: disable=broad-except
    
    196
    +                # Whatever happens, we need to return it to the calling process
    
    197
    +                #
    
    198
    +                q.put(str(e))
    
    199
    +
    
    200
    +        p = multiprocessing.Process(target=__check_remote)
    
    201
    +
    
    202
    +        try:
    
    203
    +            # Keep SIGINT blocked in the child process
    
    204
    +            with _signals.blocked([signal.SIGINT], ignore=False):
    
    205
    +                p.start()
    
    206
    +
    
    207
    +            error = q.get()
    
    208
    +            p.join()
    
    209
    +        except KeyboardInterrupt:
    
    210
    +            utils._kill_process_tree(p.pid)
    
    211
    +            raise
    
    212
    +
    
    213
    +        return error
    
    214
    +
    
    215
    +    # verify_digest_on_remote():
    
    216
    +    #
    
    217
    +    # Check whether the object is already on the server in which case
    
    218
    +    # there is no need to upload it.
    
    219
    +    #
    
    220
    +    # Args:
    
    221
    +    #     digest (Digest): The object digest.
    
    222
    +    #
    
    223
    +    def verify_digest_on_remote(self, digest):
    
    224
    +        self.init()
    
    225
    +
    
    226
    +        request = remote_execution_pb2.FindMissingBlobsRequest()
    
    227
    +        request.blob_digests.extend([digest])
    
    228
    +
    
    229
    +        response = self.cas.FindMissingBlobs(request)
    
    230
    +        if digest in response.missing_blob_digests:
    
    231
    +            return False
    
    232
    +
    
    233
    +        return True
    
    234
    +
    
    235
    +    # push_message():
    
    236
    +    #
    
    237
    +    # Push the given protobuf message to a remote.
    
    238
    +    #
    
    239
    +    # Args:
    
    240
    +    #     message (Message): A protobuf message to push.
    
    241
    +    #
    
    242
    +    # Raises:
    
    243
    +    #     (CASRemoteError): if there was an error
    
    244
    +    #
    
    245
    +    def push_message(self, message):
    
    246
    +
    
    247
    +        message_buffer = message.SerializeToString()
    
    248
    +        message_digest = utils._message_digest(message_buffer)
    
    249
    +
    
    250
    +        self.init()
    
    251
    +
    
    252
    +        with io.BytesIO(message_buffer) as b:
    
    253
    +            self._send_blob(message_digest, b)
    
    254
    +
    
    255
    +        return message_digest
    
    256
    +
    
    257
    +    ################################################
    
    258
    +    #             Local Private Methods            #
    
    259
    +    ################################################
    
    260
    +    def _fetch_blob(self, digest, stream):
    
    261
    +        resource_name = '/'.join(['blobs', digest.hash, str(digest.size_bytes)])
    
    262
    +        request = bytestream_pb2.ReadRequest()
    
    263
    +        request.resource_name = resource_name
    
    264
    +        request.read_offset = 0
    
    265
    +        for response in self.bytestream.Read(request):
    
    266
    +            stream.write(response.data)
    
    267
    +        stream.flush()
    
    268
    +
    
    269
    +        assert digest.size_bytes == os.fstat(stream.fileno()).st_size
    
    270
    +
    
    +    def _send_blob(self, digest, stream, u_uid=uuid.uuid4()):
    +        resource_name = '/'.join(['uploads', str(u_uid), 'blobs',
    +                                  digest.hash, str(digest.size_bytes)])
    +
    +        def request_stream(resname, instream):
    +            offset = 0
    +            finished = False
    +            remaining = digest.size_bytes
    +            while not finished:
    +                chunk_size = min(remaining, _MAX_PAYLOAD_BYTES)
    +                remaining -= chunk_size
    +
    +                request = bytestream_pb2.WriteRequest()
    +                request.write_offset = offset
    +                # max. _MAX_PAYLOAD_BYTES chunks
    +                request.data = instream.read(chunk_size)
    +                request.resource_name = resname
    +                request.finish_write = remaining <= 0
    +
    +                yield request
    +
    +                offset += chunk_size
    +                finished = request.finish_write
    +
    +        response = self.bytestream.Write(request_stream(resource_name, stream))
    +
    +        assert response.committed_size == digest.size_bytes
    +
    +
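    The resource name follows the ByteStream upload convention ('uploads/{uuid}/blobs/{hash}/{size}'), and the generator slices the payload so no single WriteRequest exceeds _MAX_PAYLOAD_BYTES, setting finish_write on the final chunk. The same chunking arithmetic in isolation (a sketch with no gRPC; the sizes are arbitrary examples):

        def chunks(size_bytes, max_payload):
            # Yields (offset, chunk_size, finish_write) exactly as the
            # request_stream() generator above populates its requests.
            remaining = size_bytes
            offset = 0
            finished = False
            while not finished:
                chunk_size = min(remaining, max_payload)
                remaining -= chunk_size
                finished = remaining <= 0
                yield offset, chunk_size, finished
                offset += chunk_size

        # A 5 MiB blob under a 1 MiB cap becomes five writes; only the
        # last one carries finish_write=True.
        assert list(chunks(5 * 1024 * 1024, 1024 * 1024))[-1] == (4 * 1024 * 1024, 1024 * 1024, True)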
    
    +# Represents a batch of blobs queued for fetching.
    +#
    +class _CASBatchRead():
    +    def __init__(self, remote):
    +        self._remote = remote
    +        self._max_total_size_bytes = remote.max_batch_total_size_bytes
    +        self._request = remote_execution_pb2.BatchReadBlobsRequest()
    +        self._size = 0
    +        self._sent = False
    +
    +    def add(self, digest):
    +        assert not self._sent
    +
    +        new_batch_size = self._size + digest.size_bytes
    +        if new_batch_size > self._max_total_size_bytes:
    +            # Not enough space left in current batch
    +            return False
    +
    +        request_digest = self._request.digests.add()
    +        request_digest.hash = digest.hash
    +        request_digest.size_bytes = digest.size_bytes
    +        self._size = new_batch_size
    +        return True
    +
    +    def send(self):
    +        assert not self._sent
    +        self._sent = True
    +
    +        if not self._request.digests:
    +            return
    +
    +        batch_response = self._remote.cas.BatchReadBlobs(self._request)
    +
    +        for response in batch_response.responses:
    +            if response.status.code == code_pb2.NOT_FOUND:
    +                raise BlobNotFound(response.digest.hash, "Failed to download blob {}: {}".format(
    +                    response.digest.hash, response.status.code))
    +            if response.status.code != code_pb2.OK:
    +                raise CASRemoteError("Failed to download blob {}: {}".format(
    +                    response.digest.hash, response.status.code))
    +            if response.digest.size_bytes != len(response.data):
    +                raise CASRemoteError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
    +                    response.digest.hash, response.digest.size_bytes, len(response.data)))
    +
    +            yield (response.digest, response.data)
    +
    +
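    The add()/send() split gives callers a simple flush-when-full loop. A usage sketch (not from this commit; _write_blob_locally is a hypothetical helper, and a single blob larger than the batch limit would need the ByteStream fallback instead):

        batch = _CASBatchRead(remote)
        for digest in digests_to_fetch:
            if not batch.add(digest):
                # Current batch is full: flush it, then start a new one.
                for received_digest, data in batch.send():
                    _write_blob_locally(received_digest, data)
                batch = _CASBatchRead(remote)
                batch.add(digest)
        for received_digest, data in batch.send():
            _write_blob_locally(received_digest, data)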
    
    +# Represents a batch of blobs queued for upload.
    +#
    +class _CASBatchUpdate():
    +    def __init__(self, remote):
    +        self._remote = remote
    +        self._max_total_size_bytes = remote.max_batch_total_size_bytes
    +        self._request = remote_execution_pb2.BatchUpdateBlobsRequest()
    +        self._size = 0
    +        self._sent = False
    +
    +    def add(self, digest, stream):
    +        assert not self._sent
    +
    +        new_batch_size = self._size + digest.size_bytes
    +        if new_batch_size > self._max_total_size_bytes:
    +            # Not enough space left in current batch
    +            return False
    +
    +        blob_request = self._request.requests.add()
    +        blob_request.digest.hash = digest.hash
    +        blob_request.digest.size_bytes = digest.size_bytes
    +        blob_request.data = stream.read(digest.size_bytes)
    +        self._size = new_batch_size
    +        return True
    +
    +    def send(self):
    +        assert not self._sent
    +        self._sent = True
    +
    +        if not self._request.requests:
    +            return
    +
    +        batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
    +
    +        for response in batch_response.responses:
    +            if response.status.code != code_pb2.OK:
    +                raise CASRemoteError("Failed to upload blob {}: {}".format(
    +                    response.digest.hash, response.status.code))

  • buildstream/_artifactcache/casserver.py → buildstream/_cas/casserver.py (renamed)

  • buildstream/_context.py
    ... ... @@ -31,7 +31,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
     from ._message import Message, MessageType
     from ._profile import Topics, profile_start, profile_end
     from ._artifactcache import ArtifactCache
    -from ._artifactcache.cascache import CASCache
    +from ._cas import CASCache
     from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
     from .plugin import _plugin_lookup
     from .sandbox import SandboxRemote
    

  • buildstream/_exceptions.py
    ... ... @@ -284,6 +284,21 @@ class CASError(BstError):
             super().__init__(message, detail=detail, domain=ErrorDomain.CAS, reason=reason, temporary=True)


    +# CASRemoteError
    +#
    +# Raised when errors are encountered in the remote CAS
    +class CASRemoteError(CASError):
    +    pass
    +
    +
    +# CASCacheError
    +#
    +# Raised when errors are encountered in the local CASCache
    +#
    +class CASCacheError(CASError):
    +    pass
    +
    +
     # PipelineError
     #
     # Raised from pipeline operations
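    Splitting the hierarchy this way lets call sites distinguish remote trouble from local store problems while still catching CASError for the whole domain. A hypothetical handler (not from this commit; the called functions are placeholders):

        try:
            do_cas_operation()        # hypothetical CAS call
        except CASRemoteError:
            retry_or_skip_remote()    # remote-side failure, possibly transient
        except CASCacheError:
            repair_local_store()      # local CAS problem, the remote can't help
        except CASError:
            raise                     # anything else in the CAS domain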
    

  • buildstream/_frontend/cli.py
    ... ... @@ -2,6 +2,7 @@ import os
     import sys
     from contextlib import ExitStack
     from fnmatch import fnmatch
    +from functools import partial
     from tempfile import TemporaryDirectory

     import click

    ... ... @@ -111,14 +112,25 @@ def complete_target(args, incomplete):
         return complete_list


    -def complete_artifact(args, incomplete):
    +def complete_artifact(orig_args, args, incomplete):
         from .._context import Context
         ctx = Context()

         config = None
    -    for i, arg in enumerate(args):
    -        if arg in ('-c', '--config'):
    -            config = args[i + 1]
    +    if orig_args:
    +        for i, arg in enumerate(orig_args):
    +            if arg in ('-c', '--config'):
    +                try:
    +                    config = orig_args[i + 1]
    +                except IndexError:
    +                    pass
    +    if args:
    +        for i, arg in enumerate(args):
    +            if arg in ('-c', '--config'):
    +                try:
    +                    config = args[i + 1]
    +                except IndexError:
    +                    pass
         ctx.load(config)

         # element targets are valid artifact names

    ... ... @@ -128,8 +140,9 @@ def complete_artifact(args, incomplete):
         return complete_list


    -def override_completions(cmd, cmd_param, args, incomplete):
    +def override_completions(orig_args, cmd, cmd_param, args, incomplete):
         """
    +    :param orig_args: original, non-completion args
         :param cmd_param: command definition
         :param args: full list of args typed before the incomplete arg
         :param incomplete: the incomplete text to autocomplete

    ... ... @@ -150,7 +163,7 @@ def override_completions(cmd, cmd_param, args, incomplete):
                     cmd_param.opts == ['--track-except']):
                 return complete_target(args, incomplete)
             if cmd_param.name == 'artifacts':
    -            return complete_artifact(args, incomplete)
    +            return complete_artifact(orig_args, args, incomplete)

         raise CompleteUnhandled()

    ... ... @@ -161,7 +174,7 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
         # Hook for the Bash completion.  This only activates if the Bash
         # completion is actually enabled, otherwise this is quite a fast
         # noop.
    -    if main_bashcomplete(self, prog_name, override_completions):
    +    if main_bashcomplete(self, prog_name, partial(override_completions, args)):

             # If we're running tests we can't just go calling exit()
             # from the main process.
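    The functools.partial change is the key to this hunk: the bash-completion hook invokes the override with a fixed (cmd, cmd_param, args, incomplete) signature, and during completion those args are the words being completed, not the arguments bst was actually started with. Pre-binding the real argv keeps the expected signature while carrying the original args through. In isolation:

        from functools import partial

        def override_completions(orig_args, cmd, cmd_param, args, incomplete):
            return orig_args, incomplete

        # Pre-bind the original command line; the result matches the
        # four-argument callback signature the completion hook expects.
        callback = partial(override_completions, ['artifact', 'log', '--config', 'user.conf'])
        assert callback(None, None, [], 'te') == (['artifact', 'log', '--config', 'user.conf'], 'te')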
    

  • buildstream/_scheduler/queues/queue.py
    ... ... @@ -170,9 +170,9 @@ class Queue():
             skip = [job for job in jobs if self.status(job.element) == QueueStatus.SKIP]
             wait = [job for job in jobs if job not in skip]

    +        self.skipped_elements.extend([job.element for job in skip])
             self._wait_queue.extend(wait)
             self._done_queue.extend(skip)
    -        self.skipped_elements.extend(skip)

         # dequeue()
         #
    

  • buildstream/sandbox/_sandboxremote.py
    ... ... @@ -38,7 +38,7 @@ from .._protos.google.rpc import code_pb2
     from .._exceptions import SandboxError
     from .. import _yaml
     from .._protos.google.longrunning import operations_pb2, operations_pb2_grpc
    -from .._artifactcache.cascache import CASRemote, CASRemoteSpec
    +from .._cas import CASRemote, CASRemoteSpec


     class RemoteExecutionSpec(namedtuple('RemoteExecutionSpec', 'exec_service storage_service action_service')):

    ... ... @@ -348,17 +348,17 @@ class SandboxRemote(Sandbox):
                 except grpc.RpcError as e:
                     raise SandboxError("Failed to push source directory to remote: {}".format(e)) from e

    -            if not cascache.verify_digest_on_remote(casremote, upload_vdir.ref):
    +            if not casremote.verify_digest_on_remote(upload_vdir.ref):
                     raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")

                 # Push command and action
                 try:
    -                cascache.push_message(casremote, command_proto)
    +                casremote.push_message(command_proto)
                 except grpc.RpcError as e:
                     raise SandboxError("Failed to push command to remote: {}".format(e))

                 try:
    -                cascache.push_message(casremote, action)
    +                casremote.push_message(action)
                 except grpc.RpcError as e:
                     raise SandboxError("Failed to push action to remote: {}".format(e))
    

  • doc/source/using_configuring_artifact_server.rst
    ... ... @@ -94,7 +94,7 @@ requiring BuildStream's more exigent dependencies by setting the
     Command reference
     ~~~~~~~~~~~~~~~~~

    -.. click:: buildstream._artifactcache.casserver:server_main
    +.. click:: buildstream._cas.casserver:server_main
        :prog: bst-artifact-server
    

  • requirements/dev-requirements.txt
    ... ... @@ -2,7 +2,7 @@ coverage==4.4
     pylint==2.2.2
     pycodestyle==2.4.0
     pytest==4.0.2
    -pytest-cov==2.6.0
    +pytest-cov==2.6.1
     pytest-datafiles==2.0
     pytest-env==0.6.2
     pytest-xdist==1.25.0
    

  • tests/artifactcache/config.py
    ... ... @@ -3,8 +3,7 @@ import pytest
     import itertools
     import os

    -from buildstream._artifactcache import ArtifactCacheSpec
    -from buildstream._artifactcache.artifactcache import _configured_remote_artifact_cache_specs
    +from buildstream._artifactcache import ArtifactCacheSpec, _configured_remote_artifact_cache_specs
     from buildstream._context import Context
     from buildstream._project import Project
     from buildstream.utils import _deduplicate
    

  • tests/artifactcache/expiry.py
    ... ... @@ -342,13 +342,13 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
             total_space = 10000

         volume_space_patch = mock.patch(
    -        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
    +        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
             autospec=True,
             return_value=(free_space, total_space),
         )

         cache_size_patch = mock.patch(
    -        "buildstream._artifactcache.artifactcache.ArtifactCache.get_cache_size",
    +        "buildstream._artifactcache.ArtifactCache.get_cache_size",
             autospec=True,
             return_value=0,
         )
    

  • tests/completions/completions.py
    ... ... @@ -281,3 +281,44 @@ def test_argument_element_invalid(datafiles, cli, project, cmd, word_idx, expected):
     ])
     def test_help_commands(cli, cmd, word_idx, expected):
         assert_completion(cli, cmd, word_idx, expected)
    +
    +
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
    +def test_argument_artifact(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    # Build an import element with no dependencies (as there will only be ONE cache key)
    +    result = cli.run(project=project, args=['build', 'import-bin.bst'])  # Has no dependencies
    +    result.assert_success()
    +
    +    # Get the key and the artifact ref ($project/$element_name/$key)
    +    key = cli.get_element_key(project, 'import-bin.bst')
    +    artifact = os.path.join('test', 'import-bin', key)
    +
    +    # Test autocompletion of the artifact
    +    cmds = [
    +        'bst artifact log ',
    +        'bst artifact log t',
    +        'bst artifact log test/'
    +    ]
    +
    +    for i, cmd in enumerate(cmds):
    +        word_idx = 3
    +        result = cli.run(project=project, cwd=project, env={
    +            '_BST_COMPLETION': 'complete',
    +            'COMP_WORDS': cmd,
    +            'COMP_CWORD': str(word_idx)
    +        })
    +        words = []
    +        if result.output:
    +            words = result.output.splitlines()  # This leaves an extra space on each e.g. ['foo.bst ']
    +            words = [word.strip() for word in words]
    +
    +            if i == 0:
    +                expected = PROJECT_ELEMENTS + [artifact]  # We should now be able to see the artifact
    +            elif i == 1:
    +                expected = ['target.bst', artifact]
    +            elif i == 2:
    +                expected = [artifact]
    +
    +            assert expected == words

  • tests/sandboxes/storage-tests.py
    ... ... @@ -3,7 +3,7 @@ import pytest

     from buildstream._exceptions import ErrorDomain

    -from buildstream._artifactcache.cascache import CASCache
    +from buildstream._cas import CASCache
     from buildstream.storage._casbaseddirectory import CasBasedDirectory
     from buildstream.storage._filebaseddirectory import FileBasedDirectory
    

  • tests/storage/virtual_directory_import.py
    ... ... @@ -8,7 +8,7 @@ from tests.testutils import cli
     from buildstream.storage._casbaseddirectory import CasBasedDirectory
     from buildstream.storage._filebaseddirectory import FileBasedDirectory
     from buildstream._artifactcache import ArtifactCache
    -from buildstream._artifactcache.cascache import CASCache
    +from buildstream._cas import CASCache
     from buildstream import utils
    

  • tests/testutils/artifactshare.py
    ... ... @@ -11,8 +11,8 @@ from multiprocessing import Process, Queue
     import pytest_cov

     from buildstream import _yaml
    -from buildstream._artifactcache.cascache import CASCache
    -from buildstream._artifactcache.casserver import create_server
    +from buildstream._cas import CASCache
    +from buildstream._cas.casserver import create_server
     from buildstream._exceptions import CASError
     from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    

  • tests/utils/misc.py
    ... ... @@ -23,7 +23,7 @@ def test_parse_size_over_1024T(cli, tmpdir):
         _yaml.dump({'name': 'main'}, str(project.join("project.conf")))

         volume_space_patch = mock.patch(
    -        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
    +        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
             autospec=True,
             return_value=(1025 * TiB, 1025 * TiB)
         )
    


