Phil Dawson pushed to branch phil/848-plugin-deprecation-warnings at BuildStream / buildstream
Commits:
- 0b83d024 by Tristan Van Berkom at 2019-01-10T20:02:50Z
- 630e26f1 by Tristan Van Berkom at 2019-01-10T20:03:52Z
- d1d7de57 by Tristan Van Berkom at 2019-01-11T04:13:16Z
- 276b8d48 by Tristan Van Berkom at 2019-01-11T04:42:24Z
- fc3dcec8 by Angelos Evripiotis at 2019-01-11T16:45:00Z
- f86b7ff3 by Angelos Evripiotis at 2019-01-11T16:45:00Z
- d983f231 by Angelos Evripiotis at 2019-01-11T17:19:06Z
- 829a2b93 by Tristan Van Berkom at 2019-01-12T21:32:13Z
- 993e30ae by Tristan Van Berkom at 2019-01-12T22:02:55Z
- 32732e01 by Javier Jardón at 2019-01-14T09:04:01Z
- 4f5f1184 by Valentin David at 2019-01-14T09:40:56Z
- bb80a2b8 by Chandan Singh at 2019-01-14T14:30:15Z
- 10b3ee62 by Chandan Singh at 2019-01-14T14:57:08Z
- 542cdaf0 by Jürg Billeter at 2019-01-14T18:46:57Z
- ff666e76 by James Ennis at 2019-01-14T18:46:57Z
- 273b0f55 by Tristan Van Berkom at 2019-01-14T20:24:57Z
- 2e3c2ea2 by Javier Jardón at 2019-01-14T22:26:04Z
- d60d2e31 by Javier Jardón at 2019-01-15T00:39:22Z
- 605836c1 by Chandan Singh at 2019-01-15T00:57:05Z
- 006370af by Chandan Singh at 2019-01-15T01:21:33Z
- 4b544555 by Chandan Singh at 2019-01-15T21:28:40Z
- 80fe0d9a by Javier Jardón at 2019-01-15T22:48:22Z
- c91784ab by Chandan Singh at 2019-01-15T22:49:01Z
- ecae4d73 by Javier Jardón at 2019-01-15T23:23:41Z
- 0eac4008 by Valentin David at 2019-01-16T10:04:57Z
- a405e08f by Valentin David at 2019-01-16T10:04:57Z
- c0631d48 by Valentin David at 2019-01-16T10:33:52Z
- b608ac86 by James Ennis at 2019-01-16T11:05:44Z
- 97a3beb6 by James Ennis at 2019-01-16T11:05:44Z
- d6587aa0 by James Ennis at 2019-01-16T11:33:21Z
- 2683f98a by Raoul Hidalgo Charman at 2019-01-16T11:55:07Z
- d2cc4798 by Raoul Hidalgo Charman at 2019-01-16T11:55:07Z
- 76f67483 by Raoul Hidalgo Charman at 2019-01-16T11:55:07Z
- 6c428bc9 by Jürg Billeter at 2019-01-16T12:56:38Z
- 63ab4ae4 by Phil Dawson at 2019-01-16T13:21:21Z
- c74e2ec6 by Phil Dawson at 2019-01-16T13:29:43Z
29 changed files:
- .gitlab-ci.yml
- CONTRIBUTING.rst
- buildstream/_artifactcache/artifactcache.py → buildstream/_artifactcache.py
- buildstream/_artifactcache/__init__.py → buildstream/_cas/__init__.py
- buildstream/_artifactcache/cascache.py → buildstream/_cas/cascache.py
- + buildstream/_cas/casremote.py
- buildstream/_artifactcache/casserver.py → buildstream/_cas/casserver.py
- buildstream/_context.py
- buildstream/_exceptions.py
- buildstream/_frontend/cli.py
- buildstream/_gitsourcebase.py
- buildstream/_pipeline.py
- buildstream/_project.py
- buildstream/_scheduler/queues/queue.py
- buildstream/plugin.py
- buildstream/sandbox/_sandboxremote.py
- conftest.py
- doc/source/using_configuring_artifact_server.rst
- requirements/dev-requirements.txt
- tests/artifactcache/config.py
- tests/artifactcache/expiry.py
- tests/completions/completions.py
- + tests/frontend/order.py
- tests/sandboxes/storage-tests.py
- tests/sources/git.py
- tests/storage/virtual_directory_import.py
- tests/testutils/artifactshare.py
- tests/testutils/runcli.py
- tests/utils/misc.py
Changes:
.gitlab-ci.yml:
| ... | ... | @@ -60,8 +60,18 @@ tests-ubuntu-18.04: | 
| 60 | 60 |    image: buildstream/testsuite-ubuntu:18.04-5da27168-32c47d1c
 | 
| 61 | 61 |    <<: *tests
 | 
| 62 | 62 |  | 
| 63 | +tests-python-3.7-stretch:
 | |
| 64 | +  image: buildstream/testsuite-python:3.7-stretch-a60f0c39
 | |
| 65 | +  <<: *tests
 | |
| 66 | + | |
| 67 | +  variables:
 | |
| 68 | +    # Note that we explicitly specify TOXENV in this case because this
 | |
| 69 | +    # image has both 3.6 and 3.7 versions. python3.6 cannot be removed because
 | |
| 70 | +    # some of our base dependencies declare it as their runtime dependency.
 | |
| 71 | +    TOXENV: py37
 | |
| 72 | + | |
| 63 | 73 |  overnight-fedora-28-aarch64:
 | 
| 64 | -  image: buildstream/testsuite-fedora:aarch64-28-06bab030-32a101f6
 | |
| 74 | +  image: buildstream/testsuite-fedora:aarch64-28-5da27168-32c47d1c
 | |
| 65 | 75 |    tags:
 | 
| 66 | 76 |      - aarch64
 | 
| 67 | 77 |    <<: *tests
 | 
| ... | ... | @@ -70,6 +80,12 @@ overnight-fedora-28-aarch64: | 
| 70 | 80 |    except: []
 | 
| 71 | 81 |    only:
 | 
| 72 | 82 |    - schedules
 | 
| 83 | +  before_script:
 | |
| 84 | +    # grpcio needs to be compiled from source on aarch64 so we additionally
 | |
| 85 | +    # need a C++ compiler here.
 | |
| 86 | +    # FIXME: Ideally this would be provided by the base image. This will be
 | |
| 87 | +    # unblocked by https://gitlab.com/BuildStream/buildstream-docker-images/issues/34
 | |
| 88 | +    - dnf install -y gcc-c++
 | |
| 73 | 89 |  | 
| 74 | 90 |  tests-unix:
 | 
| 75 | 91 |    # Use fedora here, to a) run a test on fedora and b) ensure that we
 | 
| ... | ... | @@ -90,7 +106,6 @@ tests-unix: | 
| 90 | 106 |      # Since the unix platform is required to run as root, no user change required
 | 
| 91 | 107 |      - ${TEST_COMMAND}
 | 
| 92 | 108 |  | 
| 93 | - | |
| 94 | 109 |  tests-fedora-missing-deps:
 | 
| 95 | 110 |    # Ensure that tests behave nicely while missing bwrap and ostree
 | 
| 96 | 111 |    image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
 | 
| ... | ... | @@ -108,6 +123,22 @@ tests-fedora-missing-deps: | 
| 108 | 123 |  | 
| 109 | 124 |      - ${TEST_COMMAND}
 | 
| 110 | 125 |  | 
| 126 | +tests-fedora-update-deps:
 | |
| 127 | +  # Check if the tests pass after updating requirements to their latest
 | |
| 128 | +  # allowed version.
 | |
| 129 | +  allow_failure: true
 | |
| 130 | +  image: buildstream/testsuite-fedora:28-5da27168-32c47d1c
 | |
| 131 | +  <<: *tests
 | |
| 132 | + | |
| 133 | +  script:
 | |
| 134 | +    - useradd -Um buildstream
 | |
| 135 | +    - chown -R buildstream:buildstream .
 | |
| 136 | + | |
| 137 | +    - make --always-make --directory requirements
 | |
| 138 | +    - cat requirements/*.txt
 | |
| 139 | + | |
| 140 | +    - su buildstream -c "${TEST_COMMAND}"
 | |
| 141 | + | |
| 111 | 142 |  # Lint separately from testing
 | 
| 112 | 143 |  lint:
 | 
| 113 | 144 |    stage: test
 | 
| ... | ... | @@ -140,8 +171,8 @@ docs: | 
| 140 | 171 |    stage: test
 | 
| 141 | 172 |    variables:
 | 
| 142 | 173 |      BST_EXT_URL: git+https://gitlab.com/BuildStream/bst-external.git
 | 
| 143 | -    BST_EXT_REF: 573843768f4d297f85dc3067465b3c7519a8dcc3 # 0.7.0
 | |
| 144 | -    FD_SDK_REF: 612f66e218445eee2b1a9d7dd27c9caba571612e # freedesktop-sdk-18.08.19-54-g612f66e2
 | |
| 174 | +    BST_EXT_REF: 0.9.0-0-g63a19e8068bd777bd9cd59b1a9442f9749ea5a85
 | |
| 175 | +    FD_SDK_REF: freedesktop-sdk-18.08.25-0-g250939d465d6dd7768a215f1fa59c4a3412fc337
 | |
| 145 | 176 |    before_script:
 | 
| 146 | 177 |    - |
 | 
| 147 | 178 |      mkdir -p "${HOME}/.config"
 | 
CONTRIBUTING.rst:
| ... | ... | @@ -1534,6 +1534,10 @@ You can always abort on the first failure by running:: | 
| 1534 | 1534 |  | 
| 1535 | 1535 |    tox -- -x
 | 
| 1536 | 1536 |  | 
| 1537 | +Similarly, you may also be interested in the ``--last-failed`` and
 | |
| 1538 | +``--failed-first`` options as per the
 | |
| 1539 | +`pytest cache <https://docs.pytest.org/en/latest/cache.html>`_ documentation.
 | |
| 1540 | + | |
| 1537 | 1541 |  If you want to run a specific test or a group of tests, you
 | 
| 1538 | 1542 |  can specify a prefix to match. E.g. if you want to run all of
 | 
| 1539 | 1543 |  the frontend tests you can do::
 | 
| ... | ... | @@ -1545,6 +1549,12 @@ If you wanted to run the test_build_track test within frontend/buildtrack.py you | 
| 1545 | 1549 |  | 
| 1546 | 1550 |    tox -- tests/frontend/buildtrack.py::test_build_track
 | 
| 1547 | 1551 |  | 
| 1552 | +When running only a few tests, you may find the coverage and timing output
 | |
| 1553 | +excessive; there are options to trim them. Note that the coverage step will fail when coverage is disabled.
 | |
| 1554 | +Here is an example::
 | |
| 1555 | + | |
| 1556 | +  tox -- --no-cov --durations=1 tests/frontend/buildtrack.py::test_build_track
 | |
| 1557 | + | |
| 1548 | 1558 |  We also have a set of slow integration tests that are disabled by
 | 
| 1549 | 1559 |  default - you will notice most of them marked with SKIP in the pytest
 | 
| 1550 | 1560 |  output. To run them, you can use::
 | 
buildstream/_artifactcache/artifactcache.py → buildstream/_artifactcache.py:
| ... | ... | @@ -19,18 +19,16 @@ | 
| 19 | 19 |  | 
| 20 | 20 |  import multiprocessing
 | 
| 21 | 21 |  import os
 | 
| 22 | -import signal
 | |
| 23 | 22 |  import string
 | 
| 24 | 23 |  from collections.abc import Mapping
 | 
| 25 | 24 |  | 
| 26 | -from ..types import _KeyStrength
 | |
| 27 | -from .._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
 | |
| 28 | -from .._message import Message, MessageType
 | |
| 29 | -from .. import _signals
 | |
| 30 | -from .. import utils
 | |
| 31 | -from .. import _yaml
 | |
| 25 | +from .types import _KeyStrength
 | |
| 26 | +from ._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
 | |
| 27 | +from ._message import Message, MessageType
 | |
| 28 | +from . import utils
 | |
| 29 | +from . import _yaml
 | |
| 32 | 30 |  | 
| 33 | -from .cascache import CASRemote, CASRemoteSpec
 | |
| 31 | +from ._cas import CASRemote, CASRemoteSpec
 | |
| 34 | 32 |  | 
| 35 | 33 |  | 
| 36 | 34 |  CACHE_SIZE_FILE = "cache_size"
 | 
| ... | ... | @@ -375,20 +373,8 @@ class ArtifactCache(): | 
| 375 | 373 |          remotes = {}
 | 
| 376 | 374 |          q = multiprocessing.Queue()
 | 
| 377 | 375 |          for remote_spec in remote_specs:
 | 
| 378 | -            # Use subprocess to avoid creation of gRPC threads in main BuildStream process
 | |
| 379 | -            # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
 | |
| 380 | -            p = multiprocessing.Process(target=self.cas.initialize_remote, args=(remote_spec, q))
 | |
| 381 | 376 |  | 
| 382 | -            try:
 | |
| 383 | -                # Keep SIGINT blocked in the child process
 | |
| 384 | -                with _signals.blocked([signal.SIGINT], ignore=False):
 | |
| 385 | -                    p.start()
 | |
| 386 | - | |
| 387 | -                error = q.get()
 | |
| 388 | -                p.join()
 | |
| 389 | -            except KeyboardInterrupt:
 | |
| 390 | -                utils._kill_process_tree(p.pid)
 | |
| 391 | -                raise
 | |
| 377 | +            error = CASRemote.check_remote(remote_spec, q)
 | |
| 392 | 378 |  | 
| 393 | 379 |              if error and on_failure:
 | 
| 394 | 380 |                  on_failure(remote_spec.url, error)
 | 
| ... | ... | @@ -747,7 +733,7 @@ class ArtifactCache(): | 
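The subprocess-and-queue check that used to be inlined here is now provided by CASRemote.check_remote(), defined in the new casremote.py further down. A minimal standalone sketch of the underlying pattern (a hypothetical helper, not BuildStream's actual code, omitting the SIGINT handling shown later) looks like this:

    import multiprocessing

    def _run_check(check, queue):
        # Run the gRPC-using check in a child process so that gRPC threads
        # are never created in the parent, then report the result back over
        # the queue (None on success, an error string otherwise).
        try:
            queue.put(check())
        except Exception as e:  # report whatever happened to the caller
            queue.put(str(e))

    def check_in_subprocess(check):
        # 'check' is assumed to be a picklable callable returning None or an
        # error string, mirroring what CASRemote.check_remote() reports.
        queue = multiprocessing.Queue()
        process = multiprocessing.Process(target=_run_check, args=(check, queue))
        process.start()
        error = queue.get()   # blocks until the child reports
        process.join()
        return error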
| 747 | 733 |                                  "servers are configured as push remotes.")
 | 
| 748 | 734 |  | 
| 749 | 735 |          for remote in push_remotes:
 | 
| 750 | -            message_digest = self.cas.push_message(remote, message)
 | |
| 736 | +            message_digest = remote.push_message(message)
 | |
| 751 | 737 |  | 
| 752 | 738 |          return message_digest
 | 
| 753 | 739 |  | 
buildstream/_artifactcache/__init__.py → buildstream/_cas/__init__.py:
| ... | ... | @@ -17,4 +17,5 @@ | 
| 17 | 17 |  #  Authors:
 | 
| 18 | 18 |  #        Tristan Van Berkom <tristan vanberkom codethink co uk>
 | 
| 19 | 19 |  | 
| 20 | -from .artifactcache import ArtifactCache, ArtifactCacheSpec, CACHE_SIZE_FILE | |
| 20 | +from .cascache import CASCache
 | |
| 21 | +from .casremote import CASRemote, CASRemoteSpec | 
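With this reorganisation, internal callers import the CAS classes from the new buildstream._cas package rather than from buildstream._artifactcache. A rough usage sketch (the remote URL is hypothetical and a reachable CAS server is assumed; the CASRemoteSpec fields follow the namedtuple order defined in casremote.py):

    from buildstream._cas import CASRemote, CASRemoteSpec

    # Hypothetical push-enabled remote:
    # (url, push, server_cert, client_key, client_cert, instance_name)
    spec = CASRemoteSpec("https://cache.example.com", True, None, None, None, None)

    remote = CASRemote(spec)
    remote.init()  # opens the gRPC channel and probes server capabilities

    # Protobuf messages are now pushed via the remote object itself,
    # e.g. digest = remote.push_message(message), matching the
    # ArtifactCache change above.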
buildstream/_artifactcache/cascache.py → buildstream/_cas/cascache.py:
| ... | ... | @@ -17,85 +17,23 @@ | 
| 17 | 17 |  #  Authors:
 | 
| 18 | 18 |  #        Jürg Billeter <juerg billeter codethink co uk>
 | 
| 19 | 19 |  | 
| 20 | -from collections import namedtuple
 | |
| 21 | 20 |  import hashlib
 | 
| 22 | 21 |  import itertools
 | 
| 23 | -import io
 | |
| 24 | 22 |  import os
 | 
| 25 | 23 |  import stat
 | 
| 26 | 24 |  import tempfile
 | 
| 27 | 25 |  import uuid
 | 
| 28 | 26 |  import contextlib
 | 
| 29 | -from urllib.parse import urlparse
 | |
| 30 | 27 |  | 
| 31 | 28 |  import grpc
 | 
| 32 | 29 |  | 
| 33 | -from .._protos.google.rpc import code_pb2
 | |
| 34 | -from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
 | |
| 35 | -from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 | |
| 36 | -from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
 | |
| 30 | +from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 | |
| 31 | +from .._protos.buildstream.v2 import buildstream_pb2
 | |
| 37 | 32 |  | 
| 38 | 33 |  from .. import utils
 | 
| 39 | -from .._exceptions import CASError, LoadError, LoadErrorReason
 | |
| 40 | -from .. import _yaml
 | |
| 34 | +from .._exceptions import CASCacheError
 | |
| 41 | 35 |  | 
| 42 | - | |
| 43 | -# The default limit for gRPC messages is 4 MiB.
 | |
| 44 | -# Limit payload to 1 MiB to leave sufficient headroom for metadata.
 | |
| 45 | -_MAX_PAYLOAD_BYTES = 1024 * 1024
 | |
| 46 | - | |
| 47 | - | |
| 48 | -class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
 | |
| 49 | - | |
| 50 | -    # _new_from_config_node
 | |
| 51 | -    #
 | |
| 52 | -    # Creates an CASRemoteSpec() from a YAML loaded node
 | |
| 53 | -    #
 | |
| 54 | -    @staticmethod
 | |
| 55 | -    def _new_from_config_node(spec_node, basedir=None):
 | |
| 56 | -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
 | |
| 57 | -        url = _yaml.node_get(spec_node, str, 'url')
 | |
| 58 | -        push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
 | |
| 59 | -        if not url:
 | |
| 60 | -            provenance = _yaml.node_get_provenance(spec_node, 'url')
 | |
| 61 | -            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 62 | -                            "{}: empty artifact cache URL".format(provenance))
 | |
| 63 | - | |
| 64 | -        instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)
 | |
| 65 | - | |
| 66 | -        server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
 | |
| 67 | -        if server_cert and basedir:
 | |
| 68 | -            server_cert = os.path.join(basedir, server_cert)
 | |
| 69 | - | |
| 70 | -        client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
 | |
| 71 | -        if client_key and basedir:
 | |
| 72 | -            client_key = os.path.join(basedir, client_key)
 | |
| 73 | - | |
| 74 | -        client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
 | |
| 75 | -        if client_cert and basedir:
 | |
| 76 | -            client_cert = os.path.join(basedir, client_cert)
 | |
| 77 | - | |
| 78 | -        if client_key and not client_cert:
 | |
| 79 | -            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
 | |
| 80 | -            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 81 | -                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
 | |
| 82 | - | |
| 83 | -        if client_cert and not client_key:
 | |
| 84 | -            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
 | |
| 85 | -            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 86 | -                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
 | |
| 87 | - | |
| 88 | -        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
 | |
| 89 | - | |
| 90 | - | |
| 91 | -CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
 | |
| 92 | - | |
| 93 | - | |
| 94 | -class BlobNotFound(CASError):
 | |
| 95 | - | |
| 96 | -    def __init__(self, blob, msg):
 | |
| 97 | -        self.blob = blob
 | |
| 98 | -        super().__init__(msg)
 | |
| 36 | +from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate
 | |
| 99 | 37 |  | 
| 100 | 38 |  | 
| 101 | 39 |  # A CASCache manages a CAS repository as specified in the Remote Execution API.
 | 
| ... | ... | @@ -120,7 +58,7 @@ class CASCache(): | 
| 120 | 58 |          headdir = os.path.join(self.casdir, 'refs', 'heads')
 | 
| 121 | 59 |          objdir = os.path.join(self.casdir, 'objects')
 | 
| 122 | 60 |          if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
 | 
| 123 | -            raise CASError("CAS repository check failed for '{}'".format(self.casdir))
 | |
| 61 | +            raise CASCacheError("CAS repository check failed for '{}'".format(self.casdir))
 | |
| 124 | 62 |  | 
| 125 | 63 |      # contains():
 | 
| 126 | 64 |      #
 | 
| ... | ... | @@ -169,7 +107,7 @@ class CASCache(): | 
| 169 | 107 |      #     subdir (str): Optional specific dir to extract
 | 
| 170 | 108 |      #
 | 
| 171 | 109 |      # Raises:
 | 
| 172 | -    #     CASError: In cases there was an OSError, or if the ref did not exist.
 | |
| 110 | +    #     CASCacheError: In cases there was an OSError, or if the ref did not exist.
 | |
| 173 | 111 |      #
 | 
| 174 | 112 |      # Returns: path to extracted directory
 | 
| 175 | 113 |      #
 | 
| ... | ... | @@ -201,7 +139,7 @@ class CASCache(): | 
| 201 | 139 |                  # Another process beat us to rename
 | 
| 202 | 140 |                  pass
 | 
| 203 | 141 |              except OSError as e:
 | 
| 204 | -                raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
 | |
| 142 | +                raise CASCacheError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
 | |
| 205 | 143 |  | 
| 206 | 144 |          return originaldest
 | 
| 207 | 145 |  | 
| ... | ... | @@ -245,29 +183,6 @@ class CASCache(): | 
| 245 | 183 |  | 
| 246 | 184 |          return modified, removed, added
 | 
| 247 | 185 |  | 
| 248 | -    def initialize_remote(self, remote_spec, q):
 | |
| 249 | -        try:
 | |
| 250 | -            remote = CASRemote(remote_spec)
 | |
| 251 | -            remote.init()
 | |
| 252 | - | |
| 253 | -            request = buildstream_pb2.StatusRequest(instance_name=remote_spec.instance_name)
 | |
| 254 | -            response = remote.ref_storage.Status(request)
 | |
| 255 | - | |
| 256 | -            if remote_spec.push and not response.allow_updates:
 | |
| 257 | -                q.put('CAS server does not allow push')
 | |
| 258 | -            else:
 | |
| 259 | -                # No error
 | |
| 260 | -                q.put(None)
 | |
| 261 | - | |
| 262 | -        except grpc.RpcError as e:
 | |
| 263 | -            # str(e) is too verbose for errors reported to the user
 | |
| 264 | -            q.put(e.details())
 | |
| 265 | - | |
| 266 | -        except Exception as e:               # pylint: disable=broad-except
 | |
| 267 | -            # Whatever happens, we need to return it to the calling process
 | |
| 268 | -            #
 | |
| 269 | -            q.put(str(e))
 | |
| 270 | - | |
| 271 | 186 |      # pull():
 | 
| 272 | 187 |      #
 | 
| 273 | 188 |      # Pull a ref from a remote repository.
 | 
| ... | ... | @@ -306,7 +221,7 @@ class CASCache(): | 
| 306 | 221 |              return True
 | 
| 307 | 222 |          except grpc.RpcError as e:
 | 
| 308 | 223 |              if e.code() != grpc.StatusCode.NOT_FOUND:
 | 
| 309 | -                raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
 | |
| 224 | +                raise CASCacheError("Failed to pull ref {}: {}".format(ref, e)) from e
 | |
| 310 | 225 |              else:
 | 
| 311 | 226 |                  return False
 | 
| 312 | 227 |          except BlobNotFound as e:
 | 
| ... | ... | @@ -360,7 +275,7 @@ class CASCache(): | 
| 360 | 275 |      #   (bool): True if any remote was updated, False if no pushes were required
 | 
| 361 | 276 |      #
 | 
| 362 | 277 |      # Raises:
 | 
| 363 | -    #   (CASError): if there was an error
 | |
| 278 | +    #   (CASCacheError): if there was an error
 | |
| 364 | 279 |      #
 | 
| 365 | 280 |      def push(self, refs, remote):
 | 
| 366 | 281 |          skipped_remote = True
 | 
| ... | ... | @@ -395,7 +310,7 @@ class CASCache(): | 
| 395 | 310 |                  skipped_remote = False
 | 
| 396 | 311 |          except grpc.RpcError as e:
 | 
| 397 | 312 |              if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
 | 
| 398 | -                raise CASError("Failed to push ref {}: {}".format(refs, e), temporary=True) from e
 | |
| 313 | +                raise CASCacheError("Failed to push ref {}: {}".format(refs, e), temporary=True) from e
 | |
| 399 | 314 |  | 
| 400 | 315 |          return not skipped_remote
 | 
| 401 | 316 |  | 
| ... | ... | @@ -408,57 +323,13 @@ class CASCache(): | 
| 408 | 323 |      #     directory (Directory): A virtual directory object to push.
 | 
| 409 | 324 |      #
 | 
| 410 | 325 |      # Raises:
 | 
| 411 | -    #     (CASError): if there was an error
 | |
| 326 | +    #     (CASCacheError): if there was an error
 | |
| 412 | 327 |      #
 | 
| 413 | 328 |      def push_directory(self, remote, directory):
 | 
| 414 | 329 |          remote.init()
 | 
| 415 | 330 |  | 
| 416 | 331 |          self._send_directory(remote, directory.ref)
 | 
| 417 | 332 |  | 
| 418 | -    # push_message():
 | |
| 419 | -    #
 | |
| 420 | -    # Push the given protobuf message to a remote.
 | |
| 421 | -    #
 | |
| 422 | -    # Args:
 | |
| 423 | -    #     remote (CASRemote): The remote to push to
 | |
| 424 | -    #     message (Message): A protobuf message to push.
 | |
| 425 | -    #
 | |
| 426 | -    # Raises:
 | |
| 427 | -    #     (CASError): if there was an error
 | |
| 428 | -    #
 | |
| 429 | -    def push_message(self, remote, message):
 | |
| 430 | - | |
| 431 | -        message_buffer = message.SerializeToString()
 | |
| 432 | -        message_digest = utils._message_digest(message_buffer)
 | |
| 433 | - | |
| 434 | -        remote.init()
 | |
| 435 | - | |
| 436 | -        with io.BytesIO(message_buffer) as b:
 | |
| 437 | -            self._send_blob(remote, message_digest, b)
 | |
| 438 | - | |
| 439 | -        return message_digest
 | |
| 440 | - | |
| 441 | -    # verify_digest_on_remote():
 | |
| 442 | -    #
 | |
| 443 | -    # Check whether the object is already on the server in which case
 | |
| 444 | -    # there is no need to upload it.
 | |
| 445 | -    #
 | |
| 446 | -    # Args:
 | |
| 447 | -    #     remote (CASRemote): The remote to check
 | |
| 448 | -    #     digest (Digest): The object digest.
 | |
| 449 | -    #
 | |
| 450 | -    def verify_digest_on_remote(self, remote, digest):
 | |
| 451 | -        remote.init()
 | |
| 452 | - | |
| 453 | -        request = remote_execution_pb2.FindMissingBlobsRequest(instance_name=remote.spec.instance_name)
 | |
| 454 | -        request.blob_digests.extend([digest])
 | |
| 455 | - | |
| 456 | -        response = remote.cas.FindMissingBlobs(request)
 | |
| 457 | -        if digest in response.missing_blob_digests:
 | |
| 458 | -            return False
 | |
| 459 | - | |
| 460 | -        return True
 | |
| 461 | - | |
| 462 | 333 |      # objpath():
 | 
| 463 | 334 |      #
 | 
| 464 | 335 |      # Return the path of an object based on its digest.
 | 
| ... | ... | @@ -531,7 +402,7 @@ class CASCache(): | 
| 531 | 402 |              pass
 | 
| 532 | 403 |  | 
| 533 | 404 |          except OSError as e:
 | 
| 534 | -            raise CASError("Failed to hash object: {}".format(e)) from e
 | |
| 405 | +            raise CASCacheError("Failed to hash object: {}".format(e)) from e
 | |
| 535 | 406 |  | 
| 536 | 407 |          return digest
 | 
| 537 | 408 |  | 
| ... | ... | @@ -572,7 +443,7 @@ class CASCache(): | 
| 572 | 443 |                  return digest
 | 
| 573 | 444 |  | 
| 574 | 445 |          except FileNotFoundError as e:
 | 
| 575 | -            raise CASError("Attempt to access unavailable ref: {}".format(e)) from e
 | |
| 446 | +            raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
 | |
| 576 | 447 |  | 
| 577 | 448 |      # update_mtime()
 | 
| 578 | 449 |      #
 | 
| ... | ... | @@ -585,7 +456,7 @@ class CASCache(): | 
| 585 | 456 |          try:
 | 
| 586 | 457 |              os.utime(self._refpath(ref))
 | 
| 587 | 458 |          except FileNotFoundError as e:
 | 
| 588 | -            raise CASError("Attempt to access unavailable ref: {}".format(e)) from e
 | |
| 459 | +            raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
 | |
| 589 | 460 |  | 
| 590 | 461 |      # calculate_cache_size()
 | 
| 591 | 462 |      #
 | 
| ... | ... | @@ -676,7 +547,7 @@ class CASCache(): | 
| 676 | 547 |          # Remove cache ref
 | 
| 677 | 548 |          refpath = self._refpath(ref)
 | 
| 678 | 549 |          if not os.path.exists(refpath):
 | 
| 679 | -            raise CASError("Could not find ref '{}'".format(ref))
 | |
| 550 | +            raise CASCacheError("Could not find ref '{}'".format(ref))
 | |
| 680 | 551 |  | 
| 681 | 552 |          os.unlink(refpath)
 | 
| 682 | 553 |  | 
| ... | ... | @@ -792,7 +663,7 @@ class CASCache(): | 
| 792 | 663 |                  # The process serving the socket can't be cached anyway
 | 
| 793 | 664 |                  pass
 | 
| 794 | 665 |              else:
 | 
| 795 | -                raise CASError("Unsupported file type for {}".format(full_path))
 | |
| 666 | +                raise CASCacheError("Unsupported file type for {}".format(full_path))
 | |
| 796 | 667 |  | 
| 797 | 668 |          return self.add_object(digest=dir_digest,
 | 
| 798 | 669 |                                 buffer=directory.SerializeToString())
 | 
| ... | ... | @@ -811,7 +682,7 @@ class CASCache(): | 
| 811 | 682 |              if dirnode.name == name:
 | 
| 812 | 683 |                  return dirnode.digest
 | 
| 813 | 684 |  | 
| 814 | -        raise CASError("Subdirectory {} not found".format(name))
 | |
| 685 | +        raise CASCacheError("Subdirectory {} not found".format(name))
 | |
| 815 | 686 |  | 
| 816 | 687 |      def _diff_trees(self, tree_a, tree_b, *, added, removed, modified, path=""):
 | 
| 817 | 688 |          dir_a = remote_execution_pb2.Directory()
 | 
| ... | ... | @@ -909,23 +780,6 @@ class CASCache(): | 
| 909 | 780 |          for dirnode in directory.directories:
 | 
| 910 | 781 |              yield from self._required_blobs(dirnode.digest)
 | 
| 911 | 782 |  | 
| 912 | -    def _fetch_blob(self, remote, digest, stream):
 | |
| 913 | -        resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
 | |
| 914 | - | |
| 915 | -        if remote.spec.instance_name:
 | |
| 916 | -            resource_name_components.insert(0, remote.spec.instance_name)
 | |
| 917 | - | |
| 918 | -        resource_name = '/'.join(resource_name_components)
 | |
| 919 | - | |
| 920 | -        request = bytestream_pb2.ReadRequest()
 | |
| 921 | -        request.resource_name = resource_name
 | |
| 922 | -        request.read_offset = 0
 | |
| 923 | -        for response in remote.bytestream.Read(request):
 | |
| 924 | -            stream.write(response.data)
 | |
| 925 | -        stream.flush()
 | |
| 926 | - | |
| 927 | -        assert digest.size_bytes == os.fstat(stream.fileno()).st_size
 | |
| 928 | - | |
| 929 | 783 |      # _ensure_blob():
 | 
| 930 | 784 |      #
 | 
| 931 | 785 |      # Fetch and add blob if it's not already local.
 | 
| ... | ... | @@ -944,7 +798,7 @@ class CASCache(): | 
| 944 | 798 |              return objpath
 | 
| 945 | 799 |  | 
| 946 | 800 |          with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
 | 
| 947 | -            self._fetch_blob(remote, digest, f)
 | |
| 801 | +            remote._fetch_blob(digest, f)
 | |
| 948 | 802 |  | 
| 949 | 803 |              added_digest = self.add_object(path=f.name, link_directly=True)
 | 
| 950 | 804 |              assert added_digest.hash == digest.hash
 | 
| ... | ... | @@ -1051,7 +905,7 @@ class CASCache(): | 
| 1051 | 905 |      def _fetch_tree(self, remote, digest):
 | 
| 1052 | 906 |          # download but do not store the Tree object
 | 
| 1053 | 907 |          with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
 | 
| 1054 | -            self._fetch_blob(remote, digest, out)
 | |
| 908 | +            remote._fetch_blob(digest, out)
 | |
| 1055 | 909 |  | 
| 1056 | 910 |              tree = remote_execution_pb2.Tree()
 | 
| 1057 | 911 |  | 
| ... | ... | @@ -1071,39 +925,6 @@ class CASCache(): | 
| 1071 | 925 |  | 
| 1072 | 926 |          return dirdigest
 | 
| 1073 | 927 |  | 
| 1074 | -    def _send_blob(self, remote, digest, stream, u_uid=uuid.uuid4()):
 | |
| 1075 | -        resource_name_components = ['uploads', str(u_uid), 'blobs',
 | |
| 1076 | -                                    digest.hash, str(digest.size_bytes)]
 | |
| 1077 | - | |
| 1078 | -        if remote.spec.instance_name:
 | |
| 1079 | -            resource_name_components.insert(0, remote.spec.instance_name)
 | |
| 1080 | - | |
| 1081 | -        resource_name = '/'.join(resource_name_components)
 | |
| 1082 | - | |
| 1083 | -        def request_stream(resname, instream):
 | |
| 1084 | -            offset = 0
 | |
| 1085 | -            finished = False
 | |
| 1086 | -            remaining = digest.size_bytes
 | |
| 1087 | -            while not finished:
 | |
| 1088 | -                chunk_size = min(remaining, _MAX_PAYLOAD_BYTES)
 | |
| 1089 | -                remaining -= chunk_size
 | |
| 1090 | - | |
| 1091 | -                request = bytestream_pb2.WriteRequest()
 | |
| 1092 | -                request.write_offset = offset
 | |
| 1093 | -                # max. _MAX_PAYLOAD_BYTES chunks
 | |
| 1094 | -                request.data = instream.read(chunk_size)
 | |
| 1095 | -                request.resource_name = resname
 | |
| 1096 | -                request.finish_write = remaining <= 0
 | |
| 1097 | - | |
| 1098 | -                yield request
 | |
| 1099 | - | |
| 1100 | -                offset += chunk_size
 | |
| 1101 | -                finished = request.finish_write
 | |
| 1102 | - | |
| 1103 | -        response = remote.bytestream.Write(request_stream(resource_name, stream))
 | |
| 1104 | - | |
| 1105 | -        assert response.committed_size == digest.size_bytes
 | |
| 1106 | - | |
| 1107 | 928 |      def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
 | 
| 1108 | 929 |          required_blobs = self._required_blobs(digest)
 | 
| 1109 | 930 |  | 
| ... | ... | @@ -1137,7 +958,7 @@ class CASCache(): | 
| 1137 | 958 |                  if (digest.size_bytes >= remote.max_batch_total_size_bytes or
 | 
| 1138 | 959 |                          not remote.batch_update_supported):
 | 
| 1139 | 960 |                      # Too large for batch request, upload in independent request.
 | 
| 1140 | -                    self._send_blob(remote, digest, f, u_uid=u_uid)
 | |
| 961 | +                    remote._send_blob(digest, f, u_uid=u_uid)
 | |
| 1141 | 962 |                  else:
 | 
| 1142 | 963 |                      if not batch.add(digest, f):
 | 
| 1143 | 964 |                          # Not enough space left in batch request.
 | 
| ... | ... | @@ -1150,183 +971,6 @@ class CASCache(): | 
| 1150 | 971 |          batch.send()
 | 
| 1151 | 972 |  | 
| 1152 | 973 |  | 
| 1153 | -# Represents a single remote CAS cache.
 | |
| 1154 | -#
 | |
| 1155 | -class CASRemote():
 | |
| 1156 | -    def __init__(self, spec):
 | |
| 1157 | -        self.spec = spec
 | |
| 1158 | -        self._initialized = False
 | |
| 1159 | -        self.channel = None
 | |
| 1160 | -        self.bytestream = None
 | |
| 1161 | -        self.cas = None
 | |
| 1162 | -        self.ref_storage = None
 | |
| 1163 | -        self.batch_update_supported = None
 | |
| 1164 | -        self.batch_read_supported = None
 | |
| 1165 | -        self.capabilities = None
 | |
| 1166 | -        self.max_batch_total_size_bytes = None
 | |
| 1167 | - | |
| 1168 | -    def init(self):
 | |
| 1169 | -        if not self._initialized:
 | |
| 1170 | -            url = urlparse(self.spec.url)
 | |
| 1171 | -            if url.scheme == 'http':
 | |
| 1172 | -                port = url.port or 80
 | |
| 1173 | -                self.channel = grpc.insecure_channel('{}:{}'.format(url.hostname, port))
 | |
| 1174 | -            elif url.scheme == 'https':
 | |
| 1175 | -                port = url.port or 443
 | |
| 1176 | - | |
| 1177 | -                if self.spec.server_cert:
 | |
| 1178 | -                    with open(self.spec.server_cert, 'rb') as f:
 | |
| 1179 | -                        server_cert_bytes = f.read()
 | |
| 1180 | -                else:
 | |
| 1181 | -                    server_cert_bytes = None
 | |
| 1182 | - | |
| 1183 | -                if self.spec.client_key:
 | |
| 1184 | -                    with open(self.spec.client_key, 'rb') as f:
 | |
| 1185 | -                        client_key_bytes = f.read()
 | |
| 1186 | -                else:
 | |
| 1187 | -                    client_key_bytes = None
 | |
| 1188 | - | |
| 1189 | -                if self.spec.client_cert:
 | |
| 1190 | -                    with open(self.spec.client_cert, 'rb') as f:
 | |
| 1191 | -                        client_cert_bytes = f.read()
 | |
| 1192 | -                else:
 | |
| 1193 | -                    client_cert_bytes = None
 | |
| 1194 | - | |
| 1195 | -                credentials = grpc.ssl_channel_credentials(root_certificates=server_cert_bytes,
 | |
| 1196 | -                                                           private_key=client_key_bytes,
 | |
| 1197 | -                                                           certificate_chain=client_cert_bytes)
 | |
| 1198 | -                self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
 | |
| 1199 | -            else:
 | |
| 1200 | -                raise CASError("Unsupported URL: {}".format(self.spec.url))
 | |
| 1201 | - | |
| 1202 | -            self.bytestream = bytestream_pb2_grpc.ByteStreamStub(self.channel)
 | |
| 1203 | -            self.cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self.channel)
 | |
| 1204 | -            self.capabilities = remote_execution_pb2_grpc.CapabilitiesStub(self.channel)
 | |
| 1205 | -            self.ref_storage = buildstream_pb2_grpc.ReferenceStorageStub(self.channel)
 | |
| 1206 | - | |
| 1207 | -            self.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
 | |
| 1208 | -            try:
 | |
| 1209 | -                request = remote_execution_pb2.GetCapabilitiesRequest(instance_name=self.spec.instance_name)
 | |
| 1210 | -                response = self.capabilities.GetCapabilities(request)
 | |
| 1211 | -                server_max_batch_total_size_bytes = response.cache_capabilities.max_batch_total_size_bytes
 | |
| 1212 | -                if 0 < server_max_batch_total_size_bytes < self.max_batch_total_size_bytes:
 | |
| 1213 | -                    self.max_batch_total_size_bytes = server_max_batch_total_size_bytes
 | |
| 1214 | -            except grpc.RpcError as e:
 | |
| 1215 | -                # Simply use the defaults for servers that don't implement GetCapabilities()
 | |
| 1216 | -                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
 | |
| 1217 | -                    raise
 | |
| 1218 | - | |
| 1219 | -            # Check whether the server supports BatchReadBlobs()
 | |
| 1220 | -            self.batch_read_supported = False
 | |
| 1221 | -            try:
 | |
| 1222 | -                request = remote_execution_pb2.BatchReadBlobsRequest(instance_name=self.spec.instance_name)
 | |
| 1223 | -                response = self.cas.BatchReadBlobs(request)
 | |
| 1224 | -                self.batch_read_supported = True
 | |
| 1225 | -            except grpc.RpcError as e:
 | |
| 1226 | -                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
 | |
| 1227 | -                    raise
 | |
| 1228 | - | |
| 1229 | -            # Check whether the server supports BatchUpdateBlobs()
 | |
| 1230 | -            self.batch_update_supported = False
 | |
| 1231 | -            try:
 | |
| 1232 | -                request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=self.spec.instance_name)
 | |
| 1233 | -                response = self.cas.BatchUpdateBlobs(request)
 | |
| 1234 | -                self.batch_update_supported = True
 | |
| 1235 | -            except grpc.RpcError as e:
 | |
| 1236 | -                if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
 | |
| 1237 | -                        e.code() != grpc.StatusCode.PERMISSION_DENIED):
 | |
| 1238 | -                    raise
 | |
| 1239 | - | |
| 1240 | -            self._initialized = True
 | |
| 1241 | - | |
| 1242 | - | |
| 1243 | -# Represents a batch of blobs queued for fetching.
 | |
| 1244 | -#
 | |
| 1245 | -class _CASBatchRead():
 | |
| 1246 | -    def __init__(self, remote):
 | |
| 1247 | -        self._remote = remote
 | |
| 1248 | -        self._max_total_size_bytes = remote.max_batch_total_size_bytes
 | |
| 1249 | -        self._request = remote_execution_pb2.BatchReadBlobsRequest(instance_name=remote.spec.instance_name)
 | |
| 1250 | -        self._size = 0
 | |
| 1251 | -        self._sent = False
 | |
| 1252 | - | |
| 1253 | -    def add(self, digest):
 | |
| 1254 | -        assert not self._sent
 | |
| 1255 | - | |
| 1256 | -        new_batch_size = self._size + digest.size_bytes
 | |
| 1257 | -        if new_batch_size > self._max_total_size_bytes:
 | |
| 1258 | -            # Not enough space left in current batch
 | |
| 1259 | -            return False
 | |
| 1260 | - | |
| 1261 | -        request_digest = self._request.digests.add()
 | |
| 1262 | -        request_digest.hash = digest.hash
 | |
| 1263 | -        request_digest.size_bytes = digest.size_bytes
 | |
| 1264 | -        self._size = new_batch_size
 | |
| 1265 | -        return True
 | |
| 1266 | - | |
| 1267 | -    def send(self):
 | |
| 1268 | -        assert not self._sent
 | |
| 1269 | -        self._sent = True
 | |
| 1270 | - | |
| 1271 | -        if not self._request.digests:
 | |
| 1272 | -            return
 | |
| 1273 | - | |
| 1274 | -        batch_response = self._remote.cas.BatchReadBlobs(self._request)
 | |
| 1275 | - | |
| 1276 | -        for response in batch_response.responses:
 | |
| 1277 | -            if response.status.code == code_pb2.NOT_FOUND:
 | |
| 1278 | -                raise BlobNotFound(response.digest.hash, "Failed to download blob {}: {}".format(
 | |
| 1279 | -                    response.digest.hash, response.status.code))
 | |
| 1280 | -            if response.status.code != code_pb2.OK:
 | |
| 1281 | -                raise CASError("Failed to download blob {}: {}".format(
 | |
| 1282 | -                    response.digest.hash, response.status.code))
 | |
| 1283 | -            if response.digest.size_bytes != len(response.data):
 | |
| 1284 | -                raise CASError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
 | |
| 1285 | -                    response.digest.hash, response.digest.size_bytes, len(response.data)))
 | |
| 1286 | - | |
| 1287 | -            yield (response.digest, response.data)
 | |
| 1288 | - | |
| 1289 | - | |
| 1290 | -# Represents a batch of blobs queued for upload.
 | |
| 1291 | -#
 | |
| 1292 | -class _CASBatchUpdate():
 | |
| 1293 | -    def __init__(self, remote):
 | |
| 1294 | -        self._remote = remote
 | |
| 1295 | -        self._max_total_size_bytes = remote.max_batch_total_size_bytes
 | |
| 1296 | -        self._request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=remote.spec.instance_name)
 | |
| 1297 | -        self._size = 0
 | |
| 1298 | -        self._sent = False
 | |
| 1299 | - | |
| 1300 | -    def add(self, digest, stream):
 | |
| 1301 | -        assert not self._sent
 | |
| 1302 | - | |
| 1303 | -        new_batch_size = self._size + digest.size_bytes
 | |
| 1304 | -        if new_batch_size > self._max_total_size_bytes:
 | |
| 1305 | -            # Not enough space left in current batch
 | |
| 1306 | -            return False
 | |
| 1307 | - | |
| 1308 | -        blob_request = self._request.requests.add()
 | |
| 1309 | -        blob_request.digest.hash = digest.hash
 | |
| 1310 | -        blob_request.digest.size_bytes = digest.size_bytes
 | |
| 1311 | -        blob_request.data = stream.read(digest.size_bytes)
 | |
| 1312 | -        self._size = new_batch_size
 | |
| 1313 | -        return True
 | |
| 1314 | - | |
| 1315 | -    def send(self):
 | |
| 1316 | -        assert not self._sent
 | |
| 1317 | -        self._sent = True
 | |
| 1318 | - | |
| 1319 | -        if not self._request.requests:
 | |
| 1320 | -            return
 | |
| 1321 | - | |
| 1322 | -        batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
 | |
| 1323 | - | |
| 1324 | -        for response in batch_response.responses:
 | |
| 1325 | -            if response.status.code != code_pb2.OK:
 | |
| 1326 | -                raise CASError("Failed to upload blob {}: {}".format(
 | |
| 1327 | -                    response.digest.hash, response.status.code))
 | |
| 1328 | - | |
| 1329 | - | |
| 1330 | 974 |  def _grouper(iterable, n):
 | 
| 1331 | 975 |      while True:
 | 
| 1332 | 976 |          try:
 | 
+ buildstream/_cas/casremote.py:
| 1 | +from collections import namedtuple
 | |
| 2 | +import io
 | |
| 3 | +import os
 | |
| 4 | +import multiprocessing
 | |
| 5 | +import signal
 | |
| 6 | +from urllib.parse import urlparse
 | |
| 7 | +import uuid
 | |
| 8 | + | |
| 9 | +import grpc
 | |
| 10 | + | |
| 11 | +from .. import _yaml
 | |
| 12 | +from .._protos.google.rpc import code_pb2
 | |
| 13 | +from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
 | |
| 14 | +from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 | |
| 15 | +from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
 | |
| 16 | + | |
| 17 | +from .._exceptions import CASRemoteError, LoadError, LoadErrorReason
 | |
| 18 | +from .. import _signals
 | |
| 19 | +from .. import utils
 | |
| 20 | + | |
| 21 | +# The default limit for gRPC messages is 4 MiB.
 | |
| 22 | +# Limit payload to 1 MiB to leave sufficient headroom for metadata.
 | |
| 23 | +_MAX_PAYLOAD_BYTES = 1024 * 1024
 | |
| 24 | + | |
| 25 | + | |
| 26 | +class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
 | |
| 27 | + | |
| 28 | +    # _new_from_config_node
 | |
| 29 | +    #
 | |
| 30 | +    # Creates a CASRemoteSpec() from a YAML-loaded node
 | |
| 31 | +    #
 | |
| 32 | +    @staticmethod
 | |
| 33 | +    def _new_from_config_node(spec_node, basedir=None):
 | |
| 34 | +        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
 | |
| 35 | +        url = _yaml.node_get(spec_node, str, 'url')
 | |
| 36 | +        push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
 | |
| 37 | +        if not url:
 | |
| 38 | +            provenance = _yaml.node_get_provenance(spec_node, 'url')
 | |
| 39 | +            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 40 | +                            "{}: empty artifact cache URL".format(provenance))
 | |
| 41 | + | |
| 42 | +        instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)
 | |
| 43 | + | |
| 44 | +        server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
 | |
| 45 | +        if server_cert and basedir:
 | |
| 46 | +            server_cert = os.path.join(basedir, server_cert)
 | |
| 47 | + | |
| 48 | +        client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
 | |
| 49 | +        if client_key and basedir:
 | |
| 50 | +            client_key = os.path.join(basedir, client_key)
 | |
| 51 | + | |
| 52 | +        client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
 | |
| 53 | +        if client_cert and basedir:
 | |
| 54 | +            client_cert = os.path.join(basedir, client_cert)
 | |
| 55 | + | |
| 56 | +        if client_key and not client_cert:
 | |
| 57 | +            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
 | |
| 58 | +            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 59 | +                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
 | |
| 60 | + | |
| 61 | +        if client_cert and not client_key:
 | |
| 62 | +            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
 | |
| 63 | +            raise LoadError(LoadErrorReason.INVALID_DATA,
 | |
| 64 | +                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
 | |
| 65 | + | |
| 66 | +        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
 | |
| 67 | + | |
| 68 | + | |
| 69 | +CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
 | |
| 70 | + | |
| 71 | + | |
| 72 | +class BlobNotFound(CASRemoteError):
 | |
| 73 | + | |
| 74 | +    def __init__(self, blob, msg):
 | |
| 75 | +        self.blob = blob
 | |
| 76 | +        super().__init__(msg)
 | |
| 77 | + | |
| 78 | + | |
| 79 | +# Represents a single remote CAS cache.
 | |
| 80 | +#
 | |
| 81 | +class CASRemote():
 | |
| 82 | +    def __init__(self, spec):
 | |
| 83 | +        self.spec = spec
 | |
| 84 | +        self._initialized = False
 | |
| 85 | +        self.channel = None
 | |
| 86 | +        self.bytestream = None
 | |
| 87 | +        self.cas = None
 | |
| 88 | +        self.ref_storage = None
 | |
| 89 | +        self.batch_update_supported = None
 | |
| 90 | +        self.batch_read_supported = None
 | |
| 91 | +        self.capabilities = None
 | |
| 92 | +        self.max_batch_total_size_bytes = None
 | |
| 93 | + | |
| 94 | +    def init(self):
 | |
| 95 | +        if not self._initialized:
 | |
| 96 | +            url = urlparse(self.spec.url)
 | |
| 97 | +            if url.scheme == 'http':
 | |
| 98 | +                port = url.port or 80
 | |
| 99 | +                self.channel = grpc.insecure_channel('{}:{}'.format(url.hostname, port))
 | |
| 100 | +            elif url.scheme == 'https':
 | |
| 101 | +                port = url.port or 443
 | |
| 102 | + | |
| 103 | +                if self.spec.server_cert:
 | |
| 104 | +                    with open(self.spec.server_cert, 'rb') as f:
 | |
| 105 | +                        server_cert_bytes = f.read()
 | |
| 106 | +                else:
 | |
| 107 | +                    server_cert_bytes = None
 | |
| 108 | + | |
| 109 | +                if self.spec.client_key:
 | |
| 110 | +                    with open(self.spec.client_key, 'rb') as f:
 | |
| 111 | +                        client_key_bytes = f.read()
 | |
| 112 | +                else:
 | |
| 113 | +                    client_key_bytes = None
 | |
| 114 | + | |
| 115 | +                if self.spec.client_cert:
 | |
| 116 | +                    with open(self.spec.client_cert, 'rb') as f:
 | |
| 117 | +                        client_cert_bytes = f.read()
 | |
| 118 | +                else:
 | |
| 119 | +                    client_cert_bytes = None
 | |
| 120 | + | |
| 121 | +                credentials = grpc.ssl_channel_credentials(root_certificates=server_cert_bytes,
 | |
| 122 | +                                                           private_key=client_key_bytes,
 | |
| 123 | +                                                           certificate_chain=client_cert_bytes)
 | |
| 124 | +                self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
 | |
| 125 | +            else:
 | |
| 126 | +                raise CASRemoteError("Unsupported URL: {}".format(self.spec.url))
 | |
| 127 | + | |
| 128 | +            self.bytestream = bytestream_pb2_grpc.ByteStreamStub(self.channel)
 | |
| 129 | +            self.cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self.channel)
 | |
| 130 | +            self.capabilities = remote_execution_pb2_grpc.CapabilitiesStub(self.channel)
 | |
| 131 | +            self.ref_storage = buildstream_pb2_grpc.ReferenceStorageStub(self.channel)
 | |
| 132 | + | |
| 133 | +            self.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
 | |
| 134 | +            try:
 | |
| 135 | +                request = remote_execution_pb2.GetCapabilitiesRequest()
 | |
| 136 | +                response = self.capabilities.GetCapabilities(request)
 | |
| 137 | +                server_max_batch_total_size_bytes = response.cache_capabilities.max_batch_total_size_bytes
 | |
| 138 | +                if 0 < server_max_batch_total_size_bytes < self.max_batch_total_size_bytes:
 | |
| 139 | +                    self.max_batch_total_size_bytes = server_max_batch_total_size_bytes
 | |
| 140 | +            except grpc.RpcError as e:
 | |
| 141 | +                # Simply use the defaults for servers that don't implement GetCapabilities()
 | |
| 142 | +                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
 | |
| 143 | +                    raise
 | |
| 144 | + | |
| 145 | +            # Check whether the server supports BatchReadBlobs()
 | |
| 146 | +            self.batch_read_supported = False
 | |
| 147 | +            try:
 | |
| 148 | +                request = remote_execution_pb2.BatchReadBlobsRequest()
 | |
| 149 | +                response = self.cas.BatchReadBlobs(request)
 | |
| 150 | +                self.batch_read_supported = True
 | |
| 151 | +            except grpc.RpcError as e:
 | |
| 152 | +                if e.code() != grpc.StatusCode.UNIMPLEMENTED:
 | |
| 153 | +                    raise
 | |
| 154 | + | |
| 155 | +            # Check whether the server supports BatchUpdateBlobs()
 | |
| 156 | +            self.batch_update_supported = False
 | |
| 157 | +            try:
 | |
| 158 | +                request = remote_execution_pb2.BatchUpdateBlobsRequest()
 | |
| 159 | +                response = self.cas.BatchUpdateBlobs(request)
 | |
| 160 | +                self.batch_update_supported = True
 | |
| 161 | +            except grpc.RpcError as e:
 | |
| 162 | +                if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
 | |
| 163 | +                        e.code() != grpc.StatusCode.PERMISSION_DENIED):
 | |
| 164 | +                    raise
 | |
| 165 | + | |
| 166 | +            self._initialized = True
 | |
| 167 | + | |
| 168 | +    # check_remote
 | |
| 169 | +    #
 | |
| 170 | +    # Used when checking whether remote_specs work in the buildstream main
 | |
| 171 | +    # thread; runs the check in a separate process to avoid creation of gRPC
 | |
| 172 | +    # threads in the main BuildStream process.
 | |
| 173 | +    # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
 | |
| 174 | +    @classmethod
 | |
| 175 | +    def check_remote(cls, remote_spec, q):
 | |
| 176 | + | |
| 177 | +        def __check_remote():
 | |
| 178 | +            try:
 | |
| 179 | +                remote = cls(remote_spec)
 | |
| 180 | +                remote.init()
 | |
| 181 | + | |
| 182 | +                request = buildstream_pb2.StatusRequest()
 | |
| 183 | +                response = remote.ref_storage.Status(request)
 | |
| 184 | + | |
| 185 | +                if remote_spec.push and not response.allow_updates:
 | |
| 186 | +                    q.put('CAS server does not allow push')
 | |
| 187 | +                else:
 | |
| 188 | +                    # No error
 | |
| 189 | +                    q.put(None)
 | |
| 190 | + | |
| 191 | +            except grpc.RpcError as e:
 | |
| 192 | +                # str(e) is too verbose for errors reported to the user
 | |
| 193 | +                q.put(e.details())
 | |
| 194 | + | |
| 195 | +            except Exception as e:               # pylint: disable=broad-except
 | |
| 196 | +                # Whatever happens, we need to return it to the calling process
 | |
| 197 | +                #
 | |
| 198 | +                q.put(str(e))
 | |
| 199 | + | |
| 200 | +        p = multiprocessing.Process(target=__check_remote)
 | |
| 201 | + | |
| 202 | +        try:
 | |
| 203 | +            # Keep SIGINT blocked in the child process
 | |
| 204 | +            with _signals.blocked([signal.SIGINT], ignore=False):
 | |
| 205 | +                p.start()
 | |
| 206 | + | |
| 207 | +            error = q.get()
 | |
| 208 | +            p.join()
 | |
| 209 | +        except KeyboardInterrupt:
 | |
| 210 | +            utils._kill_process_tree(p.pid)
 | |
| 211 | +            raise
 | |
| 212 | + | |
| 213 | +        return error
 | |
| 214 | + | |
| 215 | +    # verify_digest_on_remote():
 | |
| 216 | +    #
 | |
| 217 | +    # Check whether the object is already on the server in which case
 | |
| 218 | +    # there is no need to upload it.
 | |
| 219 | +    #
 | |
| 220 | +    # Args:
 | |
| 221 | +    #     digest (Digest): The object digest.
 | |
| 222 | +    #
 | |
| 223 | +    def verify_digest_on_remote(self, digest):
 | |
| 224 | +        self.init()
 | |
| 225 | + | |
| 226 | +        request = remote_execution_pb2.FindMissingBlobsRequest()
 | |
| 227 | +        request.blob_digests.extend([digest])
 | |
| 228 | + | |
| 229 | +        response = self.cas.FindMissingBlobs(request)
 | |
| 230 | +        if digest in response.missing_blob_digests:
 | |
| 231 | +            return False
 | |
| 232 | + | |
| 233 | +        return True
 | |
| 234 | + | |
| 235 | +    # push_message():
 | |
| 236 | +    #
 | |
| 237 | +    # Push the given protobuf message to a remote.
 | |
| 238 | +    #
 | |
| 239 | +    # Args:
 | |
| 240 | +    #     message (Message): A protobuf message to push.
 | |
| 241 | +    #
 | |
| 242 | +    # Raises:
 | |
| 243 | +    #     (CASRemoteError): if there was an error
 | |
| 244 | +    #
 | |
| 245 | +    def push_message(self, message):
 | |
| 246 | + | |
| 247 | +        message_buffer = message.SerializeToString()
 | |
| 248 | +        message_digest = utils._message_digest(message_buffer)
 | |
| 249 | + | |
| 250 | +        self.init()
 | |
| 251 | + | |
| 252 | +        with io.BytesIO(message_buffer) as b:
 | |
| 253 | +            self._send_blob(message_digest, b)
 | |
| 254 | + | |
| 255 | +        return message_digest
 | |
| 256 | + | |
| 257 | +    ################################################
 | |
| 258 | +    #             Local Private Methods            #
 | |
| 259 | +    ################################################
 | |
| 260 | +    def _fetch_blob(self, digest, stream):
 | |
| 261 | +        resource_name = '/'.join(['blobs', digest.hash, str(digest.size_bytes)])
 | |
| 262 | +        request = bytestream_pb2.ReadRequest()
 | |
| 263 | +        request.resource_name = resource_name
 | |
| 264 | +        request.read_offset = 0
 | |
| 265 | +        for response in self.bytestream.Read(request):
 | |
| 266 | +            stream.write(response.data)
 | |
| 267 | +        stream.flush()
 | |
| 268 | + | |
| 269 | +        assert digest.size_bytes == os.fstat(stream.fileno()).st_size
 | |
| 270 | + | |
| 271 | +    def _send_blob(self, digest, stream, u_uid=uuid.uuid4()):
 | |
| 272 | +        resource_name = '/'.join(['uploads', str(u_uid), 'blobs',
 | |
| 273 | +                                  digest.hash, str(digest.size_bytes)])
 | |
| 274 | + | |
| 275 | +        def request_stream(resname, instream):
 | |
| 276 | +            offset = 0
 | |
| 277 | +            finished = False
 | |
| 278 | +            remaining = digest.size_bytes
 | |
| 279 | +            while not finished:
 | |
| 280 | +                chunk_size = min(remaining, _MAX_PAYLOAD_BYTES)
 | |
| 281 | +                remaining -= chunk_size
 | |
| 282 | + | |
| 283 | +                request = bytestream_pb2.WriteRequest()
 | |
| 284 | +                request.write_offset = offset
 | |
| 285 | +                # max. _MAX_PAYLOAD_BYTES chunks
 | |
| 286 | +                request.data = instream.read(chunk_size)
 | |
| 287 | +                request.resource_name = resname
 | |
| 288 | +                request.finish_write = remaining <= 0
 | |
| 289 | + | |
| 290 | +                yield request
 | |
| 291 | + | |
| 292 | +                offset += chunk_size
 | |
| 293 | +                finished = request.finish_write
 | |
| 294 | + | |
| 295 | +        response = self.bytestream.Write(request_stream(resource_name, stream))
 | |
| 296 | + | |
| 297 | +        assert response.committed_size == digest.size_bytes
 | |
| 298 | + | |
| 299 | + | |
| 300 | +# Represents a batch of blobs queued for fetching.
 | |
| 301 | +#
 | |
| 302 | +class _CASBatchRead():
 | |
| 303 | +    def __init__(self, remote):
 | |
| 304 | +        self._remote = remote
 | |
| 305 | +        self._max_total_size_bytes = remote.max_batch_total_size_bytes
 | |
| 306 | +        self._request = remote_execution_pb2.BatchReadBlobsRequest()
 | |
| 307 | +        self._size = 0
 | |
| 308 | +        self._sent = False
 | |
| 309 | + | |
| 310 | +    def add(self, digest):
 | |
| 311 | +        assert not self._sent
 | |
| 312 | + | |
| 313 | +        new_batch_size = self._size + digest.size_bytes
 | |
| 314 | +        if new_batch_size > self._max_total_size_bytes:
 | |
| 315 | +            # Not enough space left in current batch
 | |
| 316 | +            return False
 | |
| 317 | + | |
| 318 | +        request_digest = self._request.digests.add()
 | |
| 319 | +        request_digest.hash = digest.hash
 | |
| 320 | +        request_digest.size_bytes = digest.size_bytes
 | |
| 321 | +        self._size = new_batch_size
 | |
| 322 | +        return True
 | |
| 323 | + | |
| 324 | +    def send(self):
 | |
| 325 | +        assert not self._sent
 | |
| 326 | +        self._sent = True
 | |
| 327 | + | |
| 328 | +        if not self._request.digests:
 | |
| 329 | +            return
 | |
| 330 | + | |
| 331 | +        batch_response = self._remote.cas.BatchReadBlobs(self._request)
 | |
| 332 | + | |
| 333 | +        for response in batch_response.responses:
 | |
| 334 | +            if response.status.code == code_pb2.NOT_FOUND:
 | |
| 335 | +                raise BlobNotFound(response.digest.hash, "Failed to download blob {}: {}".format(
 | |
| 336 | +                    response.digest.hash, response.status.code))
 | |
| 337 | +            if response.status.code != code_pb2.OK:
 | |
| 338 | +                raise CASRemoteError("Failed to download blob {}: {}".format(
 | |
| 339 | +                    response.digest.hash, response.status.code))
 | |
| 340 | +            if response.digest.size_bytes != len(response.data):
 | |
| 341 | +                raise CASRemoteError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
 | |
| 342 | +                    response.digest.hash, response.digest.size_bytes, len(response.data)))
 | |
| 343 | + | |
| 344 | +            yield (response.digest, response.data)
 | |
| 345 | + | |
| 346 | + | |
| 347 | +# Represents a batch of blobs queued for upload.
 | |
| 348 | +#
 | |
| 349 | +class _CASBatchUpdate():
 | |
| 350 | +    def __init__(self, remote):
 | |
| 351 | +        self._remote = remote
 | |
| 352 | +        self._max_total_size_bytes = remote.max_batch_total_size_bytes
 | |
| 353 | +        self._request = remote_execution_pb2.BatchUpdateBlobsRequest()
 | |
| 354 | +        self._size = 0
 | |
| 355 | +        self._sent = False
 | |
| 356 | + | |
| 357 | +    def add(self, digest, stream):
 | |
| 358 | +        assert not self._sent
 | |
| 359 | + | |
| 360 | +        new_batch_size = self._size + digest.size_bytes
 | |
| 361 | +        if new_batch_size > self._max_total_size_bytes:
 | |
| 362 | +            # Not enough space left in current batch
 | |
| 363 | +            return False
 | |
| 364 | + | |
| 365 | +        blob_request = self._request.requests.add()
 | |
| 366 | +        blob_request.digest.hash = digest.hash
 | |
| 367 | +        blob_request.digest.size_bytes = digest.size_bytes
 | |
| 368 | +        blob_request.data = stream.read(digest.size_bytes)
 | |
| 369 | +        self._size = new_batch_size
 | |
| 370 | +        return True
 | |
| 371 | + | |
| 372 | +    def send(self):
 | |
| 373 | +        assert not self._sent
 | |
| 374 | +        self._sent = True
 | |
| 375 | + | |
| 376 | +        if not self._request.requests:
 | |
| 377 | +            return
 | |
| 378 | + | |
| 379 | +        batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
 | |
| 380 | + | |
| 381 | +        for response in batch_response.responses:
 | |
| 382 | +            if response.status.code != code_pb2.OK:
 | |
| 383 | +                raise CASRemoteError("Failed to upload blob {}: {}".format(
 | |
| 384 | +                    response.digest.hash, response.status.code)) | 
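Note on the batch helpers above: a _CASBatchRead or _CASBatchUpdate is filled with add() until the next digest would push the batch past max_batch_total_size_bytes, at which point add() returns False, the batch is sent, and a fresh one is started. A rough sketch of that driving loop follows; the fetch_in_batches() helper and its arguments are illustrative only, not part of this patch:

    # Sketch only: drive a _CASBatchRead against an already initialised CASRemote.
    def fetch_in_batches(remote, digests):
        batch = _CASBatchRead(remote)
        for digest in digests:
            if not batch.add(digest):
                # The current batch is full; flush it and start a new one
                yield from batch.send()
                batch = _CASBatchRead(remote)
                batch.add(digest)  # assumes a single blob always fits in one batch
        yield from batch.send()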
| ... | ... | @@ -31,7 +31,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError | 
| 31 | 31 |  from ._message import Message, MessageType
 | 
| 32 | 32 |  from ._profile import Topics, profile_start, profile_end
 | 
| 33 | 33 |  from ._artifactcache import ArtifactCache
 | 
| 34 | -from ._artifactcache.cascache import CASCache
 | |
| 34 | +from ._cas import CASCache
 | |
| 35 | 35 |  from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
 | 
| 36 | 36 |  from .plugin import _plugin_lookup
 | 
| 37 | 37 |  from .sandbox import SandboxRemote
 | 
| ... | ... | @@ -317,11 +317,18 @@ class Context(): | 
| 317 | 317 |      # invoked with as opposed to a junctioned subproject.
 | 
| 318 | 318 |      #
 | 
| 319 | 319 |      # Returns:
 | 
| 320 | -    #    (list): The list of projects
 | |
| 320 | +    #    (Project): The Project object
 | |
| 321 | 321 |      #
 | 
| 322 | 322 |      def get_toplevel_project(self):
 | 
| 323 | 323 |          return self._projects[0]
 | 
| 324 | 324 |  | 
| 325 | +    # get_workspaces():
 | |
| 326 | +    #
 | |
| 327 | +    # Return a Workspaces object containing a list of workspaces.
 | |
| 328 | +    #
 | |
| 329 | +    # Returns:
 | |
| 330 | +    #    (Workspaces): The Workspaces object
 | |
| 331 | +    #
 | |
| 325 | 332 |      def get_workspaces(self):
 | 
| 326 | 333 |          return self._workspaces
 | 
| 327 | 334 |  | 
| ... | ... | @@ -284,6 +284,21 @@ class CASError(BstError): | 
| 284 | 284 |          super().__init__(message, detail=detail, domain=ErrorDomain.CAS, reason=reason, temporary=True)
 | 
| 285 | 285 |  | 
| 286 | 286 |  | 
| 287 | +# CASRemoteError
 | |
| 288 | +#
 | |
| 289 | +# Raised when errors are encountered in the remote CAS
 | |
| 290 | +class CASRemoteError(CASError):
 | |
| 291 | +    pass
 | |
| 292 | + | |
| 293 | + | |
| 294 | +# CASCacheError
 | |
| 295 | +#
 | |
| 296 | +# Raised when errors are encountered in the local CASCache
 | |
| 297 | +#
 | |
| 298 | +class CASCacheError(CASError):
 | |
| 299 | +    pass
 | |
| 300 | + | |
| 301 | + | |
| 287 | 302 |  # PipelineError
 | 
| 288 | 303 |  #
 | 
| 289 | 304 |  # Raised from pipeline operations
 | 
| ... | ... | @@ -2,6 +2,7 @@ import os | 
| 2 | 2 |  import sys
 | 
| 3 | 3 |  from contextlib import ExitStack
 | 
| 4 | 4 |  from fnmatch import fnmatch
 | 
| 5 | +from functools import partial
 | |
| 5 | 6 |  from tempfile import TemporaryDirectory
 | 
| 6 | 7 |  | 
| 7 | 8 |  import click
 | 
| ... | ... | @@ -111,14 +112,25 @@ def complete_target(args, incomplete): | 
| 111 | 112 |      return complete_list
 | 
| 112 | 113 |  | 
| 113 | 114 |  | 
| 114 | -def complete_artifact(args, incomplete):
 | |
| 115 | +def complete_artifact(orig_args, args, incomplete):
 | |
| 115 | 116 |      from .._context import Context
 | 
| 116 | 117 |      ctx = Context()
 | 
| 117 | 118 |  | 
| 118 | 119 |      config = None
 | 
| 119 | -    for i, arg in enumerate(args):
 | |
| 120 | -        if arg in ('-c', '--config'):
 | |
| 121 | -            config = args[i + 1]
 | |
| 120 | +    if orig_args:
 | |
| 121 | +        for i, arg in enumerate(orig_args):
 | |
| 122 | +            if arg in ('-c', '--config'):
 | |
| 123 | +                try:
 | |
| 124 | +                    config = orig_args[i + 1]
 | |
| 125 | +                except IndexError:
 | |
| 126 | +                    pass
 | |
| 127 | +    if args:
 | |
| 128 | +        for i, arg in enumerate(args):
 | |
| 129 | +            if arg in ('-c', '--config'):
 | |
| 130 | +                try:
 | |
| 131 | +                    config = args[i + 1]
 | |
| 132 | +                except IndexError:
 | |
| 133 | +                    pass
 | |
| 122 | 134 |      ctx.load(config)
 | 
| 123 | 135 |  | 
| 124 | 136 |      # element targets are valid artifact names
 | 
| ... | ... | @@ -128,8 +140,9 @@ def complete_artifact(args, incomplete): | 
| 128 | 140 |      return complete_list
 | 
| 129 | 141 |  | 
| 130 | 142 |  | 
| 131 | -def override_completions(cmd, cmd_param, args, incomplete):
 | |
| 143 | +def override_completions(orig_args, cmd, cmd_param, args, incomplete):
 | |
| 132 | 144 |      """
 | 
| 145 | +    :param orig_args: original, non-completion args
 | |
| 133 | 146 |      :param cmd_param: command definition
 | 
| 134 | 147 |      :param args: full list of args typed before the incomplete arg
 | 
| 135 | 148 |      :param incomplete: the incomplete text to autocomplete
 | 
| ... | ... | @@ -150,7 +163,7 @@ def override_completions(cmd, cmd_param, args, incomplete): | 
| 150 | 163 |                  cmd_param.opts == ['--track-except']):
 | 
| 151 | 164 |              return complete_target(args, incomplete)
 | 
| 152 | 165 |          if cmd_param.name == 'artifacts':
 | 
| 153 | -            return complete_artifact(args, incomplete)
 | |
| 166 | +            return complete_artifact(orig_args, args, incomplete)
 | |
| 154 | 167 |  | 
| 155 | 168 |      raise CompleteUnhandled()
 | 
| 156 | 169 |  | 
| ... | ... | @@ -161,7 +174,7 @@ def override_main(self, args=None, prog_name=None, complete_var=None, | 
| 161 | 174 |      # Hook for the Bash completion.  This only activates if the Bash
 | 
| 162 | 175 |      # completion is actually enabled, otherwise this is quite a fast
 | 
| 163 | 176 |      # noop.
 | 
| 164 | -    if main_bashcomplete(self, prog_name, override_completions):
 | |
| 177 | +    if main_bashcomplete(self, prog_name, partial(override_completions, args)):
 | |
| 165 | 178 |  | 
| 166 | 179 |          # If we're running tests we cant just go calling exit()
 | 
| 167 | 180 |          # from the main process.
 | 
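The completion hook above is now wrapped with functools.partial so that the original argv is pre-bound before click's completion machinery invokes it; complete_artifact() can then read -c/--config from the real command line as well as from the completion words. A generic illustration of that binding (the names here are made up):

    from functools import partial

    def hook(orig_args, cmd, incomplete):
        return (orig_args, cmd, incomplete)

    bound = partial(hook, ['-c', 'config.yml'])
    # Later calls only supply the remaining arguments; orig_args is already bound
    assert bound('artifact', 'te') == (['-c', 'config.yml'], 'artifact', 'te')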
| ... | ... | @@ -296,18 +296,24 @@ class GitMirror(SourceFetcher): | 
| 296 | 296 |              shallow = set()
 | 
| 297 | 297 |              for _, commit_ref, _ in self.tags:
 | 
| 298 | 298 |  | 
| 299 | -                _, out = self.source.check_output([self.source.host_git, 'rev-list',
 | |
| 300 | -                                                   '--boundary', '{}..{}'.format(commit_ref, self.ref)],
 | |
| 301 | -                                                  fail="Failed to get git history {}..{} in directory: {}"
 | |
| 302 | -                                                  .format(commit_ref, self.ref, fullpath),
 | |
| 303 | -                                                  fail_temporarily=True,
 | |
| 304 | -                                                  cwd=self.mirror)
 | |
| 305 | -                for line in out.splitlines():
 | |
| 306 | -                    rev = line.lstrip('-')
 | |
| 307 | -                    if line[0] == '-':
 | |
| 308 | -                        shallow.add(rev)
 | |
| 309 | -                    else:
 | |
| 310 | -                        included.add(rev)
 | |
| 299 | +                if commit_ref == self.ref:
 | |
| 300 | +                    # rev-list does not work in case of same rev
 | |
| 301 | +                    shallow.add(self.ref)
 | |
| 302 | +                else:
 | |
| 303 | +                    _, out = self.source.check_output([self.source.host_git, 'rev-list',
 | |
| 304 | +                                                       '--ancestry-path', '--boundary',
 | |
| 305 | +                                                       '{}..{}'.format(commit_ref, self.ref)],
 | |
| 306 | +                                                      fail="Failed to get git history {}..{} in directory: {}"
 | |
| 307 | +                                                      .format(commit_ref, self.ref, fullpath),
 | |
| 308 | +                                                      fail_temporarily=True,
 | |
| 309 | +                                                      cwd=self.mirror)
 | |
| 310 | +                    self.source.warn("refs {}..{}: {}".format(commit_ref, self.ref, out.splitlines()))
 | |
| 311 | +                    for line in out.splitlines():
 | |
| 312 | +                        rev = line.lstrip('-')
 | |
| 313 | +                        if line[0] == '-':
 | |
| 314 | +                            shallow.add(rev)
 | |
| 315 | +                        else:
 | |
| 316 | +                            included.add(rev)
 | |
| 311 | 317 |  | 
| 312 | 318 |              shallow -= included
 | 
| 313 | 319 |              included |= shallow
 | 
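For context on the parsing loop above: `git rev-list --boundary` prefixes boundary commits with '-', and the new code sorts those into the shallow set while fully included commits go into included. A standalone sketch of that split, with invented revision names:

    # Sketch of the '-' prefix handling in `git rev-list --boundary` output
    out = "\n".join([
        "aaaa",      # commit whose history is fully included
        "-bbbb",     # boundary commit, kept shallow
    ])
    shallow, included = set(), set()
    for line in out.splitlines():
        rev = line.lstrip('-')
        if line[0] == '-':
            shallow.add(rev)
        else:
            included.add(rev)
    assert shallow == {'bbbb'} and included == {'aaaa'}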
| ... | ... | @@ -22,6 +22,7 @@ | 
| 22 | 22 |  import os
 | 
| 23 | 23 |  import itertools
 | 
| 24 | 24 |  from operator import itemgetter
 | 
| 25 | +from collections import OrderedDict
 | |
| 25 | 26 |  | 
| 26 | 27 |  from ._exceptions import PipelineError
 | 
| 27 | 28 |  from ._message import Message, MessageType
 | 
| ... | ... | @@ -479,7 +480,7 @@ class Pipeline(): | 
| 479 | 480 |  #
 | 
| 480 | 481 |  class _Planner():
 | 
| 481 | 482 |      def __init__(self):
 | 
| 482 | -        self.depth_map = {}
 | |
| 483 | +        self.depth_map = OrderedDict()
 | |
| 483 | 484 |          self.visiting_elements = set()
 | 
| 484 | 485 |  | 
| 485 | 486 |      # Here we want to traverse the same element more than once when
 | 
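The _Planner change above swaps a plain dict for an OrderedDict so that the depth map iterates in insertion order regardless of the interpreter's dict ordering guarantees; combined with a stable sort on depth, elements of equal depth then keep a deterministic relative order. A small sketch of that property (element names invented, not the planner's real sorting code):

    from collections import OrderedDict
    from operator import itemgetter

    depth_map = OrderedDict()
    depth_map['app.bst'] = 1
    depth_map['base.bst'] = 1     # same depth as app.bst
    depth_map['target.bst'] = 0

    # sorted() is stable, so entries with equal depth keep their insertion order
    plan = [name for name, _ in sorted(depth_map.items(), key=itemgetter(1), reverse=True)]
    assert plan == ['app.bst', 'base.bst', 'target.bst']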
| ... | ... | @@ -232,7 +232,8 @@ class Project(): | 
| 232 | 232 |              'artifacts', 'options',
 | 
| 233 | 233 |              'fail-on-overlap', 'shell', 'fatal-warnings',
 | 
| 234 | 234 |              'ref-storage', 'sandbox', 'mirrors', 'remote-execution',
 | 
| 235 | -            'sources', '(@)'
 | |
| 235 | +            'sources', '(@)',
 | |
| 236 | +            'suppress-deprecation-warnings'
 | |
| 236 | 237 |          ])
 | 
| 237 | 238 |  | 
| 238 | 239 |      # create_element()
 | 
| ... | ... | @@ -428,6 +429,10 @@ class Project(): | 
| 428 | 429 |  | 
| 429 | 430 |          self._validate_node(pre_config_node)
 | 
| 430 | 431 |  | 
| 432 | +        # Load plugin deprecation warning suppression config
 | |
| 433 | +        self.suppressed_deprecation_warnings = _yaml.node_get(
 | |
| 434 | +            pre_config_node, list, 'suppress-deprecation-warnings', default_value=[])
 | |
| 435 | + | |
| 431 | 436 |          # FIXME:
 | 
| 432 | 437 |          #
 | 
| 433 | 438 |          #   Performing this check manually in the absence
 | 
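The new pre_config key loaded above is the project-level switch for silencing plugin deprecation warnings. A hedged sketch of how a project might set it, written with _yaml.dump in the style of the tests in this push; the project name and plugin kind are made up, and other required project.conf keys are omitted:

    from buildstream import _yaml

    project_config = {
        'name': 'example',
        # Plugin kinds listed here will not emit deprecation warnings
        'suppress-deprecation-warnings': ['frobnicate'],
    }
    _yaml.dump(project_config, 'project.conf')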
| ... | ... | @@ -170,9 +170,9 @@ class Queue(): | 
| 170 | 170 |          skip = [job for job in jobs if self.status(job.element) == QueueStatus.SKIP]
 | 
| 171 | 171 |          wait = [job for job in jobs if job not in skip]
 | 
| 172 | 172 |  | 
| 173 | +        self.skipped_elements.extend([job.element for job in skip])
 | |
| 173 | 174 |          self._wait_queue.extend(wait)
 | 
| 174 | 175 |          self._done_queue.extend(skip)
 | 
| 175 | -        self.skipped_elements.extend(skip)
 | |
| 176 | 176 |  | 
| 177 | 177 |      # dequeue()
 | 
| 178 | 178 |      #
 | 
| ... | ... | @@ -164,6 +164,25 @@ class Plugin(): | 
| 164 | 164 |         core format version :ref:`core format version <project_format_version>`.
 | 
| 165 | 165 |      """
 | 
| 166 | 166 |  | 
| 167 | +    BST_PLUGIN_DEPRECATED = False
 | |
| 168 | +    """True if this element plugin has been deprecated.
 | |
| 169 | + | |
| 170 | +    If this is set to True, the plugin will emit a deprecation warning on
 | |
| 171 | +    instantiation, unless the project has silenced the warning for this plugin
 | |
| 172 | +    kind via the suppress-deprecation-warnings project configuration.
 | |
| 173 | + | |
| 174 | +    *Since 1.4*
 | |
| 175 | +    """
 | |
| 176 | + | |
| 177 | +    BST_PLUGIN_DEPRECATION_MESSAGE = ""
 | |
| 178 | +    """The message printed when this plugin emits a deprecation warning.
 | |
| 179 | + | |
| 180 | +    This should be set if BST_PLUGIN_DEPRECATED is True and should direct the user
 | |
| 181 | +    to the deprecated plugin's replacement.
 | |
| 182 | + | |
| 183 | +    *Since 1.4*
 | |
| 184 | +    """
 | |
| 185 | + | |
| 167 | 186 |      def __init__(self, name, context, project, provenance, type_tag):
 | 
| 168 | 187 |  | 
| 169 | 188 |          self.name = name
 | 
| ... | ... | @@ -188,6 +207,13 @@ class Plugin(): | 
| 188 | 207 |          self.__kind = modulename.split('.')[-1]
 | 
| 189 | 208 |          self.debug("Created: {}".format(self))
 | 
| 190 | 209 |  | 
| 210 | +        # If this plugin has been deprecated, emit a warning.
 | |
| 211 | +        silenced_warnings = self.__project.suppressed_deprecation_warnings
 | |
| 212 | +        if self.BST_PLUGIN_DEPRECATED and self.__kind not in silenced_warnings:
 | |
| 213 | +            detail = "Using deprecated plugin {}: {}".format(self.__kind,
 | |
| 214 | +                                                             self.BST_PLUGIN_DEPRECATION_MESSAGE)
 | |
| 215 | +            self.__message(MessageType.WARN, detail)
 | |
| 216 | + | |
| 191 | 217 |      def __del__(self):
 | 
| 192 | 218 |          # Dont send anything through the Message() pipeline at destruction time,
 | 
| 193 | 219 |          # any subsequent lookup of plugin by unique id would raise KeyError.
 | 
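To illustrate the two class attributes documented above, a deprecated plugin would declare them roughly as follows; the 'frobnicate' element and its suggested replacement are invented for the example, and the remaining abstract Element methods are omitted:

    from buildstream import Element

    class FrobnicateElement(Element):
        # Mark the plugin as deprecated and point users at its replacement
        BST_PLUGIN_DEPRECATED = True
        BST_PLUGIN_DEPRECATION_MESSAGE = "The 'frobnicate' plugin is deprecated, use 'frobnicate2' instead"

        def configure(self, node):
            pass

        def preflight(self):
            pass

        def get_unique_key(self):
            return {}

    # Entry point expected by the BuildStream plugin loader
    def setup():
        return FrobnicateElement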
| ... | ... | @@ -38,7 +38,7 @@ from .._protos.google.rpc import code_pb2 | 
| 38 | 38 |  from .._exceptions import SandboxError
 | 
| 39 | 39 |  from .. import _yaml
 | 
| 40 | 40 |  from .._protos.google.longrunning import operations_pb2, operations_pb2_grpc
 | 
| 41 | -from .._artifactcache.cascache import CASRemote, CASRemoteSpec
 | |
| 41 | +from .._cas import CASRemote, CASRemoteSpec
 | |
| 42 | 42 |  | 
| 43 | 43 |  | 
| 44 | 44 |  class RemoteExecutionSpec(namedtuple('RemoteExecutionSpec', 'exec_service storage_service action_service')):
 | 
| ... | ... | @@ -348,17 +348,17 @@ class SandboxRemote(Sandbox): | 
| 348 | 348 |              except grpc.RpcError as e:
 | 
| 349 | 349 |                  raise SandboxError("Failed to push source directory to remote: {}".format(e)) from e
 | 
| 350 | 350 |  | 
| 351 | -            if not cascache.verify_digest_on_remote(casremote, upload_vdir.ref):
 | |
| 351 | +            if not casremote.verify_digest_on_remote(upload_vdir.ref):
 | |
| 352 | 352 |                  raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")
 | 
| 353 | 353 |  | 
| 354 | 354 |              # Push command and action
 | 
| 355 | 355 |              try:
 | 
| 356 | -                cascache.push_message(casremote, command_proto)
 | |
| 356 | +                casremote.push_message(command_proto)
 | |
| 357 | 357 |              except grpc.RpcError as e:
 | 
| 358 | 358 |                  raise SandboxError("Failed to push command to remote: {}".format(e))
 | 
| 359 | 359 |  | 
| 360 | 360 |              try:
 | 
| 361 | -                cascache.push_message(casremote, action)
 | |
| 361 | +                casremote.push_message(action)
 | |
| 362 | 362 |              except grpc.RpcError as e:
 | 
| 363 | 363 |                  raise SandboxError("Failed to push action to remote: {}".format(e))
 | 
| 364 | 364 |  | 
| ... | ... | @@ -32,7 +32,7 @@ def pytest_addoption(parser): | 
| 32 | 32 |  | 
| 33 | 33 |  | 
| 34 | 34 |  def pytest_runtest_setup(item):
 | 
| 35 | -    if item.get_marker('integration') and not item.config.getvalue('integration'):
 | |
| 35 | +    if item.get_closest_marker('integration') and not item.config.getvalue('integration'):
 | |
| 36 | 36 |          pytest.skip('skipping integration test')
 | 
| 37 | 37 |  | 
| 38 | 38 |  | 
| ... | ... | @@ -94,7 +94,7 @@ requiring BuildStream's more exigent dependencies by setting the | 
| 94 | 94 |  Command reference
 | 
| 95 | 95 |  ~~~~~~~~~~~~~~~~~
 | 
| 96 | 96 |  | 
| 97 | -.. click:: buildstream._artifactcache.casserver:server_main
 | |
| 97 | +.. click:: buildstream._cas.casserver:server_main
 | |
| 98 | 98 |     :prog: bst-artifact-server
 | 
| 99 | 99 |  | 
| 100 | 100 |  | 
| ... | ... | @@ -2,7 +2,7 @@ coverage==4.4 | 
| 2 | 2 |  pylint==2.2.2
 | 
| 3 | 3 |  pycodestyle==2.4.0
 | 
| 4 | 4 |  pytest==4.0.2
 | 
| 5 | -pytest-cov==2.6.0
 | |
| 5 | +pytest-cov==2.6.1
 | |
| 6 | 6 |  pytest-datafiles==2.0
 | 
| 7 | 7 |  pytest-env==0.6.2
 | 
| 8 | 8 |  pytest-xdist==1.25.0
 | 
| ... | ... | @@ -3,8 +3,7 @@ import pytest | 
| 3 | 3 |  import itertools
 | 
| 4 | 4 |  import os
 | 
| 5 | 5 |  | 
| 6 | -from buildstream._artifactcache import ArtifactCacheSpec
 | |
| 7 | -from buildstream._artifactcache.artifactcache import _configured_remote_artifact_cache_specs
 | |
| 6 | +from buildstream._artifactcache import ArtifactCacheSpec, _configured_remote_artifact_cache_specs
 | |
| 8 | 7 |  from buildstream._context import Context
 | 
| 9 | 8 |  from buildstream._project import Project
 | 
| 10 | 9 |  from buildstream.utils import _deduplicate
 | 
| ... | ... | @@ -342,13 +342,13 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success): | 
| 342 | 342 |          total_space = 10000
 | 
| 343 | 343 |  | 
| 344 | 344 |      volume_space_patch = mock.patch(
 | 
| 345 | -        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
 | |
| 345 | +        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
 | |
| 346 | 346 |          autospec=True,
 | 
| 347 | 347 |          return_value=(free_space, total_space),
 | 
| 348 | 348 |      )
 | 
| 349 | 349 |  | 
| 350 | 350 |      cache_size_patch = mock.patch(
 | 
| 351 | -        "buildstream._artifactcache.artifactcache.ArtifactCache.get_cache_size",
 | |
| 351 | +        "buildstream._artifactcache.ArtifactCache.get_cache_size",
 | |
| 352 | 352 |          autospec=True,
 | 
| 353 | 353 |          return_value=0,
 | 
| 354 | 354 |      )
 | 
| ... | ... | @@ -281,3 +281,44 @@ def test_argument_element_invalid(datafiles, cli, project, cmd, word_idx, expect | 
| 281 | 281 |  ])
 | 
| 282 | 282 |  def test_help_commands(cli, cmd, word_idx, expected):
 | 
| 283 | 283 |      assert_completion(cli, cmd, word_idx, expected)
 | 
| 284 | + | |
| 285 | + | |
| 286 | +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
 | |
| 287 | +def test_argument_artifact(cli, tmpdir, datafiles):
 | |
| 288 | +    project = os.path.join(datafiles.dirname, datafiles.basename)
 | |
| 289 | + | |
| 290 | +    # Build an import element with no dependencies (as there will only be ONE cache key)
 | |
| 291 | +    result = cli.run(project=project, args=['build', 'import-bin.bst'])  # Has no dependencies
 | |
| 292 | +    result.assert_success()
 | |
| 293 | + | |
| 294 | +    # Get the key and the artifact ref ($project/$element_name/$key)
 | |
| 295 | +    key = cli.get_element_key(project, 'import-bin.bst')
 | |
| 296 | +    artifact = os.path.join('test', 'import-bin', key)
 | |
| 297 | + | |
| 298 | +    # Test autocompletion of the artifact
 | |
| 299 | +    cmds = [
 | |
| 300 | +        'bst artifact log ',
 | |
| 301 | +        'bst artifact log t',
 | |
| 302 | +        'bst artifact log test/'
 | |
| 303 | +    ]
 | |
| 304 | + | |
| 305 | +    for i, cmd in enumerate(cmds):
 | |
| 306 | +        word_idx = 3
 | |
| 307 | +        result = cli.run(project=project, cwd=project, env={
 | |
| 308 | +            '_BST_COMPLETION': 'complete',
 | |
| 309 | +            'COMP_WORDS': cmd,
 | |
| 310 | +            'COMP_CWORD': str(word_idx)
 | |
| 311 | +        })
 | |
| 312 | +        words = []
 | |
| 313 | +        if result.output:
 | |
| 314 | +            words = result.output.splitlines()  # This leaves an extra space on each e.g. ['foo.bst ']
 | |
| 315 | +            words = [word.strip() for word in words]
 | |
| 316 | + | |
| 317 | +            if i == 0:
 | |
| 318 | +                expected = PROJECT_ELEMENTS + [artifact]  # We should now be able to see the artifact
 | |
| 319 | +            elif i == 1:
 | |
| 320 | +                expected = ['target.bst', artifact]
 | |
| 321 | +            elif i == 2:
 | |
| 322 | +                expected = [artifact]
 | |
| 323 | + | |
| 324 | +            assert expected == words | 
| 1 | +import os
 | |
| 2 | + | |
| 3 | +import pytest
 | |
| 4 | +from tests.testutils import cli, create_repo
 | |
| 5 | + | |
| 6 | +from buildstream import _yaml
 | |
| 7 | + | |
| 8 | +# Project directory
 | |
| 9 | +DATA_DIR = os.path.join(
 | |
| 10 | +    os.path.dirname(os.path.realpath(__file__)),
 | |
| 11 | +    "project",
 | |
| 12 | +)
 | |
| 13 | + | |
| 14 | + | |
| 15 | +def create_element(repo, name, path, dependencies, ref=None):
 | |
| 16 | +    element = {
 | |
| 17 | +        'kind': 'import',
 | |
| 18 | +        'sources': [
 | |
| 19 | +            repo.source_config(ref=ref)
 | |
| 20 | +        ],
 | |
| 21 | +        'depends': dependencies
 | |
| 22 | +    }
 | |
| 23 | +    _yaml.dump(element, os.path.join(path, name))
 | |
| 24 | + | |
| 25 | + | |
| 26 | +# This tests a variety of scenarios and checks that the order in
 | |
| 27 | +# which things are processed remains stable.
 | |
| 28 | +#
 | |
| 29 | +# This is especially important in order to ensure that our
 | |
| 30 | +# depth sorting and optimization of which elements should be
 | |
| 31 | +# processed first is doing its job right, and that we are
 | |
| 32 | +# promoting elements to the build queue as soon as possible
 | |
| 33 | +#
 | |
| 34 | +# Parameters:
 | |
| 35 | +#    target (str): The target element to invoke bst with
 | |
| 36 | +#    template (dict): The project template dictionary, for create_element()
 | |
| 37 | +#    expected (list): A list of element names in the expected order
 | |
| 38 | +#
 | |
| 39 | +@pytest.mark.datafiles(os.path.join(DATA_DIR))
 | |
| 40 | +@pytest.mark.parametrize("target,template,expected", [
 | |
| 41 | +    # First simple test
 | |
| 42 | +    ('3.bst', {
 | |
| 43 | +        '0.bst': ['1.bst'],
 | |
| 44 | +        '1.bst': [],
 | |
| 45 | +        '2.bst': ['0.bst'],
 | |
| 46 | +        '3.bst': ['0.bst', '1.bst', '2.bst']
 | |
| 47 | +    }, ['1.bst', '0.bst', '2.bst', '3.bst']),
 | |
| 48 | + | |
| 49 | +    # A more complicated test with build of build dependencies
 | |
| 50 | +    ('target.bst', {
 | |
| 51 | +        'a.bst': [],
 | |
| 52 | +        'base.bst': [],
 | |
| 53 | +        'timezones.bst': [],
 | |
| 54 | +        'middleware.bst': [{'filename': 'base.bst', 'type': 'build'}],
 | |
| 55 | +        'app.bst': [{'filename': 'middleware.bst', 'type': 'build'}],
 | |
| 56 | +        'target.bst': ['a.bst', 'base.bst', 'middleware.bst', 'app.bst', 'timezones.bst']
 | |
| 57 | +    }, ['base.bst', 'middleware.bst', 'a.bst', 'app.bst', 'timezones.bst', 'target.bst']),
 | |
| 58 | +])
 | |
| 59 | +@pytest.mark.parametrize("operation", [('show'), ('fetch'), ('build')])
 | |
| 60 | +def test_order(cli, datafiles, tmpdir, operation, target, template, expected):
 | |
| 61 | +    project = os.path.join(datafiles.dirname, datafiles.basename)
 | |
| 62 | +    dev_files_path = os.path.join(project, 'files', 'dev-files')
 | |
| 63 | +    element_path = os.path.join(project, 'elements')
 | |
| 64 | + | |
| 65 | +    # FIXME: Remove this when the test passes reliably.
 | |
| 66 | +    #
 | |
| 67 | +    #        There is no reason why the order should not
 | |
| 68 | +    #        be preserved when the number of builders is set to 1,
 | |
| 69 | +    #        but the scheduler queue processing still seems to
 | |
| 70 | +    #        be losing the order.
 | |
| 71 | +    #
 | |
| 72 | +    if operation == 'build':
 | |
| 73 | +        pytest.skip("FIXME: This still only sometimes passes")
 | |
| 74 | + | |
| 75 | +    # Configure to only allow one fetcher at a time, to make it easy to
 | |
| 76 | +    # determine what is being planned in what order.
 | |
| 77 | +    cli.configure({
 | |
| 78 | +        'scheduler': {
 | |
| 79 | +            'fetchers': 1,
 | |
| 80 | +            'builders': 1
 | |
| 81 | +        }
 | |
| 82 | +    })
 | |
| 83 | + | |
| 84 | +    # Build the project from the template, making import elements
 | |
| 85 | +    # that all use the same repo
 | |
| 86 | +    #
 | |
| 87 | +    repo = create_repo('git', str(tmpdir))
 | |
| 88 | +    ref = repo.create(dev_files_path)
 | |
| 89 | +    for element, dependencies in template.items():
 | |
| 90 | +        create_element(repo, element, element_path, dependencies, ref=ref)
 | |
| 91 | +        repo.add_commit()
 | |
| 92 | + | |
| 93 | +    # Run test and collect results
 | |
| 94 | +    if operation == 'show':
 | |
| 95 | +        result = cli.run(args=['show', '--deps', 'plan', '--format', '%{name}', target], project=project, silent=True)
 | |
| 96 | +        result.assert_success()
 | |
| 97 | +        results = result.output.splitlines()
 | |
| 98 | +    else:
 | |
| 99 | +        if operation == 'fetch':
 | |
| 100 | +            result = cli.run(args=['source', 'fetch', target], project=project, silent=True)
 | |
| 101 | +        else:
 | |
| 102 | +            result = cli.run(args=[operation, target], project=project, silent=True)
 | |
| 103 | +        result.assert_success()
 | |
| 104 | +        results = result.get_start_order(operation)
 | |
| 105 | + | |
| 106 | +    # Assert the order
 | |
| 107 | +    print("Expected order: {}".format(expected))
 | |
| 108 | +    print("Observed result order: {}".format(results))
 | |
| 109 | +    assert results == expected | 
| ... | ... | @@ -3,7 +3,7 @@ import pytest | 
| 3 | 3 |  | 
| 4 | 4 |  from buildstream._exceptions import ErrorDomain
 | 
| 5 | 5 |  | 
| 6 | -from buildstream._artifactcache.cascache import CASCache
 | |
| 6 | +from buildstream._cas import CASCache
 | |
| 7 | 7 |  from buildstream.storage._casbaseddirectory import CasBasedDirectory
 | 
| 8 | 8 |  from buildstream.storage._filebaseddirectory import FileBasedDirectory
 | 
| 9 | 9 |  | 
| ... | ... | @@ -883,6 +883,195 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type): | 
| 883 | 883 |      assert p.returncode != 0
 | 
| 884 | 884 |  | 
| 885 | 885 |  | 
| 886 | +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
 | |
| 887 | +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
 | |
| 888 | +@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
 | |
| 889 | +@pytest.mark.parametrize("tag_type", [('annotated'), ('lightweight')])
 | |
| 890 | +def test_git_describe_head_is_tagged(cli, tmpdir, datafiles, ref_storage, tag_type):
 | |
| 891 | +    project = str(datafiles)
 | |
| 892 | + | |
| 893 | +    project_config = _yaml.load(os.path.join(project, 'project.conf'))
 | |
| 894 | +    project_config['ref-storage'] = ref_storage
 | |
| 895 | +    _yaml.dump(_yaml.node_sanitize(project_config), os.path.join(project, 'project.conf'))
 | |
| 896 | + | |
| 897 | +    repofiles = os.path.join(str(tmpdir), 'repofiles')
 | |
| 898 | +    os.makedirs(repofiles, exist_ok=True)
 | |
| 899 | +    file0 = os.path.join(repofiles, 'file0')
 | |
| 900 | +    with open(file0, 'w') as f:
 | |
| 901 | +        f.write('test\n')
 | |
| 902 | + | |
| 903 | +    repo = create_repo('git', str(tmpdir))
 | |
| 904 | + | |
| 905 | +    def tag(name):
 | |
| 906 | +        if tag_type == 'annotated':
 | |
| 907 | +            repo.add_annotated_tag(name, name)
 | |
| 908 | +        else:
 | |
| 909 | +            repo.add_tag(name)
 | |
| 910 | + | |
| 911 | +    ref = repo.create(repofiles)
 | |
| 912 | +    tag('uselesstag')
 | |
| 913 | + | |
| 914 | +    file1 = os.path.join(str(tmpdir), 'file1')
 | |
| 915 | +    with open(file1, 'w') as f:
 | |
| 916 | +        f.write('test\n')
 | |
| 917 | +    repo.add_file(file1)
 | |
| 918 | + | |
| 919 | +    file2 = os.path.join(str(tmpdir), 'file2')
 | |
| 920 | +    with open(file2, 'w') as f:
 | |
| 921 | +        f.write('test\n')
 | |
| 922 | +    repo.branch('branch2')
 | |
| 923 | +    repo.add_file(file2)
 | |
| 924 | + | |
| 925 | +    repo.checkout('master')
 | |
| 926 | +    file3 = os.path.join(str(tmpdir), 'file3')
 | |
| 927 | +    with open(file3, 'w') as f:
 | |
| 928 | +        f.write('test\n')
 | |
| 929 | +    repo.add_file(file3)
 | |
| 930 | + | |
| 931 | +    tagged_ref = repo.merge('branch2')
 | |
| 932 | +    tag('tag')
 | |
| 933 | + | |
| 934 | +    config = repo.source_config()
 | |
| 935 | +    config['track'] = repo.latest_commit()
 | |
| 936 | +    config['track-tags'] = True
 | |
| 937 | + | |
| 938 | +    # Write out our test target
 | |
| 939 | +    element = {
 | |
| 940 | +        'kind': 'import',
 | |
| 941 | +        'sources': [
 | |
| 942 | +            config
 | |
| 943 | +        ],
 | |
| 944 | +    }
 | |
| 945 | +    element_path = os.path.join(project, 'target.bst')
 | |
| 946 | +    _yaml.dump(element, element_path)
 | |
| 947 | + | |
| 948 | +    if ref_storage == 'inline':
 | |
| 949 | +        result = cli.run(project=project, args=['source', 'track', 'target.bst'])
 | |
| 950 | +        result.assert_success()
 | |
| 951 | +    else:
 | |
| 952 | +        result = cli.run(project=project, args=['source', 'track', 'target.bst', '--deps', 'all'])
 | |
| 953 | +        result.assert_success()
 | |
| 954 | + | |
| 955 | +    if ref_storage == 'inline':
 | |
| 956 | +        element = _yaml.load(element_path)
 | |
| 957 | +        tags = _yaml.node_sanitize(element['sources'][0]['tags'])
 | |
| 958 | +        assert len(tags) == 1
 | |
| 959 | +        for tag in tags:
 | |
| 960 | +            assert 'tag' in tag
 | |
| 961 | +            assert 'commit' in tag
 | |
| 962 | +            assert 'annotated' in tag
 | |
| 963 | +            assert tag['annotated'] == (tag_type == 'annotated')
 | |
| 964 | + | |
| 965 | +        assert set([(tag['tag'], tag['commit']) for tag in tags]) == set([('tag', repo.rev_parse('tag^{commit}'))])
 | |
| 966 | + | |
| 967 | +    checkout = os.path.join(str(tmpdir), 'checkout')
 | |
| 968 | + | |
| 969 | +    result = cli.run(project=project, args=['build', 'target.bst'])
 | |
| 970 | +    result.assert_success()
 | |
| 971 | +    result = cli.run(project=project, args=['checkout', 'target.bst', checkout])
 | |
| 972 | +    result.assert_success()
 | |
| 973 | + | |
| 974 | +    if tag_type == 'annotated':
 | |
| 975 | +        options = []
 | |
| 976 | +    else:
 | |
| 977 | +        options = ['--tags']
 | |
| 978 | +    describe = subprocess.check_output(['git', 'describe'] + options,
 | |
| 979 | +                                       cwd=checkout).decode('ascii')
 | |
| 980 | +    assert describe.startswith('tag')
 | |
| 981 | + | |
| 982 | +    tags = subprocess.check_output(['git', 'tag'],
 | |
| 983 | +                                   cwd=checkout).decode('ascii')
 | |
| 984 | +    tags = set(tags.splitlines())
 | |
| 985 | +    assert tags == set(['tag'])
 | |
| 986 | + | |
| 987 | +    rev_list = subprocess.check_output(['git', 'rev-list', '--all'],
 | |
| 988 | +                                       cwd=checkout).decode('ascii')
 | |
| 989 | + | |
| 990 | +    assert set(rev_list.splitlines()) == set([tagged_ref])
 | |
| 991 | + | |
| 992 | +    p = subprocess.run(['git', 'log', repo.rev_parse('uselesstag')],
 | |
| 993 | +                       cwd=checkout)
 | |
| 994 | +    assert p.returncode != 0
 | |
| 995 | + | |
| 996 | + | |
| 997 | +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
 | |
| 998 | +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
 | |
| 999 | +def test_git_describe_relevant_history(cli, tmpdir, datafiles):
 | |
| 1000 | +    project = str(datafiles)
 | |
| 1001 | + | |
| 1002 | +    project_config = _yaml.load(os.path.join(project, 'project.conf'))
 | |
| 1003 | +    project_config['ref-storage'] = 'project.refs'
 | |
| 1004 | +    _yaml.dump(_yaml.node_sanitize(project_config), os.path.join(project, 'project.conf'))
 | |
| 1005 | + | |
| 1006 | +    repofiles = os.path.join(str(tmpdir), 'repofiles')
 | |
| 1007 | +    os.makedirs(repofiles, exist_ok=True)
 | |
| 1008 | +    file0 = os.path.join(repofiles, 'file0')
 | |
| 1009 | +    with open(file0, 'w') as f:
 | |
| 1010 | +        f.write('test\n')
 | |
| 1011 | + | |
| 1012 | +    repo = create_repo('git', str(tmpdir))
 | |
| 1013 | +    repo.create(repofiles)
 | |
| 1014 | + | |
| 1015 | +    file1 = os.path.join(str(tmpdir), 'file1')
 | |
| 1016 | +    with open(file1, 'w') as f:
 | |
| 1017 | +        f.write('test\n')
 | |
| 1018 | +    repo.add_file(file1)
 | |
| 1019 | +    repo.branch('branch')
 | |
| 1020 | +    repo.checkout('master')
 | |
| 1021 | + | |
| 1022 | +    file2 = os.path.join(str(tmpdir), 'file2')
 | |
| 1023 | +    with open(file2, 'w') as f:
 | |
| 1024 | +        f.write('test\n')
 | |
| 1025 | +    repo.add_file(file2)
 | |
| 1026 | + | |
| 1027 | +    file3 = os.path.join(str(tmpdir), 'file3')
 | |
| 1028 | +    with open(file3, 'w') as f:
 | |
| 1029 | +        f.write('test\n')
 | |
| 1030 | +    branch_boundary = repo.add_file(file3)
 | |
| 1031 | + | |
| 1032 | +    repo.checkout('branch')
 | |
| 1033 | +    file4 = os.path.join(str(tmpdir), 'file4')
 | |
| 1034 | +    with open(file4, 'w') as f:
 | |
| 1035 | +        f.write('test\n')
 | |
| 1036 | +    tagged_ref = repo.add_file(file4)
 | |
| 1037 | +    repo.add_annotated_tag('tag1', 'tag1')
 | |
| 1038 | + | |
| 1039 | +    head = repo.merge('master')
 | |
| 1040 | + | |
| 1041 | +    config = repo.source_config()
 | |
| 1042 | +    config['track'] = head
 | |
| 1043 | +    config['track-tags'] = True
 | |
| 1044 | + | |
| 1045 | +    # Write out our test target
 | |
| 1046 | +    element = {
 | |
| 1047 | +        'kind': 'import',
 | |
| 1048 | +        'sources': [
 | |
| 1049 | +            config
 | |
| 1050 | +        ],
 | |
| 1051 | +    }
 | |
| 1052 | +    element_path = os.path.join(project, 'target.bst')
 | |
| 1053 | +    _yaml.dump(element, element_path)
 | |
| 1054 | + | |
| 1055 | +    result = cli.run(project=project, args=['source', 'track', 'target.bst', '--deps', 'all'])
 | |
| 1056 | +    result.assert_success()
 | |
| 1057 | + | |
| 1058 | +    checkout = os.path.join(str(tmpdir), 'checkout')
 | |
| 1059 | + | |
| 1060 | +    result = cli.run(project=project, args=['build', 'target.bst'])
 | |
| 1061 | +    result.assert_success()
 | |
| 1062 | +    result = cli.run(project=project, args=['checkout', 'target.bst', checkout])
 | |
| 1063 | +    result.assert_success()
 | |
| 1064 | + | |
| 1065 | +    describe = subprocess.check_output(['git', 'describe'],
 | |
| 1066 | +                                       cwd=checkout).decode('ascii')
 | |
| 1067 | +    assert describe.startswith('tag1-2-')
 | |
| 1068 | + | |
| 1069 | +    rev_list = subprocess.check_output(['git', 'rev-list', '--all'],
 | |
| 1070 | +                                       cwd=checkout).decode('ascii')
 | |
| 1071 | + | |
| 1072 | +    assert set(rev_list.splitlines()) == set([head, tagged_ref, branch_boundary])
 | |
| 1073 | + | |
| 1074 | + | |
| 886 | 1075 |  @pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
 | 
| 887 | 1076 |  @pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
 | 
| 888 | 1077 |  def test_default_do_not_track_tags(cli, tmpdir, datafiles):
 | 
| ... | ... | @@ -8,7 +8,7 @@ from tests.testutils import cli | 
| 8 | 8 |  from buildstream.storage._casbaseddirectory import CasBasedDirectory
 | 
| 9 | 9 |  from buildstream.storage._filebaseddirectory import FileBasedDirectory
 | 
| 10 | 10 |  from buildstream._artifactcache import ArtifactCache
 | 
| 11 | -from buildstream._artifactcache.cascache import CASCache
 | |
| 11 | +from buildstream._cas import CASCache
 | |
| 12 | 12 |  from buildstream import utils
 | 
| 13 | 13 |  | 
| 14 | 14 |  | 
| ... | ... | @@ -11,8 +11,8 @@ from multiprocessing import Process, Queue | 
| 11 | 11 |  import pytest_cov
 | 
| 12 | 12 |  | 
| 13 | 13 |  from buildstream import _yaml
 | 
| 14 | -from buildstream._artifactcache.cascache import CASCache
 | |
| 15 | -from buildstream._artifactcache.casserver import create_server
 | |
| 14 | +from buildstream._cas import CASCache
 | |
| 15 | +from buildstream._cas.casserver import create_server
 | |
| 16 | 16 |  from buildstream._exceptions import CASError
 | 
| 17 | 17 |  from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 | 
| 18 | 18 |  | 
| ... | ... | @@ -167,6 +167,23 @@ class Result(): | 
| 167 | 167 |      def assert_shell_error(self, fail_message=''):
 | 
| 168 | 168 |          assert self.exit_code == 1, fail_message
 | 
| 169 | 169 |  | 
| 170 | +    # get_start_order()
 | |
| 171 | +    #
 | |
| 172 | +    # Gets the list of elements processed in a given queue, in the
 | |
| 173 | +    # order of their first appearances in the session.
 | |
| 174 | +    #
 | |
| 175 | +    # Args:
 | |
| 176 | +    #    activity (str): The queue activity name (like 'fetch')
 | |
| 177 | +    #
 | |
| 178 | +    # Returns:
 | |
| 179 | +    #    (list): A list of element names in the order in which they first appeared in the result
 | |
| 180 | +    #
 | |
| 181 | +    def get_start_order(self, activity):
 | |
| 182 | +        results = re.findall(r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format(activity), self.stderr)
 | |
| 183 | +        if results is None:
 | |
| 184 | +            return []
 | |
| 185 | +        return list(results)
 | |
| 186 | + | |
| 170 | 187 |      # get_tracked_elements()
 | 
| 171 | 188 |      #
 | 
| 172 | 189 |      # Produces a list of element names on which tracking occurred
 | 
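The get_start_order() helper above scrapes element names out of the START status lines in the session log. A quick standalone check of the regular expression against a line shaped like those status lines; the exact log text here is illustrative, only its shape matters for the pattern:

    import re

    stderr = "[fetch:hello.bst] START   hello/fetch.12345.log\n"
    results = re.findall(r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format('fetch'), stderr)
    assert results == ['hello.bst']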
| ... | ... | @@ -23,7 +23,7 @@ def test_parse_size_over_1024T(cli, tmpdir): | 
| 23 | 23 |      _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
 | 
| 24 | 24 |  | 
| 25 | 25 |      volume_space_patch = mock.patch(
 | 
| 26 | -        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
 | |
| 26 | +        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
 | |
| 27 | 27 |          autospec=True,
 | 
| 28 | 28 |          return_value=(1025 * TiB, 1025 * TiB)
 | 
| 29 | 29 |      )
 | 
