[Notes] [Git][BuildStream/buildstream][tpollard/566] 27 commits: sandbox/sandbox.py: Use assertions for programming errors instead of BstErrors.




Tom Pollard pushed to branch tpollard/566 at BuildStream / buildstream

Commits:

27 changed files:

Changes:

  • .coveragerc
    ... ... @@ -4,11 +4,15 @@ include =
    4 4
       */buildstream/*
    
    5 5
     
    
    6 6
     omit =
    
    7
    -  # Omit profiling helper module
    
    7
    +  # Omit some internals
    
    8 8
       */buildstream/_profile.py
    
    9
    +  */buildstream/__main__.py
    
    10
    +  */buildstream/_version.py
    
    9 11
       # Omit generated code
    
    10 12
       */buildstream/_protos/*
    
    11 13
       */.eggs/*
    
    14
    +  # Omit .tox directory
    
    15
    +  */.tox/*
    
    12 16
     
    
    13 17
     [report]
    
    14 18
     show_missing = True
    

  • .gitignore
    ... ... @@ -13,11 +13,12 @@ tests/**/*.pyc
    13 13
     integration-cache/
    
    14 14
     tmp
    
    15 15
     .coverage
    
    16
    +.coverage-reports/
    
    16 17
     .coverage.*
    
    17 18
     .cache
    
    18 19
     .pytest_cache/
    
    19 20
     *.bst/
    
    20
    -.tox
    
    21
    +.tox/
    
    21 22
     
    
    22 23
     # Pycache, in case buildstream is ran directly from within the source
    
    23 24
     # tree
    

  • .gitlab-ci.yml
    ... ... @@ -13,6 +13,7 @@ variables:
    13 13
       PYTEST_ADDOPTS: "--color=yes"
    
    14 14
       INTEGRATION_CACHE: "${CI_PROJECT_DIR}/cache/integration-cache"
    
    15 15
       TEST_COMMAND: "tox -- --color=yes --integration"
    
    16
    +  COVERAGE_PREFIX: "${CI_JOB_NAME}."
    
    16 17
     
    
    17 18
     
    
    18 19
     #####################################################
    
    ... ... @@ -24,9 +25,6 @@ variables:
    24 25
     .tests-template: &tests
    
    25 26
       stage: test
    
    26 27
     
    
    27
    -  variables:
    
    28
    -    COVERAGE_DIR: coverage-linux
    
    29
    -
    
    30 28
       before_script:
    
    31 29
       # Diagnostics
    
    32 30
       - mount
    
    ... ... @@ -40,14 +38,11 @@ variables:
    40 38
       - su buildstream -c "${TEST_COMMAND}"
    
    41 39
     
    
    42 40
       after_script:
    
    43
    -  # Collect our reports
    
    44
    -  - mkdir -p ${COVERAGE_DIR}
    
    45
    -  - cp .coverage ${COVERAGE_DIR}/coverage."${CI_JOB_NAME}"
    
    46 41
       except:
    
    47 42
       - schedules
    
    48 43
       artifacts:
    
    49 44
         paths:
    
    50
    -    - ${COVERAGE_DIR}
    
    45
    +    - .coverage-reports
    
    51 46
     
    
    52 47
     tests-debian-9:
    
    53 48
       image: buildstream/testsuite-debian:9-5da27168-32c47d1c
    
    ... ... @@ -83,7 +78,6 @@ tests-unix:
    83 78
       <<: *tests
    
    84 79
       variables:
    
    85 80
         BST_FORCE_BACKEND: "unix"
    
    86
    -    COVERAGE_DIR: coverage-unix
    
    87 81
     
    
    88 82
       script:
    
    89 83
     
    
    ... ... @@ -239,22 +233,22 @@ coverage:
    239 233
       stage: post
    
    240 234
       coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
    
    241 235
       script:
    
    242
    -    - pip3 install -r requirements/requirements.txt -r requirements/dev-requirements.txt
    
    243
    -    - pip3 install --no-index .
    
    244
    -    - mkdir report
    
    245
    -    - cd report
    
    246
    -    - cp ../coverage-unix/coverage.* .
    
    247
    -    - cp ../coverage-linux/coverage.* .
    
    248
    -    - ls coverage.*
    
    249
    -    - coverage combine --rcfile=../.coveragerc -a coverage.*
    
    250
    -    - coverage report --rcfile=../.coveragerc -m
    
    236
    +    - cp -a .coverage-reports/ ./coverage-sources
    
    237
    +    - tox -e coverage
    
    238
    +    - cp -a .coverage-reports/ ./coverage-report
    
    251 239
       dependencies:
    
    252 240
       - tests-debian-9
    
    253 241
       - tests-fedora-27
    
    254 242
       - tests-fedora-28
    
    243
    +  - tests-fedora-missing-deps
    
    244
    +  - tests-ubuntu-18.04
    
    255 245
       - tests-unix
    
    256 246
       except:
    
    257 247
       - schedules
    
    248
    +  artifacts:
    
    249
    +    paths:
    
    250
    +    - coverage-sources/
    
    251
    +    - coverage-report/
    
    258 252
     
    
    259 253
     # Deploy, only for merges which land on master branch.
    
    260 254
     #
    

  • CONTRIBUTING.rst
    ... ... @@ -553,7 +553,7 @@ One problem which arises from this is that we end up having symbols
    553 553
     which are *public* according to the :ref:`rules discussed in the previous section
    
    554 554
     <contributing_public_and_private>`, but must be hidden away from the
    
    555 555
     *"Public API Surface"*. For example, BuildStream internal classes need
    
    556
    -to invoke methods on the ``Element`` and ``Source`` classes, wheras these
    
    556
    +to invoke methods on the ``Element`` and ``Source`` classes, whereas these
    
    557 557
     methods need to be hidden from the *"Public API Surface"*.
    
    558 558
     
    
    559 559
     This is where BuildStream deviates from the PEP-8 standard for public
    
    ... ... @@ -631,7 +631,7 @@ An element plugin will derive from Element by importing::
    631 631
     
    
    632 632
       from buildstream import Element
    
    633 633
     
    
    634
    -When importing utilities specifically, dont import function names
    
    634
    +When importing utilities specifically, don't import function names
    
    635 635
     from there, instead import the module itself::
    
    636 636
     
    
    637 637
       from . import utils
    
    ... ... @@ -737,7 +737,7 @@ Abstract methods
    737 737
     ~~~~~~~~~~~~~~~~
    
    738 738
     In BuildStream, an *"Abstract Method"* is a bit of a misnomer and does
    
    739 739
     not match up to how Python defines abstract methods, we need to seek out
    
    740
    -a new nomanclature to refer to these methods.
    
    740
    +a new nomenclature to refer to these methods.
    
    741 741
     
    
    742 742
     In Python, an *"Abstract Method"* is a method which **must** be
    
    743 743
     implemented by a subclass, whereas all methods in Python can be
    
    ... ... @@ -960,7 +960,7 @@ possible, and avoid any cyclic relationships in modules.
    960 960
     For instance, the ``Source`` objects are owned by ``Element``
    
    961 961
     objects in the BuildStream data model, and as such the ``Element``
    
    962 962
     will delegate some activities to the ``Source`` objects in its
    
    963
    -possesion. The ``Source`` objects should however never call functions
    
    963
    +possession. The ``Source`` objects should however never call functions
    
    964 964
     on the ``Element`` object, nor should the ``Source`` object itself
    
    965 965
     have any understanding of what an ``Element`` is.
    
    966 966
     
    
    ... ... @@ -1223,7 +1223,7 @@ For further information about using the reStructuredText with sphinx, please see
    1223 1223
     Building Docs
    
    1224 1224
     ~~~~~~~~~~~~~
    
    1225 1225
     Before you can build the docs, you will end to ensure that you have installed
    
    1226
    -the required :ref:`buid dependencies <contributing_build_deps>` as mentioned
    
    1226
    +the required :ref:`build dependencies <contributing_build_deps>` as mentioned
    
    1227 1227
     in the testing section above.
    
    1228 1228
     
    
    1229 1229
     To build the documentation, just run the following::
    
    ... ... @@ -1365,7 +1365,7 @@ Structure of an example
    1365 1365
     '''''''''''''''''''''''
    
    1366 1366
     The :ref:`tutorial <tutorial>` and the :ref:`examples <examples>` sections
    
    1367 1367
     of the documentation contain a series of sample projects, each chapter in
    
    1368
    -the tutoral, or standalone example uses a sample project.
    
    1368
    +the tutorial, or standalone example uses a sample project.
    
    1369 1369
     
    
    1370 1370
     Here is the the structure for adding new examples and tutorial chapters.
    
    1371 1371
     
    
    ... ... @@ -1471,8 +1471,8 @@ Installing build dependencies
    1471 1471
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    
    1472 1472
     Some of BuildStream's dependencies have non-python build dependencies. When
    
    1473 1473
     running tests with ``tox``, you will first need to install these dependencies.
    
    1474
    -Exact steps to install these will depend on your oprtation systemm. Commands
    
    1475
    -for installing them for some common distributions are lised below.
    
    1474
    +Exact steps to install these will depend on your operating system. Commands
    
    1475
    +for installing them for some common distributions are listed below.
    
    1476 1476
     
    
    1477 1477
     For Fedora-based systems::
    
    1478 1478
     
    
    ... ... @@ -1498,6 +1498,13 @@ option when running tox::
    1498 1498
     
    
    1499 1499
       tox -e py37
    
    1500 1500
     
    
    1501
    +If you would like to test and lint at the same time, or if you have multiple
    
    1502
    +python versions installed and would like to test against multiple versions, then
    
    1503
    +we recommend using `detox <https://github.com/tox-dev/detox>`_, just run it with
    
    1504
    +the same arguments you would give `tox`::
    
    1505
    +
    
    1506
    +  detox -e lint,py36,py37
    
    1507
    +
    
    1501 1508
     Linting is performed separately from testing. In order to run the linting step which
    
    1502 1509
     consists of running the ``pycodestyle`` and ``pylint`` tools, run the following::
    
    1503 1510
     
    
    ... ... @@ -1533,7 +1540,7 @@ the frontend tests you can do::
    1533 1540
     
    
    1534 1541
       tox -- tests/frontend/
    
    1535 1542
     
    
    1536
    -Specific tests can be chosen by using the :: delimeter after the test module.
    
    1543
    +Specific tests can be chosen by using the :: delimiter after the test module.
    
    1537 1544
     If you wanted to run the test_build_track test within frontend/buildtrack.py you could do::
    
    1538 1545
     
    
    1539 1546
       tox -- tests/frontend/buildtrack.py::test_build_track
    
    ... ... @@ -1553,7 +1560,7 @@ can run ``tox`` with ``-r`` or ``--recreate`` option.
    1553 1560
     .. note::
    
    1554 1561
     
    
    1555 1562
        By default, we do not allow use of site packages in our ``tox``
    
    1556
    -   confguration to enable running the tests in an isolated environment.
    
    1563
    +   configuration to enable running the tests in an isolated environment.
    
    1557 1564
        If you need to enable use of site packages for whatever reason, you can
    
    1558 1565
        do so by passing the ``--sitepackages`` option to ``tox``. Also, you will
    
    1559 1566
        not need to install any of the build dependencies mentioned above if you
    
    ... ... @@ -1574,10 +1581,23 @@ can run ``tox`` with ``-r`` or ``--recreate`` option.
    1574 1581
          ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'
    
    1575 1582
     
    
    1576 1583
     
    
    1584
    +Observing coverage
    
    1585
    +~~~~~~~~~~~~~~~~~~
    
    1586
    +Once you have run the tests using `tox` (or `detox`), some coverage reports will
    
    1587
    +have been left behind.
    
    1588
    +
    
    1589
    +To view the coverage report of the last test run, simply run::
    
    1590
    +
    
    1591
    +  tox -e coverage
    
    1592
    +
    
    1593
    +This will collate any reports from separate python environments that may be
    
    1594
    +under test before displaying the combined coverage.
    
    1595
    +
    
    1596
    +
    
    1577 1597
     Adding tests
    
    1578 1598
     ~~~~~~~~~~~~
    
    1579 1599
     Tests are found in the tests subdirectory, inside of which
    
    1580
    -there is a separarate directory for each *domain* of tests.
    
    1600
    +there is a separate directory for each *domain* of tests.
    
    1581 1601
     All tests are collected as::
    
    1582 1602
     
    
    1583 1603
       tests/*/*.py
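    For context on the new "tox -e coverage" environment referenced above: the exact recipe lives in tox.ini (not shown in this diff), but in effect it combines the per-environment data files and prints a single report. A rough, illustrative Python equivalent, assuming the coverage package and the .coverage-reports/ directory used by CI:

      # Illustrative sketch only; the real steps are defined in tox.ini.
      import coverage

      cov = coverage.Coverage(config_file='.coveragerc')   # reuse the omit/report settings above
      cov.combine(['.coverage-reports/'])                   # merge the per-job coverage data files
      cov.save()
      cov.report(show_missing=True)                         # mirrors show_missing = True in .coveragerc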
    

  • buildstream/_artifactcache/artifactcache.py
    ... ... @@ -74,6 +74,7 @@ class ArtifactCache():
    74 74
     
    
    75 75
             self._has_fetch_remotes = False
    
    76 76
             self._has_push_remotes = False
    
    77
    +        self._has_partial_push_remotes = False
    
    77 78
     
    
    78 79
             os.makedirs(self.extractdir, exist_ok=True)
    
    79 80
     
    
    ... ... @@ -398,6 +399,9 @@ class ArtifactCache():
    398 399
                     self._has_fetch_remotes = True
    
    399 400
                     if remote_spec.push:
    
    400 401
                         self._has_push_remotes = True
    
    402
    +                    # Partial push requires the generic push option to also be set
    
    403
    +                    if remote_spec.partial_push:
    
    404
    +                        self._has_partial_push_remotes = True
    
    401 405
     
    
    402 406
                     remotes[remote_spec.url] = CASRemote(remote_spec)
    
    403 407
     
    
    ... ... @@ -596,6 +600,32 @@ class ArtifactCache():
    596 600
                 remotes_for_project = self._remotes[element._get_project()]
    
    597 601
                 return any(remote.spec.push for remote in remotes_for_project)
    
    598 602
     
    
    603
    +    # has_partial_push_remotes():
    
    604
    +    #
    
    605
    +    # Check whether any remote repositories are available for pushing
    
    606
    +    # incomplete artifacts. This option requires the generic push option
    
    607
    +    # to also be set.
    
    608
    +    #
    
    609
    +    # Args:
    
    610
    +    #     element (Element): The Element to check
    
    611
    +    #
    
    612
    +    # Returns:
    
    613
    +    #   (bool): True if any remote repository is configured for optional
    
    614
    +    #            partial pushes, False otherwise
    
    615
    +    #
    
    616
    +    def has_partial_push_remotes(self, *, element=None):
    
    617
    +        # If there are no partial push remotes available, we can't partial push at all
    
    618
    +        if not self._has_partial_push_remotes:
    
    619
    +            return False
    
    620
    +        elif element is None:
    
    621
    +            # At least one remote is set to allow partial pushes
    
    622
    +            return True
    
    623
    +        else:
    
    624
    +            # Check whether the specified element's project has push remotes configured
    
    625
    +            # to accept partial artifact pushes
    
    626
    +            remotes_for_project = self._remotes[element._get_project()]
    
    627
    +            return any(remote.spec.partial_push for remote in remotes_for_project)
    
    628
    +
    
    599 629
         # push():
    
    600 630
         #
    
    601 631
         # Push committed artifact to remote repository.
    
    ... ... @@ -603,6 +633,8 @@ class ArtifactCache():
    603 633
         # Args:
    
    604 634
         #     element (Element): The Element whose artifact is to be pushed
    
    605 635
         #     keys (list): The cache keys to use
    
    636
    +    #     partial (bool): If the artifact is cached in a partial state
    
    637
    +    #     subdir (string): Optional subdir to not push
    
    606 638
         #
    
    607 639
         # Returns:
    
    608 640
         #   (bool): True if any remote was updated, False if no pushes were required
    
    ... ... @@ -610,12 +642,25 @@ class ArtifactCache():
    610 642
         # Raises:
    
    611 643
         #   (ArtifactError): if there was an error
    
    612 644
         #
    
    613
    -    def push(self, element, keys):
    
    645
    +    def push(self, element, keys, partial=False, subdir=None):
    
    614 646
             refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
    
    615 647
     
    
    616 648
             project = element._get_project()
    
    617 649
     
    
    618
    -        push_remotes = [r for r in self._remotes[project] if r.spec.push]
    
    650
    +        push_remotes = []
    
    651
    +        partial_remotes = []
    
    652
    +
    
    653
    +        # Create list of remotes to push to, given current element and partial push config
    
    654
    +        if not partial:
    
    655
    +            push_remotes = [r for r in self._remotes[project] if (r.spec.push and not r.spec.partial_push)]
    
    656
    +
    
    657
    +        if self._has_partial_push_remotes:
    
    658
    +            # Create a specific list of the remotes expecting the artifact to be pushed in a partial
    
    659
    +            # state. These remotes are pushed to in a partial state, without the optional subdir if it
    
    660
    +            # exists locally. There is no need to attempt pushing a partial artifact to a remote that is queued
    
    661
    +            # to also receive a full artifact
    
    662
    +            partial_remotes = [r for r in self._remotes[project] if (r.spec.partial_push and r.spec.push) and
    
    663
    +                               r not in push_remotes]
    
    619 664
     
    
    620 665
             pushed = False
    
    621 666
     
    
    ... ... @@ -624,7 +669,9 @@ class ArtifactCache():
    624 669
                 display_key = element._get_brief_display_key()
    
    625 670
                 element.status("Pushing artifact {} -> {}".format(display_key, remote.spec.url))
    
    626 671
     
    
    627
    -            if self.cas.push(refs, remote):
    
    672
    +            # Passing the optional subdir allows for remote artifacts that are cached in a 'partial'
    
    673
    +            # state to be completed
    
    674
    +            if self.cas.push(refs, remote, subdir=subdir):
    
    628 675
                     element.info("Pushed artifact {} -> {}".format(display_key, remote.spec.url))
    
    629 676
                     pushed = True
    
    630 677
                 else:
    
    ... ... @@ -632,6 +679,19 @@ class ArtifactCache():
    632 679
                         remote.spec.url, element._get_brief_display_key()
    
    633 680
                     ))
    
    634 681
     
    
    682
    +        for remote in partial_remotes:
    
    683
    +            remote.init()
    
    684
    +            display_key = element._get_brief_display_key()
    
    685
    +            element.status("Pushing partial artifact {} -> {}".format(display_key, remote.spec.url))
    
    686
    +
    
    687
    +            if self.cas.push(refs, remote, excluded_subdirs=subdir):
    
    688
    +                element.info("Pushed partial artifact {} -> {}".format(display_key, remote.spec.url))
    
    689
    +                pushed = True
    
    690
    +            else:
    
    691
    +                element.info("Remote ({}) already has {} partial cached".format(
    
    692
    +                    remote.spec.url, element._get_brief_display_key()
    
    693
    +                ))
    
    694
    +
    
    635 695
             return pushed
    
    636 696
     
    
    637 697
         # pull():
    
    ... ... @@ -659,14 +719,23 @@ class ArtifactCache():
    659 719
                     element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
    
    660 720
     
    
    661 721
                     if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
    
    662
    -                    element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    
    663 722
                         if subdir:
    
    664
    -                        # Attempt to extract subdir into artifact extract dir if it already exists
    
    665
    -                        # without containing the subdir. If the respective artifact extract dir does not
    
    666
    -                        # exist a complete extraction will complete.
    
    667
    -                        self.extract(element, key, subdir)
    
    668
    -                    # no need to pull from additional remotes
    
    669
    -                    return True
    
    723
    +                        if not self.contains_subdir_artifact(element, key, subdir):
    
    724
    +                            # The pull was expecting the specific subdir to be present in the remote, attempt
    
    725
    +                            # to find it in other available remotes
    
    726
    +                            element.info("Pulled partial artifact {} <- {}. Attempting to retrieve {} from remotes"
    
    727
    +                                         .format(display_key, remote.spec.url, subdir))
    
    728
    +                        else:
    
    729
    +                            element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    
    730
    +                            # Attempt to extract subdir into artifact extract dir if it already exists
    
    731
    +                            # without containing the subdir. If the respective artifact extract dir does not
    
    732
    +                            # exist a complete extraction will complete.
    
    733
    +                            self.extract(element, key, subdir)
    
    734
    +                            # no need to pull from additional remotes
    
    735
    +                            return True
    
    736
    +                    else:
    
    737
    +                        element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    
    738
    +                        return True
    
    670 739
                     else:
    
    671 740
                         element.info("Remote ({}) does not have {} cached".format(
    
    672 741
                             remote.spec.url, element._get_brief_display_key()
    

  • buildstream/_artifactcache/cascache.py
    ... ... @@ -45,7 +45,8 @@ from .. import _yaml
    45 45
     _MAX_PAYLOAD_BYTES = 1024 * 1024
    
    46 46
     
    
    47 47
     
    
    48
    -class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
    
    48
    +class CASRemoteSpec(namedtuple('CASRemoteSpec',
    
    49
    +                               'url push partial_push server_cert client_key client_cert instance_name')):
    
    49 50
     
    
    50 51
         # _new_from_config_node
    
    51 52
         #
    
    ... ... @@ -53,15 +54,24 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
    53 54
         #
    
    54 55
         @staticmethod
    
    55 56
         def _new_from_config_node(spec_node, basedir=None):
    
    56
    -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
    
    57
    +        _yaml.node_validate(spec_node,
    
    58
    +                            ['url', 'push', 'allow-partial-push', 'server-cert', 'client-key',
    
    59
    +                             'client-cert', 'instance_name'])
    
    57 60
             url = _yaml.node_get(spec_node, str, 'url')
    
    58 61
             push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
    
    62
    +        partial_push = _yaml.node_get(spec_node, bool, 'allow-partial-push', default_value=False)
    
    63
    +
    
    64
    +        # partial_push depends on push, raise error if not configured correctly
    
    65
    +        if partial_push and not push:
    
    66
    +            provenance = _yaml.node_get_provenance(spec_node, 'allow-partial-push')
    
    67
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    68
    +                            "{}: allow-partial-push also requires push to be set".format(provenance))
    
    59 69
             if not url:
    
    60 70
                 provenance = _yaml.node_get_provenance(spec_node, 'url')
    
    61 71
                 raise LoadError(LoadErrorReason.INVALID_DATA,
    
    62 72
                                 "{}: empty artifact cache URL".format(provenance))
    
    63 73
     
    
    64
    -        instance_name = _yaml.node_get(spec_node, str, 'instance_name', default_value=None)
    
    74
    +        instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)
    
    65 75
     
    
    66 76
             server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
    
    67 77
             if server_cert and basedir:
    
    ... ... @@ -85,10 +95,10 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
    85 95
                 raise LoadError(LoadErrorReason.INVALID_DATA,
    
    86 96
                                 "{}: 'client-cert' was specified without 'client-key'".format(provenance))
    
    87 97
     
    
    88
    -        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
    
    98
    +        return CASRemoteSpec(url, push, partial_push, server_cert, client_key, client_cert, instance_name)
    
    89 99
     
    
    90 100
     
    
    91
    -CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
    
    101
    +CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)
    
    92 102
     
    
    93 103
     
    
    94 104
     class BlobNotFound(CASError):
    
    ... ... @@ -283,34 +293,47 @@ class CASCache():
    283 293
         #   (bool): True if pull was successful, False if ref was not available
    
    284 294
         #
    
    285 295
         def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
    
    286
    -        try:
    
    287
    -            remote.init()
    
    288 296
     
    
    289
    -            request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
    
    290
    -            request.key = ref
    
    291
    -            response = remote.ref_storage.GetReference(request)
    
    297
    +        tree_found = False
    
    292 298
     
    
    293
    -            tree = remote_execution_pb2.Digest()
    
    294
    -            tree.hash = response.digest.hash
    
    295
    -            tree.size_bytes = response.digest.size_bytes
    
    299
    +        while True:
    
    300
    +            try:
    
    301
    +                if not tree_found:
    
    302
    +                    remote.init()
    
    296 303
     
    
    297
    -            # Check if the element artifact is present, if so just fetch the subdir.
    
    298
    -            if subdir and os.path.exists(self.objpath(tree)):
    
    299
    -                self._fetch_subdir(remote, tree, subdir)
    
    300
    -            else:
    
    301
    -                # Fetch artifact, excluded_subdirs determined in pullqueue
    
    302
    -                self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
    
    304
    +                    request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
    
    305
    +                    request.key = ref
    
    306
    +                    response = remote.ref_storage.GetReference(request)
    
    303 307
     
    
    304
    -            self.set_ref(ref, tree)
    
    308
    +                    tree = remote_execution_pb2.Digest()
    
    309
    +                    tree.hash = response.digest.hash
    
    310
    +                    tree.size_bytes = response.digest.size_bytes
    
    305 311
     
    
    306
    -            return True
    
    307
    -        except grpc.RpcError as e:
    
    308
    -            if e.code() != grpc.StatusCode.NOT_FOUND:
    
    309
    -                raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
    
    310
    -            else:
    
    311
    -                return False
    
    312
    -        except BlobNotFound as e:
    
    313
    -            return False
    
    312
    +                # Check if the element artifact is present, if so just fetch the subdir.
    
    313
    +                if subdir and os.path.exists(self.objpath(tree)):
    
    314
    +                    self._fetch_subdir(remote, tree, subdir)
    
    315
    +                else:
    
    316
    +                    # Fetch artifact, excluded_subdirs determined in pullqueue
    
    317
    +                    self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
    
    318
    +
    
    319
    +                self.set_ref(ref, tree)
    
    320
    +
    
    321
    +                return True
    
    322
    +            except grpc.RpcError as e:
    
    323
    +                if e.code() != grpc.StatusCode.NOT_FOUND:
    
    324
    +                    raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
    
    325
    +                else:
    
    326
    +                    return False
    
    327
    +            except BlobNotFound as e:
    
    328
    +                if not excluded_subdirs and subdir:
    
    329
    +                    # The remote has the top level digest but could not complete a full pull,
    
    330
    +                    # attempt partial without the need to initialise and check for the artifact
    
    331
    +                    # digest. This default behaviour of dropping back to partial pulls could
    
    332
    +                    # be made a configurable warning given at artfictcache level.
    
    333
    +                    tree_found = True
    
    334
    +                    excluded_subdirs, subdir = subdir, excluded_subdirs
    
    335
    +                else:
    
    336
    +                    return False
    
    314 337
     
    
    315 338
         # pull_tree():
    
    316 339
         #
    
    ... ... @@ -355,6 +378,8 @@ class CASCache():
    355 378
         # Args:
    
    356 379
         #     refs (list): The refs to push
    
    357 380
         #     remote (CASRemote): The remote to push to
    
    381
    +    #     subdir (string): Optional specific subdir to include in the push
    
    382
    +    #     excluded_subdirs (list): The optional list of subdirs to not push
    
    358 383
         #
    
    359 384
         # Returns:
    
    360 385
         #   (bool): True if any remote was updated, False if no pushes were required
    
    ... ... @@ -362,7 +387,7 @@ class CASCache():
    362 387
         # Raises:
    
    363 388
         #   (CASError): if there was an error
    
    364 389
         #
    
    365
    -    def push(self, refs, remote):
    
    390
    +    def push(self, refs, remote, *, subdir=None, excluded_subdirs=None):
    
    366 391
             skipped_remote = True
    
    367 392
             try:
    
    368 393
                 for ref in refs:
    
    ... ... @@ -376,15 +401,18 @@ class CASCache():
    376 401
                         response = remote.ref_storage.GetReference(request)
    
    377 402
     
    
    378 403
                         if response.digest.hash == tree.hash and response.digest.size_bytes == tree.size_bytes:
    
    379
    -                        # ref is already on the server with the same tree
    
    380
    -                        continue
    
    404
    +                        # ref is already on the server with the same tree, however it might be partially cached.
    
    405
    +                        # If the artifact is not set to be pushed partially, attempt to 'complete' the remote artifact if
    
    406
    +                        # needed, else continue.
    
    407
    +                        if excluded_subdirs or self.verify_digest_on_remote(remote, self._get_subdir(tree, subdir)):
    
    408
    +                            continue
    
    381 409
     
    
    382 410
                     except grpc.RpcError as e:
    
    383 411
                         if e.code() != grpc.StatusCode.NOT_FOUND:
    
    384 412
                             # Intentionally re-raise RpcError for outer except block.
    
    385 413
                             raise
    
    386 414
     
    
    387
    -                self._send_directory(remote, tree)
    
    415
    +                self._send_directory(remote, tree, excluded_dir=excluded_subdirs)
    
    388 416
     
    
    389 417
                     request = buildstream_pb2.UpdateReferenceRequest(instance_name=remote.spec.instance_name)
    
    390 418
                     request.keys.append(ref)
    
    ... ... @@ -866,10 +894,17 @@ class CASCache():
    866 894
                     a += 1
    
    867 895
                     b += 1
    
    868 896
     
    
    869
    -    def _reachable_refs_dir(self, reachable, tree, update_mtime=False):
    
    897
    +    def _reachable_refs_dir(self, reachable, tree, update_mtime=False, subdir=False):
    
    870 898
             if tree.hash in reachable:
    
    871 899
                 return
    
    872 900
     
    
    901
    +        # If looping through subdir digests, skip processing if
    
    902
    +        # ref path does not exist, allowing for partial objects
    
    903
    +        if subdir and not os.path.exists(self.objpath(tree)):
    
    904
    +            return
    
    905
    +
    
    906
    +        # Raises a FileNotFound exception if the path does not exist,
    
    907
    +        # which should only be thrown on the top level digest
    
    873 908
             if update_mtime:
    
    874 909
                 os.utime(self.objpath(tree))
    
    875 910
     
    
    ... ... @@ -886,9 +921,9 @@ class CASCache():
    886 921
                 reachable.add(filenode.digest.hash)
    
    887 922
     
    
    888 923
             for dirnode in directory.directories:
    
    889
    -            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime)
    
    924
    +            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, subdir=True)
    
    890 925
     
    
    891
    -    def _required_blobs(self, directory_digest):
    
    926
    +    def _required_blobs(self, directory_digest, excluded_dir=None):
    
    892 927
             # parse directory, and recursively add blobs
    
    893 928
             d = remote_execution_pb2.Digest()
    
    894 929
             d.hash = directory_digest.hash
    
    ... ... @@ -907,7 +942,8 @@ class CASCache():
    907 942
                 yield d
    
    908 943
     
    
    909 944
             for dirnode in directory.directories:
    
    910
    -            yield from self._required_blobs(dirnode.digest)
    
    945
    +            if dirnode.name != excluded_dir:
    
    946
    +                yield from self._required_blobs(dirnode.digest)
    
    911 947
     
    
    912 948
         def _fetch_blob(self, remote, digest, stream):
    
    913 949
             resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
    
    ... ... @@ -1029,6 +1065,7 @@ class CASCache():
    1029 1065
                 objpath = self._ensure_blob(remote, dir_digest)
    
    1030 1066
     
    
    1031 1067
                 directory = remote_execution_pb2.Directory()
    
    1068
    +
    
    1032 1069
                 with open(objpath, 'rb') as f:
    
    1033 1070
                     directory.ParseFromString(f.read())
    
    1034 1071
     
    
    ... ... @@ -1104,9 +1141,8 @@ class CASCache():
    1104 1141
     
    
    1105 1142
             assert response.committed_size == digest.size_bytes
    
    1106 1143
     
    
    1107
    -    def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
    
    1108
    -        required_blobs = self._required_blobs(digest)
    
    1109
    -
    
    1144
    +    def _send_directory(self, remote, digest, u_uid=uuid.uuid4(), excluded_dir=None):
    
    1145
    +        required_blobs = self._required_blobs(digest, excluded_dir=excluded_dir)
    
    1110 1146
             missing_blobs = dict()
    
    1111 1147
             # Limit size of FindMissingBlobs request
    
    1112 1148
             for required_blobs_group in _grouper(required_blobs, 512):
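    The namedtuple default change above is easy to misread, so here is a small sketch of the intended behaviour (the URL is made up): only url and push remain required, and partial_push defaults to False so partial pushes stay opt-in via allow-partial-push.

      # Minimal sketch of the new CASRemoteSpec defaults.
      from collections import namedtuple

      CASRemoteSpec = namedtuple(
          'CASRemoteSpec',
          'url push partial_push server_cert client_key client_cert instance_name')
      CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)

      spec = CASRemoteSpec('https://cache.example.com', True)
      assert spec.partial_push is False        # partial pushes remain opt-in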
    

  • buildstream/_context.py
    ... ... @@ -34,6 +34,7 @@ from ._artifactcache import ArtifactCache
    34 34
     from ._artifactcache.cascache import CASCache
    
    35 35
     from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
    
    36 36
     from .plugin import _plugin_lookup
    
    37
    +from .sandbox import SandboxRemote
    
    37 38
     
    
    38 39
     
    
    39 40
     # Context()
    
    ... ... @@ -72,6 +73,9 @@ class Context():
    72 73
             # The locations from which to push and pull prebuilt artifacts
    
    73 74
             self.artifact_cache_specs = None
    
    74 75
     
    
    76
    +        # The global remote execution configuration
    
    77
    +        self.remote_execution_specs = None
    
    78
    +
    
    75 79
             # The directory to store build logs
    
    76 80
             self.logdir = None
    
    77 81
     
    
    ... ... @@ -187,7 +191,7 @@ class Context():
    187 191
             _yaml.node_validate(defaults, [
    
    188 192
                 'sourcedir', 'builddir', 'artifactdir', 'logdir',
    
    189 193
                 'scheduler', 'artifacts', 'logging', 'projects',
    
    190
    -            'cache', 'prompt', 'workspacedir',
    
    194
    +            'cache', 'prompt', 'workspacedir', 'remote-execution'
    
    191 195
             ])
    
    192 196
     
    
    193 197
             for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
    
    ... ... @@ -212,6 +216,8 @@ class Context():
    212 216
             # Load artifact share configuration
    
    213 217
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
    
    214 218
     
    
    219
    +        self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
    
    220
    +
    
    215 221
             # Load pull build trees configuration
    
    216 222
             self.pull_buildtrees = _yaml.node_get(cache, bool, 'pull-buildtrees')
    
    217 223
     
    
    ... ... @@ -271,7 +277,8 @@ class Context():
    271 277
             # Shallow validation of overrides, parts of buildstream which rely
    
    272 278
             # on the overrides are expected to validate elsewhere.
    
    273 279
             for _, overrides in _yaml.node_items(self._project_overrides):
    
    274
    -            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror'])
    
    280
    +            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror',
    
    281
    +                                            'remote-execution'])
    
    275 282
     
    
    276 283
             profile_end(Topics.LOAD_CONTEXT, 'load')
    
    277 284
     
    

  • buildstream/_options/optionarch.py
    ... ... @@ -17,6 +17,8 @@
    17 17
     #  Authors:
    
    18 18
     #        Tristan Van Berkom <tristan vanberkom codethink co uk>
    
    19 19
     
    
    20
    +from .. import _yaml
    
    21
    +from .._exceptions import LoadError, LoadErrorReason, PlatformError
    
    20 22
     from .._platform import Platform
    
    21 23
     from .optionenum import OptionEnum
    
    22 24
     
    
    ... ... @@ -41,7 +43,34 @@ class OptionArch(OptionEnum):
    41 43
             super(OptionArch, self).load(node, allow_default_definition=False)
    
    42 44
     
    
    43 45
         def load_default_value(self, node):
    
    44
    -        return Platform.get_host_arch()
    
    46
    +        arch = Platform.get_host_arch()
    
    47
    +
    
    48
    +        default_value = None
    
    49
    +
    
    50
    +        for index, value in enumerate(self.values):
    
    51
    +            try:
    
    52
    +                canonical_value = Platform.canonicalize_arch(value)
    
    53
    +                if default_value is None and canonical_value == arch:
    
    54
    +                    default_value = value
    
    55
    +                    # Do not terminate the loop early to ensure we validate
    
    56
    +                    # all values in the list.
    
    57
    +            except PlatformError as e:
    
    58
    +                provenance = _yaml.node_get_provenance(node, key='values', indices=[index])
    
    59
    +                prefix = ""
    
    60
    +                if provenance:
    
    61
    +                    prefix = "{}: ".format(provenance)
    
    62
    +                raise LoadError(LoadErrorReason.INVALID_DATA,
    
    63
    +                                "{}Invalid value for {} option '{}': {}"
    
    64
    +                                .format(prefix, self.OPTION_TYPE, self.name, e))
    
    65
    +
    
    66
    +        if default_value is None:
    
    67
    +            # Host architecture is not supported by the project.
    
    68
    +            # Do not raise an error here as the user may override it.
    
    69
    +            # If the user does not override it, an error will be raised
    
    70
    +            # by resolve()/validate().
    
    71
    +            default_value = arch
    
    72
    +
    
    73
    +        return default_value
    
    45 74
     
    
    46 75
         def resolve(self):
    
    47 76
     
    

  • buildstream/_pipeline.py
    ... ... @@ -22,6 +22,7 @@
    22 22
     import os
    
    23 23
     import itertools
    
    24 24
     from operator import itemgetter
    
    25
    +from collections import OrderedDict
    
    25 26
     
    
    26 27
     from ._exceptions import PipelineError
    
    27 28
     from ._message import Message, MessageType
    
    ... ... @@ -479,7 +480,7 @@ class Pipeline():
    479 480
     #
    
    480 481
     class _Planner():
    
    481 482
         def __init__(self):
    
    482
    -        self.depth_map = {}
    
    483
    +        self.depth_map = OrderedDict()
    
    483 484
             self.visiting_elements = set()
    
    484 485
     
    
    485 486
         # Here we want to traverse the same element more than once when
    

  • buildstream/_platform/platform.py
    ... ... @@ -77,20 +77,17 @@ class Platform():
    77 77
         def get_host_os():
    
    78 78
             return os.uname()[0]
    
    79 79
     
    
    80
    -    # get_host_arch():
    
    80
    +    # canonicalize_arch():
    
    81 81
         #
    
    82
    -    # This returns the architecture of the host machine. The possible values
    
    83
    -    # map from uname -m in order to be a OS independent list.
    
    82
    +    # This returns the canonical, OS-independent architecture name
    
    83
    +    # or raises a PlatformError if the architecture is unknown.
    
    84 84
         #
    
    85
    -    # Returns:
    
    86
    -    #    (string): String representing the architecture
    
    87 85
         @staticmethod
    
    88
    -    def get_host_arch():
    
    89
    -        # get the hardware identifier from uname
    
    90
    -        uname_machine = os.uname()[4]
    
    91
    -        uname_to_arch = {
    
    86
    +    def canonicalize_arch(arch):
    
    87
    +        aliases = {
    
    88
    +            "aarch32": "aarch32",
    
    92 89
                 "aarch64": "aarch64",
    
    93
    -            "aarch64_be": "aarch64-be",
    
    90
    +            "aarch64-be": "aarch64-be",
    
    94 91
                 "amd64": "x86-64",
    
    95 92
                 "arm": "aarch32",
    
    96 93
                 "armv8l": "aarch64",
    
    ... ... @@ -99,17 +96,34 @@ class Platform():
    99 96
                 "i486": "x86-32",
    
    100 97
                 "i586": "x86-32",
    
    101 98
                 "i686": "x86-32",
    
    99
    +            "power-isa-be": "power-isa-be",
    
    100
    +            "power-isa-le": "power-isa-le",
    
    102 101
                 "ppc64": "power-isa-be",
    
    103 102
                 "ppc64le": "power-isa-le",
    
    104 103
                 "sparc": "sparc-v9",
    
    105 104
                 "sparc64": "sparc-v9",
    
    106
    -            "x86_64": "x86-64"
    
    105
    +            "sparc-v9": "sparc-v9",
    
    106
    +            "x86-32": "x86-32",
    
    107
    +            "x86-64": "x86-64"
    
    107 108
             }
    
    109
    +
    
    108 110
             try:
    
    109
    -            return uname_to_arch[uname_machine]
    
    111
    +            return aliases[arch.replace('_', '-')]
    
    110 112
             except KeyError:
    
    111
    -            raise PlatformError("uname gave unsupported machine architecture: {}"
    
    112
    -                                .format(uname_machine))
    
    113
    +            raise PlatformError("Unknown architecture: {}".format(arch))
    
    114
    +
    
    115
    +    # get_host_arch():
    
    116
    +    #
    
    117
    +    # This returns the architecture of the host machine. The possible values
    
    118
    +    # map from uname -m in order to be an OS-independent list.
    
    119
    +    #
    
    120
    +    # Returns:
    
    121
    +    #    (string): String representing the architecture
    
    122
    +    @staticmethod
    
    123
    +    def get_host_arch():
    
    124
    +        # get the hardware identifier from uname
    
    125
    +        uname_machine = os.uname()[4]
    
    126
    +        return Platform.canonicalize_arch(uname_machine)
    
    113 127
     
    
    114 128
         ##################################################################
    
    115 129
         #                        Sandbox functions                       #
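    Assuming a checkout of this branch is installed, the new helper accepts both uname-style and already-canonical names; a small example based on the alias table above:

      # canonicalize_arch() maps aliases to one canonical name and raises
      # PlatformError for anything unknown.
      from buildstream._platform import Platform

      assert Platform.canonicalize_arch('x86_64') == 'x86-64'
      assert Platform.canonicalize_arch('x86-64') == 'x86-64'
      assert Platform.canonicalize_arch('aarch64_be') == 'aarch64-be'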
    

  • buildstream/_project.py
    ... ... @@ -507,7 +507,16 @@ class Project():
    507 507
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
    
    508 508
     
    
    509 509
             # Load remote-execution configuration for this project
    
    510
    -        self.remote_execution_specs = SandboxRemote.specs_from_config_node(config, self.directory)
    
    510
    +        project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
    
    511
    +        override_specs = SandboxRemote.specs_from_config_node(
    
    512
    +            self._context.get_overrides(self.name), self.directory)
    
    513
    +
    
    514
    +        if override_specs is not None:
    
    515
    +            self.remote_execution_specs = override_specs
    
    516
    +        elif project_specs is not None:
    
    517
    +            self.remote_execution_specs = project_specs
    
    518
    +        else:
    
    519
    +            self.remote_execution_specs = self._context.remote_execution_specs
    
    511 520
     
    
    512 521
             # Load sandbox environment variables
    
    513 522
             self.base_environment = _yaml.node_get(config, Mapping, 'environment')
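    The override handling added above reduces to a simple precedence chain, restated here as a hypothetical standalone helper for readability (user override, then project config, then the context-wide default):

      # Hypothetical helper mirroring the precedence used for remote_execution_specs.
      def resolve_remote_execution_specs(override_specs, project_specs, context_specs):
          if override_specs is not None:
              return override_specs        # user configuration override for this project wins
          if project_specs is not None:
              return project_specs         # then the project's own configuration
          return context_specs             # finally the global user configuration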
    

  • buildstream/element.py
    ... ... @@ -1800,13 +1800,19 @@ class Element(Plugin):
    1800 1800
         #   (bool): True if this element does not need a push job to be created
    
    1801 1801
         #
    
    1802 1802
         def _skip_push(self):
    
    1803
    +
    
    1803 1804
             if not self.__artifacts.has_push_remotes(element=self):
    
    1804 1805
                 # No push remotes for this element's project
    
    1805 1806
                 return True
    
    1806 1807
     
    
    1807 1808
             # Do not push elements that aren't cached, or that are cached with a dangling buildtree
    
    1808
    -        # artifact unless element type is expected to have an an empty buildtree directory
    
    1809
    -        if not self._cached_buildtree():
    
    1809
    +        # artifact unless the element type is expected to have an empty buildtree directory. Check
    
    1810
    +        # that this default behaviour is not overridden via a remote configured to allow pushing
    
    1811
    +        # artifacts without their corresponding buildtree.
    
    1812
    +        if not self._cached():
    
    1813
    +            return True
    
    1814
    +
    
    1815
    +        if not self._cached_buildtree() and not self.__artifacts.has_partial_push_remotes(element=self):
    
    1810 1816
                 return True
    
    1811 1817
     
    
    1812 1818
             # Do not push tainted artifact
    
    ... ... @@ -1817,11 +1823,14 @@ class Element(Plugin):
    1817 1823
     
    
    1818 1824
         # _push():
    
    1819 1825
         #
    
    1820
    -    # Push locally cached artifact to remote artifact repository.
    
    1826
    +    # Push locally cached artifact to remote artifact repository. An attempt
    
    1827
    +    # will be made to push partial artifacts if the current configuration dictates.
    
    1828
    +    # If a remote configured for 'full' artifact pushes is found to hold the artifact
    
    1829
    +    # only partially cached, an attempt will be made to 'complete' it.
    
    1821 1830
         #
    
    1822 1831
         # Returns:
    
    1823 1832
         #   (bool): True if the remote was updated, False if it already existed
    
    1824
    -    #           and no updated was required
    
    1833
    +    #           and no update was required
    
    1825 1834
         #
    
    1826 1835
         def _push(self):
    
    1827 1836
             self.__assert_cached()
    
    ... ... @@ -1830,8 +1839,17 @@ class Element(Plugin):
    1830 1839
                 self.warn("Not pushing tainted artifact.")
    
    1831 1840
                 return False
    
    1832 1841
     
    
    1833
    -        # Push all keys used for local commit
    
    1834
    -        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit())
    
    1842
    +        # Push all keys used for the local commit; this could be full or partial,
    
    1843
    +        # given previous _skip_push() logic. If buildtree isn't cached, then
    
    1844
    +        # set partial push
    
    1845
    +
    
    1846
    +        partial = False
    
    1847
    +        subdir = 'buildtree'
    
    1848
    +        if not self._cached_buildtree():
    
    1849
    +            partial = True
    
    1850
    +
    
    1851
    +        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit(), partial=partial, subdir=subdir)
    
    1852
    +
    
    1835 1853
             if not pushed:
    
    1836 1854
                 return False
    
    1837 1855
     
    
    ... ... @@ -2441,11 +2459,17 @@ class Element(Plugin):
    2441 2459
             # Sandbox config, unlike others, has fixed members so we should validate them
    
    2442 2460
             _yaml.node_validate(sandbox_config, ['build-uid', 'build-gid', 'build-os', 'build-arch'])
    
    2443 2461
     
    
    2462
    +        build_arch = self.node_get_member(sandbox_config, str, 'build-arch', default=None)
    
    2463
    +        if build_arch:
    
    2464
    +            build_arch = Platform.canonicalize_arch(build_arch)
    
    2465
    +        else:
    
    2466
    +            build_arch = host_arch
    
    2467
    +
    
    2444 2468
             return SandboxConfig(
    
    2445 2469
                 self.node_get_member(sandbox_config, int, 'build-uid'),
    
    2446 2470
                 self.node_get_member(sandbox_config, int, 'build-gid'),
    
    2447 2471
                 self.node_get_member(sandbox_config, str, 'build-os', default=host_os),
    
    2448
    -            self.node_get_member(sandbox_config, str, 'build-arch', default=host_arch))
    
    2472
    +            build_arch)
    
    2449 2473
     
    
    2450 2474
         # This makes a special exception for the split rules, which
    
    2451 2475
         # elements may extend but whos defaults are defined in the project.
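    The updated _skip_push() conditions above read as a short decision list; a hedged restatement as standalone code (not the actual method, which also checks for tainted artifacts afterwards):

      # Order of the new skip checks in Element._skip_push().
      def should_skip_push(has_push_remotes, cached, cached_buildtree, has_partial_push_remotes):
          if not has_push_remotes:
              return True   # no push remotes for this element's project
          if not cached:
              return True   # nothing cached locally, nothing to push
          if not cached_buildtree and not has_partial_push_remotes:
              return True   # buildtree missing and no remote accepts partial artifacts
          return False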
    

  • buildstream/plugins/elements/script.py
    ... ... @@ -42,6 +42,9 @@ import buildstream
    42 42
     class ScriptElement(buildstream.ScriptElement):
    
    43 43
         # pylint: disable=attribute-defined-outside-init
    
    44 44
     
    
    45
    +    # This plugin has been modified to avoid the use of Sandbox.get_directory
    
    46
    +    BST_VIRTUAL_DIRECTORY = True
    
    47
    +
    
    45 48
         def configure(self, node):
    
    46 49
             for n in self.node_get_member(node, list, 'layout', []):
    
    47 50
                 dst = self.node_subst_member(n, 'destination')
    

  • buildstream/sandbox/_sandboxremote.py
    ... ... @@ -62,10 +62,32 @@ class SandboxRemote(Sandbox):
    62 62
             self.storage_url = config.storage_service['url']
    
    63 63
             self.exec_url = config.exec_service['url']
    
    64 64
     
    
    65
    +        exec_certs = {}
    
    66
    +        for key in ['client-cert', 'client-key', 'server-cert']:
    
    67
    +            if key in config.exec_service:
    
    68
    +                with open(config.exec_service[key], 'rb') as f:
    
    69
    +                    exec_certs[key] = f.read()
    
    70
    +
    
    71
    +        self.exec_credentials = grpc.ssl_channel_credentials(
    
    72
    +            root_certificates=exec_certs.get('server-cert'),
    
    73
    +            private_key=exec_certs.get('client-key'),
    
    74
    +            certificate_chain=exec_certs.get('client-cert'))
    
    75
    +
    
    76
    +        action_certs = {}
    
    77
    +        for key in ['client-cert', 'client-key', 'server-cert']:
    
    78
    +            if key in config.action_service:
    
    79
    +                with open(config.action_service[key], 'rb') as f:
    
    80
    +                    action_certs[key] = f.read()
    
    81
    +
    
    65 82
             if config.action_service:
    
    66 83
                 self.action_url = config.action_service['url']
    
    84
    +            self.action_credentials = grpc.ssl_channel_credentials(
    
    85
    +                root_certificates=action_certs.get('server-cert'),
    
    86
    +                private_key=action_certs.get('client-key'),
    
    87
    +                certificate_chain=action_certs.get('client-cert'))
    
    67 88
             else:
    
    68 89
                 self.action_url = None
    
    90
    +            self.action_credentials = None
    
    69 91
     
    
    70 92
             self.server_instance = config.exec_service.get('instance', None)
    
    71 93
             self.storage_instance = config.storage_service.get('instance', None)
    
    ... ... @@ -81,7 +103,7 @@ class SandboxRemote(Sandbox):
    81 103
             self._get_context().message(Message(None, MessageType.INFO, msg))
    
    82 104
     
    
    83 105
         @staticmethod
    
    84
    -    def specs_from_config_node(config_node, basedir):
    
    106
    +    def specs_from_config_node(config_node, basedir=None):
    
    85 107
     
    
    86 108
             def require_node(config, keyname):
    
    87 109
                 val = config.get(keyname)
    
    ... ... @@ -109,10 +131,10 @@ class SandboxRemote(Sandbox):
    109 131
             remote_exec_storage_config = require_node(remote_config, 'storage-service')
    
    110 132
             remote_exec_action_config = remote_config.get('action-cache-service', {})
    
    111 133
     
    
    112
    -        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'])
    
    134
    +        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'] + tls_keys)
    
    113 135
             _yaml.node_validate(remote_exec_storage_config, ['url', 'instance'] + tls_keys)
    
    114 136
             if remote_exec_action_config:
    
    115
    -            _yaml.node_validate(remote_exec_action_config, ['url'])
    
    137
    +            _yaml.node_validate(remote_exec_action_config, ['url'] + tls_keys)
    
    116 138
             else:
    
    117 139
                 remote_config['action-service'] = None
    
    118 140
     
    
    ... ... @@ -135,6 +157,19 @@ class SandboxRemote(Sandbox):
    135 157
                                           "remote-execution configuration. Your config is missing '{}'."
    
    136 158
                                           .format(str(provenance), tls_keys, key))
    
    137 159
     
    
    160
    +        def resolve_path(path):
    
    161
    +            if basedir and path:
    
    162
    +                return os.path.join(basedir, path)
    
    163
    +            else:
    
    164
    +                return path
    
    165
    +
    
    166
    +        for key in tls_keys:
    
    167
    +            for d in (remote_config['execution-service'],
    
    168
    +                      remote_config['storage-service'],
    
    169
    +                      remote_exec_action_config):
    
    170
    +                if key in d:
    
    171
    +                    d[key] = resolve_path(d[key])
    
    172
    +
    
    138 173
             spec = RemoteExecutionSpec(remote_config['execution-service'],
    
    139 174
                                        remote_config['storage-service'],
    
    140 175
                                        remote_exec_action_config)
    
    ... ... @@ -295,6 +330,8 @@ class SandboxRemote(Sandbox):
    295 330
                                    "for example: http://buildservice:50051.")
    
    296 331
             if url.scheme == 'http':
    
    297 332
                 channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
    
    333
    +        elif url.scheme == 'https':
    
    334
    +            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.exec_credentials)
    
    298 335
             else:
    
    299 336
                 raise SandboxError("Remote execution currently only supports the 'http' protocol "
    
    300 337
                                    "and '{}' was supplied.".format(url.scheme))
    
    ... ... @@ -352,11 +389,11 @@ class SandboxRemote(Sandbox):
    352 389
             if not url.port:
    
    353 390
                 raise SandboxError("You must supply a protocol and port number in the action-cache-service url, "
    
    354 391
                                    "for example: http://buildservice:50051.")
    
    355
    -        if not url.scheme == "http":
    
    356
    -            raise SandboxError("Currently only support http for the action cache"
    
    357
    -                               "and {} was supplied".format(url.scheme))
    
    392
    +        if url.scheme == 'http':
    
    393
    +            channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
    
    394
    +        elif url.scheme == 'https':
    
    395
    +            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.action_credentials)
    
    358 396
     
    
    359
    -        channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
    
    360 397
             request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
    
    361 398
             stub = remote_execution_pb2_grpc.ActionCacheStub(channel)
    
    362 399
             try:
    

  • buildstream/sandbox/sandbox.py
    ... ... @@ -288,8 +288,8 @@ class Sandbox():
    288 288
                 command = [command]
    
    289 289
     
    
    290 290
             if self.__batch:
    
    291
    -            if flags != self.__batch.flags:
    
    292
    -                raise SandboxError("Inconsistent sandbox flags in single command batch")
    
    291
    +            assert flags == self.__batch.flags, \
    
    292
    +                "Inconsistent sandbox flags in single command batch"
    
    293 293
     
    
    294 294
                 batch_command = _SandboxBatchCommand(command, cwd=cwd, env=env, label=label)
    
    295 295
     
    
    ... ... @@ -326,8 +326,8 @@ class Sandbox():
    326 326
     
    
    327 327
             if self.__batch:
    
    328 328
                 # Nested batch
    
    329
    -            if flags != self.__batch.flags:
    
    330
    -                raise SandboxError("Inconsistent sandbox flags in single command batch")
    
    329
    +            assert flags == self.__batch.flags, \
    
    330
    +                "Inconsistent sandbox flags in single command batch"
    
    331 331
     
    
    332 332
                 parent_group = self.__batch.current_group
    
    333 333
                 parent_group.append(group)
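
    The rationale for the two hunks above: a flags mismatch inside a single command batch can only be caused by BuildStream's own code, so it becomes an assertion, while problems a user can actually provoke still surface as a SandboxError (a BstError). A small illustrative sketch of that convention, with a stand-in SandboxError so it runs on its own:

        # Illustrative only: the convention the diff applies, not BuildStream internals.
        class SandboxError(Exception):
            """Stand-in for buildstream.sandbox.SandboxError."""

        def push_batch_command(batch_flags, command_flags, command):
            # Programming error: callers inside BuildStream must not mix flags in a batch.
            assert command_flags == batch_flags, \
                "Inconsistent sandbox flags in single command batch"

            # User-facing error: bad element configuration is reported, never asserted.
            if not command:
                raise SandboxError("No command supplied for this build step")

        push_batch_command(batch_flags=0, command_flags=0, command=['make'])   # ok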
    

  • doc/source/format_project.rst
    ... ... @@ -218,6 +218,7 @@ The use of ports are required to distinguish between pull only access and
    218 218
     push/pull access. For information regarding the server/client certificates
    
    219 219
     and keys, please see: :ref:`Key pair for the server <server_authentication>`.
    
    220 220
     
    
    221
    +.. _project_remote_execution:
    
    221 222
     
    
    222 223
     Remote execution
    
    223 224
     ~~~~~~~~~~~~~~~~
    
    ... ... @@ -243,9 +244,6 @@ using the `remote-execution` option:
    243 244
         action-cache-service:
    
    244 245
           url: http://bar.action.com:50052
    
    245 246
     
    
    246
    -The execution-service part of remote execution does not support encrypted
    
    247
    -connections yet, so the protocol must always be http.
    
    248
    -
    
    249 247
     storage-service specifies a remote CAS store and the parameters are the
    
    250 248
     same as those used to specify an :ref:`artifact server <artifacts>`.
    
    251 249
     
    
    ... ... @@ -268,6 +266,9 @@ instance names.
    268 266
     
    
    269 267
     The Remote Execution API can be found via https://github.com/bazelbuild/remote-apis.
    
    270 268
     
    
    269
    +Remote execution configuration can also be provided in the :ref:`user
    
    270
    +configuration <user_config_remote_execution>`.
    
    271
    +
    
    271 272
     .. _project_essentials_mirrors:
    
    272 273
     
    
    273 274
     Mirrors
    

  • doc/source/using_config.rst
    ... ... @@ -59,6 +59,15 @@ configuration:
    59 59
          # Add another cache to pull from
    
    60 60
          - url: https://anothercache.com/artifacts:8080
    
    61 61
            server-cert: another_server.crt
    
    62
    +     # Add a cache to push/pull to/from, specifying
    
    63
    +     # that you wish to push artifacts in a 'partial'
    
    64
    +     # state (this being without the respective buildtree).
    
    65
    +     # Note that allow-partial-push requires push to also
    
    66
    +     # be set.
    
    67
    +     - url: https://anothercache.com/artifacts:11003
    
    68
    +       push: true
    
    69
    +       allow-partial-push: true
    
    70
    +
    
    62 71
     
    
    63 72
     .. note::
    
    64 73
     
    
    ... ... @@ -86,6 +95,14 @@ configuration:
    86 95
              # Add another cache to pull from
    
    87 96
              - url: https://ourprojectcache.com/artifacts:8080
    
    88 97
                server-cert: project_server.crt
    
    98
    +         # Add a cache to push/pull to/from, specifying
    
    99
    +         # that you wish to push artifacts in a 'partial'
    
    100
    +         # state (this being without the respective buildtree).
    
    101
    +         # Note that allow-partial-push requires push to also
    
    102
    +         # be set.
    
    103
    +         - url: https://anothercache.com/artifacts:11003
    
    104
    +           push: true
    
    105
    +           allow-partial-push: true
    
    89 106
     
    
    90 107
     
    
    91 108
     .. note::
    
    ... ... @@ -100,6 +117,54 @@ pull only access and push/pull access. For information regarding this and the
    100 117
     server/client certificates and keys, please see:
    
    101 118
     :ref:`Key pair for the server <server_authentication>`.
    
    102 119
     
    
    120
    +.. _user_config_remote_execution:
    
    121
    +
    
    122
    +Remote execution
    
    123
    +~~~~~~~~~~~~~~~~
    
    124
    +
    
    125
    +The same configuration for :ref:`remote execution <project_remote_execution>`
    
    126
    +in ``project.conf`` can be provided in the user configuration.
    
    127
    +
    
    128
    +There is only one remote execution configuration used per project.
    
    129
    +
    
    130
    +Project-specific overrides take priority; the global
    
    131
    +configuration is used as a fallback.
    
    132
    +
    
    133
    +1. Global remote execution fallback:
    
    134
    +
    
    135
    +.. code:: yaml
    
    136
    +
    
    137
    +  remote-execution:
    
    138
    +    execution-service:
    
    139
    +      url: http://execution.fallback.example.com:50051
    
    140
    +      instance-name: main
    
    141
    +    storage-service:
    
    142
    +      url: https://storage.fallback.example.com:11002/
    
    143
    +      server-cert: /keys/server.crt
    
    144
    +      client-cert: /keys/client.crt
    
    145
    +      client-key: /keys/client.key
    
    146
    +      instance-name: main
    
    147
    +    action-cache-service:
    
    148
    +      url: http://action.fallback.example.com:50052
    
    149
    +
    
    150
    +2. Project override:
    
    151
    +
    
    152
    +.. code:: yaml
    
    153
    +
    
    154
    +  projects:
    
    155
    +    some_project:
    
    156
    +      remote-execution:
    
    157
    +        execution-service:
    
    158
    +          url: http://execution.some_project.example.com:50051
    
    159
    +          instance-name: main
    
    160
    +        storage-service:
    
    161
    +          url: https://storage.some_project.example.com:11002/
    
    162
    +          server-cert: /some_project_keys/server.crt
    
    163
    +          client-cert: /some_project_keys/client.crt
    
    164
    +          client-key: /some_project_keys/client.key
    
    165
    +          instance-name: main
    
    166
    +        action-cache-service:
    
    167
    +          url: http://action.some_project.example.com:50052
    
    103 168
     
    
    104 169
     
    
    105 170
     Strict build plan
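
    To make the precedence described in the new section concrete, here is a small illustrative sketch; this is not BuildStream's actual resolution code, just the rule the documentation states, with URLs taken from the examples above: the per-project override wins, and the global remote-execution block is the fallback.

        def resolve_remote_execution(user_config, project_name):
            # Project-specific override first, global block as fallback.
            override = user_config.get('projects', {}).get(project_name, {}).get('remote-execution')
            return override or user_config.get('remote-execution')

        user_config = {
            'remote-execution': {
                'execution-service': {'url': 'http://execution.fallback.example.com:50051'},
            },
            'projects': {
                'some_project': {
                    'remote-execution': {
                        'execution-service': {'url': 'http://execution.some_project.example.com:50051'},
                    },
                },
            },
        }

        assert resolve_remote_execution(user_config, 'some_project')['execution-service']['url'] \
            == 'http://execution.some_project.example.com:50051'
        assert resolve_remote_execution(user_config, 'other_project')['execution-service']['url'] \
            == 'http://execution.fallback.example.com:50051'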
    

  • tests/artifactcache/config.py
    ... ... @@ -140,3 +140,28 @@ def test_missing_certs(cli, datafiles, config_key, config_value):
    140 140
         # This does not happen for a simple `bst show`.
    
    141 141
         result = cli.run(project=project, args=['pull', 'element.bst'])
    
    142 142
         result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    
    143
    +
    
    144
    +
    
    145
    +# Assert that if allow-partial-push is specified as true without push also being
    
    146
    +# set, we get a comprehensible LoadError instead of an unhandled exception.
    
    147
    +@pytest.mark.datafiles(DATA_DIR)
    
    148
    +def test_partial_push_error(cli, datafiles):
    
    149
    +    project = os.path.join(datafiles.dirname, datafiles.basename, 'project', 'elements')
    
    150
    +
    
    151
    +    project_conf = {
    
    152
    +        'name': 'test',
    
    153
    +
    
    154
    +        'artifacts': {
    
    155
    +            'url': 'https://cache.example.com:12345',
    
    156
    +            'allow-partial-push': 'True'
    
    157
    +        }
    
    158
    +    }
    
    159
    +    project_conf_file = os.path.join(project, 'project.conf')
    
    160
    +    _yaml.dump(project_conf, project_conf_file)
    
    161
    +
    
    162
    +    # Use `pull` here to ensure we try to initialize the remotes, triggering the error
    
    163
    +    #
    
    164
    +    # This does not happen for a simple `bst show`.
    
    165
    +    result = cli.run(project=project, args=['pull', 'target.bst'])
    
    166
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    
    167
    +    assert "allow-partial-push also requires push to be set" in result.stderr

  • tests/format/option-arch-alias/element.bst
    1
    +kind: autotools
    
    2
    +variables:
    
    3
    +  result: "Nothing"
    
    4
    +  (?):
    
    5
    +  - machine_arch == "arm":
    
    6
    +      result: "Army"
    
    7
    +  - machine_arch == "x86_64":
    
    8
    +      result: "X86-64y"

  • tests/format/option-arch-alias/project.conf
    1
    +name: test
    
    2
    +
    
    3
    +options:
    
    4
    +  machine_arch:
    
    5
    +    type: arch
    
    6
    +    description: The machine architecture
    
    7
    +    values:
    
    8
    +    - arm
    
    9
    +    - x86_64

  • tests/format/option-arch-unknown/element.bst
    1
    +kind: autotools
    
    2
    +variables:
    
    3
    +  result: "Nothing"
    
    4
    +  (?):
    
    5
    +  - machine_arch == "aarch32":
    
    6
    +      result: "Army"
    
    7
    +  - machine_arch == "aarch64":
    
    8
    +      result: "Aarchy"
    
    9
    +  - machine_arch == "x86-128":
    
    10
    +      result: "X86-128y"

  • tests/format/option-arch-unknown/project.conf
    1
    +name: test
    
    2
    +
    
    3
    +options:
    
    4
    +  machine_arch:
    
    5
    +    type: arch
    
    6
    +    description: The machine architecture
    
    7
    +    values:
    
    8
    +    - aarch32
    
    9
    +    - aarch64
    
    10
    +    - x86-128

  • tests/format/optionarch.py
    ... ... @@ -75,3 +75,47 @@ def test_unsupported_arch(cli, datafiles):
    75 75
             ])
    
    76 76
     
    
    77 77
             result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    
    78
    +
    
    79
    +
    
    80
    +@pytest.mark.datafiles(DATA_DIR)
    
    81
    +def test_alias(cli, datafiles):
    
    82
    +
    
    83
    +    with override_uname_arch("arm"):
    
    84
    +        project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-alias')
    
    85
    +        result = cli.run(project=project, silent=True, args=[
    
    86
    +            'show',
    
    87
    +            '--deps', 'none',
    
    88
    +            '--format', '%{vars}',
    
    89
    +            'element.bst'
    
    90
    +        ])
    
    91
    +
    
    92
    +        result.assert_success()
    
    93
    +
    
    94
    +
    
    95
    +@pytest.mark.datafiles(DATA_DIR)
    
    96
    +def test_unknown_host_arch(cli, datafiles):
    
    97
    +
    
    98
    +    with override_uname_arch("x86_128"):
    
    99
    +        project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch')
    
    100
    +        result = cli.run(project=project, silent=True, args=[
    
    101
    +            'show',
    
    102
    +            '--deps', 'none',
    
    103
    +            '--format', '%{vars}',
    
    104
    +            'element.bst'
    
    105
    +        ])
    
    106
    +
    
    107
    +        result.assert_main_error(ErrorDomain.PLATFORM, None)
    
    108
    +
    
    109
    +
    
    110
    +@pytest.mark.datafiles(DATA_DIR)
    
    111
    +def test_unknown_project_arch(cli, datafiles):
    
    112
    +
    
    113
    +    project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-unknown')
    
    114
    +    result = cli.run(project=project, silent=True, args=[
    
    115
    +        'show',
    
    116
    +        '--deps', 'none',
    
    117
    +        '--format', '%{vars}',
    
    118
    +        'element.bst'
    
    119
    +    ])
    
    120
    +
    
    121
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)

  • tests/frontend/order.py
    1
    +import os
    
    2
    +
    
    3
    +import pytest
    
    4
    +from tests.testutils import cli, create_repo
    
    5
    +
    
    6
    +from buildstream import _yaml
    
    7
    +
    
    8
    +# Project directory
    
    9
    +DATA_DIR = os.path.join(
    
    10
    +    os.path.dirname(os.path.realpath(__file__)),
    
    11
    +    "project",
    
    12
    +)
    
    13
    +
    
    14
    +
    
    15
    +def create_element(repo, name, path, dependencies, ref=None):
    
    16
    +    element = {
    
    17
    +        'kind': 'import',
    
    18
    +        'sources': [
    
    19
    +            repo.source_config(ref=ref)
    
    20
    +        ],
    
    21
    +        'depends': dependencies
    
    22
    +    }
    
    23
    +    _yaml.dump(element, os.path.join(path, name))
    
    24
    +
    
    25
    +
    
    26
    +# This tests a variety of scenarios and checks that the order in
    
    27
    +# which things are processed remains stable.
    
    28
    +#
    
    29
    +# This is especially important in order to ensure that our
    
    30
    +# depth sorting and optimization of which elements should be
    
    31
    +# processed first is doing its job right, and that we are
    
    32
    +# promoting elements to the build queue as soon as possible
    
    33
    +#
    
    34
    +# Parameters:
    
    35
    +#    targets (target elements): The targets to invoke bst with
    
    36
    +#    template (dict): The project template dictionary, for create_element()
    
    37
    +#    expected (list): A list of element names in the expected order
    
    38
    +#
    
    39
    +@pytest.mark.datafiles(os.path.join(DATA_DIR))
    
    40
    +@pytest.mark.parametrize("target,template,expected", [
    
    41
    +    # First simple test
    
    42
    +    ('3.bst', {
    
    43
    +        '0.bst': ['1.bst'],
    
    44
    +        '1.bst': [],
    
    45
    +        '2.bst': ['0.bst'],
    
    46
    +        '3.bst': ['0.bst', '1.bst', '2.bst']
    
    47
    +    }, ['1.bst', '0.bst', '2.bst', '3.bst']),
    
    48
    +
    
    49
    +    # A more complicated test with build of build dependencies
    
    50
    +    ('target.bst', {
    
    51
    +        'a.bst': [],
    
    52
    +        'base.bst': [],
    
    53
    +        'timezones.bst': [],
    
    54
    +        'middleware.bst': [{'filename': 'base.bst', 'type': 'build'}],
    
    55
    +        'app.bst': [{'filename': 'middleware.bst', 'type': 'build'}],
    
    56
    +        'target.bst': ['a.bst', 'base.bst', 'middleware.bst', 'app.bst', 'timezones.bst']
    
    57
    +    }, ['base.bst', 'middleware.bst', 'a.bst', 'app.bst', 'timezones.bst', 'target.bst']),
    
    58
    +])
    
    59
    +@pytest.mark.parametrize("operation", [('show'), ('fetch'), ('build')])
    
    60
    +def test_order(cli, datafiles, tmpdir, operation, target, template, expected):
    
    61
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    62
    +    dev_files_path = os.path.join(project, 'files', 'dev-files')
    
    63
    +    element_path = os.path.join(project, 'elements')
    
    64
    +
    
    65
    +    # FIXME: Remove this when the test passes reliably.
    
    66
    +    #
    
    67
    +    #        There is no reason why the order should not
    
    68
    +    #        be preserved when builders is set to 1,
    
    69
    +    #        the scheduler queue processing still seems to
    
    70
    +    #        be losing the order.
    
    71
    +    #
    
    72
    +    if operation == 'build':
    
    73
    +        pytest.skip("FIXME: This still only sometimes passes")
    
    74
    +
    
    75
    +    # Configure to only allow one fetcher at a time, making it easy to
    
    76
    +    # determine what is being planned in what order.
    
    77
    +    cli.configure({
    
    78
    +        'scheduler': {
    
    79
    +            'fetchers': 1,
    
    80
    +            'builders': 1
    
    81
    +        }
    
    82
    +    })
    
    83
    +
    
    84
    +    # Build the project from the template, making the import elements
    
    85
    +    # all use the same repo
    
    86
    +    #
    
    87
    +    repo = create_repo('git', str(tmpdir))
    
    88
    +    ref = repo.create(dev_files_path)
    
    89
    +    for element, dependencies in template.items():
    
    90
    +        create_element(repo, element, element_path, dependencies, ref=ref)
    
    91
    +        repo.add_commit()
    
    92
    +
    
    93
    +    # Run test and collect results
    
    94
    +    if operation == 'show':
    
    95
    +        result = cli.run(args=['show', '--deps', 'plan', '--format', '%{name}', target], project=project, silent=True)
    
    96
    +        result.assert_success()
    
    97
    +        results = result.output.splitlines()
    
    98
    +    else:
    
    99
    +        if operation == 'fetch':
    
    100
    +            result = cli.run(args=['source', 'fetch', target], project=project, silent=True)
    
    101
    +        else:
    
    102
    +            result = cli.run(args=[operation, target], project=project, silent=True)
    
    103
    +        result.assert_success()
    
    104
    +        results = result.get_start_order(operation)
    
    105
    +
    
    106
    +    # Assert the order
    
    107
    +    print("Expected order: {}".format(expected))
    
    108
    +    print("Observed result order: {}".format(results))
    
    109
    +    assert results == expected

  • tests/integration/pushbuildtrees.py
    1
    +import os
    
    2
    +import shutil
    
    3
    +import pytest
    
    4
    +import subprocess
    
    5
    +
    
    6
    +from buildstream import _yaml
    
    7
    +from tests.testutils import cli_integration as cli, create_artifact_share
    
    8
    +from tests.testutils.integration import assert_contains
    
    9
    +from tests.testutils.site import HAVE_BWRAP, IS_LINUX
    
    10
    +from buildstream._exceptions import ErrorDomain, LoadErrorReason
    
    11
    +
    
    12
    +
    
    13
    +DATA_DIR = os.path.join(
    
    14
    +    os.path.dirname(os.path.realpath(__file__)),
    
    15
    +    "project"
    
    16
    +)
    
    17
    +
    
    18
    +
    
    19
    +# Remove artifact cache & set cli.config value of pull-buildtrees
    
    20
    +# to false, which is the default user context. The cache has to be
    
    21
    +# cleared as just forcefully removing the refpath leaves dangling objects.
    
    22
    +def default_state(cli, tmpdir, share):
    
    23
    +    shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    
    24
    +    cli.configure({
    
    25
    +        'artifacts': {'url': share.repo, 'push': False},
    
    26
    +        'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
    
    27
    +        'cache': {'pull-buildtrees': False},
    
    28
    +    })
    
    29
    +
    
    30
    +
    
    31
    +# Tests to capture the integration of the optional push of buildtrees.
    
    32
    +# The behaviour should encompass pushing artifacts that are already cached
    
    33
    +# without a buildtree as well as artifacts that are cached with their buildtree.
    
    34
    +# This option is handled via 'allow-partial-push' on a per artifact remote config
    
    35
    +# node basis. Multiple remote config nodes can point to the same url and as such can
    
    36
    +# have different 'allow-partial-push' options, so tests need to cover this using project
    
    37
    +# confs.
    
    38
    +@pytest.mark.integration
    
    39
    +@pytest.mark.datafiles(DATA_DIR)
    
    40
    +@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    
    41
    +def test_pushbuildtrees(cli, tmpdir, datafiles, integration_cache):
    
    42
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    43
    +    element_name = 'autotools/amhello.bst'
    
    44
    +
    
    45
    +    # Create artifact shares for pull & push testing
    
    46
    +    with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
    
    47
    +        create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
    
    48
    +        create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3,\
    
    49
    +        create_artifact_share(os.path.join(str(tmpdir), 'share4')) as share4:
    
    50
    +
    
    51
    +        cli.configure({
    
    52
    +            'artifacts': {'url': share1.repo, 'push': True},
    
    53
    +            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
    
    54
    +        })
    
    55
    +
    
    56
    +        cli.configure({'artifacts': [{'url': share1.repo, 'push': True},
    
    57
    +                                     {'url': share2.repo, 'push': True, 'allow-partial-push': True}]})
    
    58
    +
    
    59
    +        # Build autotools element, check it was pushed, then delete the local cache.
    
    60
    +        # As share 2 has push & allow-partial-push set as true, it
    
    61
    +        # should have pushed the artifacts, without the cached buildtrees,
    
    62
    +        # to it.
    
    63
    +        result = cli.run(project=project, args=['build', element_name])
    
    64
    +        assert result.exit_code == 0
    
    65
    +        assert cli.get_element_state(project, element_name) == 'cached'
    
    66
    +        elementdigest = share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    67
    +        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
    
    68
    +                                    elementdigest.hash, 'buildtree')
    
    69
    +        assert os.path.isdir(buildtreedir)
    
    70
    +        assert element_name in result.get_partial_pushed_elements()
    
    71
    +        assert element_name in result.get_pushed_elements()
    
    72
    +        assert share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    73
    +        assert share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    74
    +        default_state(cli, tmpdir, share1)
    
    75
    +
    
    76
    +        # Check that after explicitly pulling an artifact without its buildtree,
    
    77
    +        # we can push it to another remote that is configured to accept the partial
    
    78
    +        # artifact
    
    79
    +        result = cli.run(project=project, args=['pull', element_name])
    
    80
    +        assert element_name in result.get_pulled_elements()
    
    81
    +        cli.configure({'artifacts': {'url': share3.repo, 'push': True, 'allow-partial-push': True}})
    
    82
    +        assert cli.get_element_state(project, element_name) == 'cached'
    
    83
    +        assert not os.path.isdir(buildtreedir)
    
    84
    +        result = cli.run(project=project, args=['push', element_name])
    
    85
    +        assert result.exit_code == 0
    
    86
    +        assert element_name in result.get_partial_pushed_elements()
    
    87
    +        assert element_name not in result.get_pushed_elements()
    
    88
    +        assert share3.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    89
    +        default_state(cli, tmpdir, share3)
    
    90
    +
    
    91
    +        # Delete the local cache and pull the partial artifact from share 3;
    
    92
    +        # this should not include the buildtree when extracted locally, even when
    
    93
    +        # pull-buildtrees is given as a cli parameter as no available remotes will
    
    94
    +        # contain the buildtree
    
    95
    +        assert not os.path.isdir(buildtreedir)
    
    96
    +        assert cli.get_element_state(project, element_name) != 'cached'
    
    97
    +        result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
    
    98
    +        assert element_name in result.get_partial_pulled_elements()
    
    99
    +        assert not os.path.isdir(buildtreedir)
    
    100
    +        default_state(cli, tmpdir, share3)
    
    101
    +
    
    102
    +        # Delete the local cache and attempt to pull a 'full' artifact, including its
    
    103
    +        # buildtree. As before, share3, being the first listed remote, will not have
    
    104
    +        # the buildtree available and should spawn a partial pull. Having share1 as the
    
    105
    +        # second available remote should allow the buildtree to be pulled thus 'completing'
    
    106
    +        # the artifact
    
    107
    +        cli.configure({'artifacts': [{'url': share3.repo, 'push': True, 'allow-partial-push': True},
    
    108
    +                                     {'url': share1.repo, 'push': True}]})
    
    109
    +        assert cli.get_element_state(project, element_name) != 'cached'
    
    110
    +        result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
    
    111
    +        assert element_name in result.get_partial_pulled_elements()
    
    112
    +        assert element_name in result.get_pulled_elements()
    
    113
    +        assert "Attempting to retrieve buildtree from remotes" in result.stderr
    
    114
    +        assert os.path.isdir(buildtreedir)
    
    115
    +        assert cli.get_element_state(project, element_name) == 'cached'
    
    116
    +
    
    117
    +        # Test that we are able to 'complete' an artifact on a server which is cached partially,
    
    118
    +        # but has now been configured for full artifact pushing. This should require only pushing
    
    119
    +        # the missing blobs, which should be those of just the buildtree. In this case changing
    
    120
    +        # share3 to full pushes should exercise this
    
    121
    +        cli.configure({'artifacts': {'url': share3.repo, 'push': True}})
    
    122
    +        result = cli.run(project=project, args=['push', element_name])
    
    123
    +        assert element_name in result.get_pushed_elements()
    
    124
    +        
    
    125
    +        # Ensure that the same remote url can be defined multiple times with differing push
    
    126
    +        # config. BuildStream supports the same remote having different configurations, so
    
    127
    +        # partial pushing can differ between elements defined in the top-level project.conf and
    
    128
    +        # those from a junctioned project. Assert that elements are pushed to the same remote in
    
    129
    +        # a state defined via their respective project.confs
    
    130
    +        default_state(cli, tmpdir, share1)
    
    131
    +        cli.configure({'artifactdir': os.path.join(str(tmpdir), 'artifacts')}, reset=True)
    
    132
    +        junction = os.path.join(project, 'elements', 'junction')
    
    133
    +        os.mkdir(junction)
    
    134
    +        shutil.copy2(os.path.join(project, 'elements', element_name), junction)
    
    135
    +
    
    136
    +        junction_conf = {}
    
    137
    +        junction_conf['name'] = 'amhello'
    
    138
    +        junction_conf['artifacts'] = {'url': share4.repo, 'push': True, 'allow-partial-push': True}
    
    139
    +        _yaml.dump(junction_conf, os.path.join(junction, 'project.conf'))
    
    140
    +        with open(os.path.join(project, 'project.conf'), 'a') as f:
    
    141
    +            f.write("artifacts:\n  url: {}\n  push: True\n".format(share4.repo))
    
    142
    +
    
    143
    +        # Read project.conf, the junction project.conf and buildstream.conf
    
    144
    +        # before running bst
    
    145
    +        with open(os.path.join(project, 'project.conf'), 'r') as f:
    
    146
    +            print(f.read())
    
    147
    +        with open(os.path.join(junction, 'project.conf'), 'r') as f:
    
    148
    +            print(f.read())
    
    149
    +        with open(os.path.join(project, 'cache', 'buildstream.conf'), 'r') as f:
    
    150
    +            print(f.read())
    
    151
    +
    
    152
    +        result = cli.run(project=project, args=['build', 'junction/amhello.bst'])
    
    153
    +
    
    154
    +        # Read project.conf, the junction project.conf and buildstream.conf
    
    155
    +        # after running bst
    
    156
    +        with open(os.path.join(project, 'project.conf'), 'r') as f:
    
    157
    +            print(f.read())
    
    158
    +        with open(os.path.join(junction, 'project.conf'), 'r') as f:
    
    159
    +            print(f.read())
    
    160
    +        with open(os.path.join(project, 'cache', 'buildstream.conf'), 'r') as f:
    
    161
    +            print(f.read())
    
    162
    +
    
    163
    +        assert 'junction/amhello.bst' in result.get_partial_pushed_elements()
    
    164
    +        assert 'base/base-alpine.bst' in result.get_pushed_elements()

  • tests/testutils/runcli.py
    ... ... @@ -167,6 +167,23 @@ class Result():
    167 167
         def assert_shell_error(self, fail_message=''):
    
    168 168
             assert self.exit_code == 1, fail_message
    
    169 169
     
    
    170
    +    # get_start_order()
    
    171
    +    #
    
    172
    +    # Gets the list of elements processed in a given queue, in the
    
    173
    +    # order of their first appearances in the session.
    
    174
    +    #
    
    175
    +    # Args:
    
    176
    +    #    activity (str): The queue activity name (like 'fetch')
    
    177
    +    #
    
    178
    +    # Returns:
    
    179
    +    #    (list): A list of element names in the order which they first appeared in the result
    
    180
    +    #
    
    181
    +    def get_start_order(self, activity):
    
    182
    +        results = re.findall(r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format(activity), self.stderr)
    
    183
    +        if results is None:
    
    184
    +            return []
    
    185
    +        return list(results)
    
    186
    +
    
    170 187
         # get_tracked_elements()
    
    171 188
         #
    
    172 189
         # Produces a list of element names on which tracking occurred
    
    ... ... @@ -191,6 +208,13 @@ class Result():
    191 208
     
    
    192 209
             return list(pushed)
    
    193 210
     
    
    211
    +    def get_partial_pushed_elements(self):
    
    212
    +        pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', self.stderr)
    
    213
    +        if pushed is None:
    
    214
    +            return []
    
    215
    +
    
    216
    +        return list(pushed)
    
    217
    +
    
    194 218
         def get_pulled_elements(self):
    
    195 219
             pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
    
    196 220
             if pulled is None:
    
    ... ... @@ -198,6 +222,13 @@ class Result():
    198 222
     
    
    199 223
             return list(pulled)
    
    200 224
     
    
    225
    +    def get_partial_pulled_elements(self):
    
    226
    +        pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled partial artifact', self.stderr)
    
    227
    +        if pulled is None:
    
    228
    +            return []
    
    229
    +
    
    230
    +        return list(pulled)
    
    231
    +
    
    201 232
     
    
    202 233
     class Cli():
    
    203 234
     
    
    ... ... @@ -218,11 +249,15 @@ class Cli():
    218 249
         #
    
    219 250
         # Args:
    
    220 251
         #    config (dict): The user configuration to use
    
    252
    +    #    reset (bool): Optional reset of stored config
    
    221 253
         #
    
    222
    -    def configure(self, config):
    
    254
    +    def configure(self, config, reset=False):
    
    223 255
             if self.config is None:
    
    224 256
                 self.config = {}
    
    225 257
     
    
    258
    +        if reset:
    
    259
    +            self.config.clear()
    
    260
    +
    
    226 261
             for key, val in config.items():
    
    227 262
                 self.config[key] = val
    
    228 263
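
    The new Result helpers above all follow the same pattern: scrape self.stderr with a regular expression and return the captured element names. A quick illustration with fabricated log lines (they are shaped to match the regexes in the diff, not copied from real BuildStream output); note that re.findall() always returns a list, so the None checks are purely defensive.

        import re

        stderr = (
            "[fetch:1.bst ] START   fetch 1.bst .log\n"
            "[fetch:0.bst ] START   fetch 0.bst .log\n"
            "[ push:app.bst ] INFO    Pushed partial artifact abc123\n"
        )

        def get_start_order(stderr, activity):
            return re.findall(r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format(activity), stderr)

        def get_partial_pushed_elements(stderr):
            return re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', stderr)

        assert get_start_order(stderr, 'fetch') == ['1.bst', '0.bst']
        assert get_partial_pushed_elements(stderr) == ['app.bst']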
     
    

  • tox.ini
    1
    +#
    
    2
    +# Tox global configuration
    
    3
    +#
    
    1 4
     [tox]
    
    2 5
     envlist = py35,py36,py37
    
    3 6
     skip_missing_interpreters = true
    
    4 7
     
    
    8
    +#
    
    9
    +# Defaults for all environments
    
    10
    +#
    
    11
    +# Anything specified here is inherited by the sections
    
    12
    +#
    
    5 13
     [testenv]
    
    6
    -commands = pytest {posargs}
    
    14
    +commands =
    
    15
    +    pytest --basetemp {envtmpdir} {posargs}
    
    16
    +    mkdir -p .coverage-reports
    
    17
    +    mv {envtmpdir}/.coverage {toxinidir}/.coverage-reports/.coverage.{env:COVERAGE_PREFIX:}{envname}
    
    7 18
     deps =
    
    8 19
         -rrequirements/requirements.txt
    
    9 20
         -rrequirements/dev-requirements.txt
    
    ... ... @@ -13,6 +24,32 @@ passenv =
    13 24
         GI_TYPELIB_PATH
    
    14 25
         INTEGRATION_CACHE
    
    15 26
     
    
    27
    +#
    
    28
    +# These keys are not inherited by any other sections
    
    29
    +#
    
    30
    +setenv =
    
    31
    +    py{35,36,37}: COVERAGE_FILE = {envtmpdir}/.coverage
    
    32
    +whitelist_externals =
    
    33
    +    py{35,36,37}:
    
    34
    +        mv
    
    35
    +        mkdir
    
    36
    +
    
    37
    +#
    
    38
    +# Coverage reporting
    
    39
    +#
    
    40
    +[testenv:coverage]
    
    41
    +commands =
    
    42
    +    - coverage combine --rcfile={toxinidir}/.coveragerc {toxinidir}/.coverage-reports/
    
    43
    +    coverage report --rcfile={toxinidir}/.coveragerc -m
    
    44
    +deps =
    
    45
    +    -rrequirements/requirements.txt
    
    46
    +    -rrequirements/dev-requirements.txt
    
    47
    +setenv =
    
    48
    +    COVERAGE_FILE = {toxinidir}/.coverage-reports/.coverage
    
    49
    +
    
    50
    +#
    
    51
    +# Running linters
    
    52
    +#
    
    16 53
     [testenv:lint]
    
    17 54
     commands =
    
    18 55
         pycodestyle
    
    ... ... @@ -22,6 +59,9 @@ deps =
    22 59
         -rrequirements/dev-requirements.txt
    
    23 60
         -rrequirements/plugin-requirements.txt
    
    24 61
     
    
    62
    +#
    
    63
    +# Building documentation
    
    64
    +#
    
    25 65
     [testenv:docs]
    
    26 66
     commands =
    
    27 67
         make -C doc
    


