[Notes] [Git][BuildStream/buildstream][jennis/add_artifacts_completion] 22 commits: Use relative path to project directory for remote execution certificates/keys



Title: GitLab

James Ennis pushed to branch jennis/add_artifacts_completion at BuildStream / buildstream

Commits:

22 changed files:

Changes:

  • .coveragerc
    ... ... @@ -4,11 +4,15 @@ include =
    4 4
       */buildstream/*
    
    5 5
     
    
    6 6
     omit =
    
    7
    -  # Omit profiling helper module
    
    7
    +  # Omit some internals
    
    8 8
       */buildstream/_profile.py
    
    9
    +  */buildstream/__main__.py
    
    10
    +  */buildstream/_version.py
    
    9 11
       # Omit generated code
    
    10 12
       */buildstream/_protos/*
    
    11 13
       */.eggs/*
    
    14
    +  # Omit .tox directory
    
    15
    +  */.tox/*
    
    12 16
     
    
    13 17
     [report]
    
    14 18
     show_missing = True
    

  • .gitignore
    ... ... @@ -13,11 +13,12 @@ tests/**/*.pyc
    13 13
     integration-cache/
    
    14 14
     tmp
    
    15 15
     .coverage
    
    16
    +.coverage-reports/
    
    16 17
     .coverage.*
    
    17 18
     .cache
    
    18 19
     .pytest_cache/
    
    19 20
     *.bst/
    
    20
    -.tox
    
    21
    +.tox/
    
    21 22
     
    
    22 23
     # Pycache, in case buildstream is ran directly from within the source
    
    23 24
     # tree
    

  • .gitlab-ci.yml
    ... ... @@ -13,6 +13,7 @@ variables:
    13 13
       PYTEST_ADDOPTS: "--color=yes"
    
    14 14
       INTEGRATION_CACHE: "${CI_PROJECT_DIR}/cache/integration-cache"
    
    15 15
       TEST_COMMAND: "tox -- --color=yes --integration"
    
    16
    +  COVERAGE_PREFIX: "${CI_JOB_NAME}."
    
    16 17
     
    
    17 18
     
    
    18 19
     #####################################################
    
    ... ... @@ -24,9 +25,6 @@ variables:
    24 25
     .tests-template: &tests
    
    25 26
       stage: test
    
    26 27
     
    
    27
    -  variables:
    
    28
    -    COVERAGE_DIR: coverage-linux
    
    29
    -
    
    30 28
       before_script:
    
    31 29
       # Diagnostics
    
    32 30
       - mount
    
    ... ... @@ -40,14 +38,11 @@ variables:
    40 38
       - su buildstream -c "${TEST_COMMAND}"
    
    41 39
     
    
    42 40
       after_script:
    
    43
    -  # Collect our reports
    
    44
    -  - mkdir -p ${COVERAGE_DIR}
    
    45
    -  - cp .coverage ${COVERAGE_DIR}/coverage."${CI_JOB_NAME}"
    
    46 41
       except:
    
    47 42
       - schedules
    
    48 43
       artifacts:
    
    49 44
         paths:
    
    50
    -    - ${COVERAGE_DIR}
    
    45
    +    - .coverage-reports
    
    51 46
     
    
    52 47
     tests-debian-9:
    
    53 48
       image: buildstream/testsuite-debian:9-5da27168-32c47d1c
    
    ... ... @@ -83,7 +78,6 @@ tests-unix:
    83 78
       <<: *tests
    
    84 79
       variables:
    
    85 80
         BST_FORCE_BACKEND: "unix"
    
    86
    -    COVERAGE_DIR: coverage-unix
    
    87 81
     
    
    88 82
       script:
    
    89 83
     
    
    ... ... @@ -239,22 +233,22 @@ coverage:
    239 233
       stage: post
    
    240 234
       coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
    
    241 235
       script:
    
    242
    -    - pip3 install -r requirements/requirements.txt -r requirements/dev-requirements.txt
    
    243
    -    - pip3 install --no-index .
    
    244
    -    - mkdir report
    
    245
    -    - cd report
    
    246
    -    - cp ../coverage-unix/coverage.* .
    
    247
    -    - cp ../coverage-linux/coverage.* .
    
    248
    -    - ls coverage.*
    
    249
    -    - coverage combine --rcfile=../.coveragerc -a coverage.*
    
    250
    -    - coverage report --rcfile=../.coveragerc -m
    
    236
    +    - cp -a .coverage-reports/ ./coverage-sources
    
    237
    +    - tox -e coverage
    
    238
    +    - cp -a .coverage-reports/ ./coverage-report
    
    251 239
       dependencies:
    
    252 240
       - tests-debian-9
    
    253 241
       - tests-fedora-27
    
    254 242
       - tests-fedora-28
    
    243
    +  - tests-fedora-missing-deps
    
    244
    +  - tests-ubuntu-18.04
    
    255 245
       - tests-unix
    
    256 246
       except:
    
    257 247
       - schedules
    
    248
    +  artifacts:
    
    249
    +    paths:
    
    250
    +    - coverage-sources/
    
    251
    +    - coverage-report/
    
    258 252
     
    
    259 253
     # Deploy, only for merges which land on master branch.
    
    260 254
     #
    

  • CONTRIBUTING.rst
    ... ... @@ -553,7 +553,7 @@ One problem which arises from this is that we end up having symbols
    553 553
     which are *public* according to the :ref:`rules discussed in the previous section
    
    554 554
     <contributing_public_and_private>`, but must be hidden away from the
    
    555 555
     *"Public API Surface"*. For example, BuildStream internal classes need
    
    556
    -to invoke methods on the ``Element`` and ``Source`` classes, wheras these
    
    556
    +to invoke methods on the ``Element`` and ``Source`` classes, whereas these
    
    557 557
     methods need to be hidden from the *"Public API Surface"*.
    
    558 558
     
    
    559 559
     This is where BuildStream deviates from the PEP-8 standard for public
    
    ... ... @@ -631,7 +631,7 @@ An element plugin will derive from Element by importing::
    631 631
     
    
    632 632
       from buildstream import Element
    
    633 633
     
    
    634
    -When importing utilities specifically, dont import function names
    
    634
    +When importing utilities specifically, don't import function names
    
    635 635
     from there, instead import the module itself::
    
    636 636
     
    
    637 637
       from . import utils
    
    ... ... @@ -737,7 +737,7 @@ Abstract methods
    737 737
     ~~~~~~~~~~~~~~~~
    
    738 738
     In BuildStream, an *"Abstract Method"* is a bit of a misnomer and does
    
    739 739
     not match up to how Python defines abstract methods, we need to seek out
    
    740
    -a new nomanclature to refer to these methods.
    
    740
    +a new nomenclature to refer to these methods.
    
    741 741
     
    
    742 742
     In Python, an *"Abstract Method"* is a method which **must** be
    
    743 743
     implemented by a subclass, whereas all methods in Python can be
    
    ... ... @@ -960,7 +960,7 @@ possible, and avoid any cyclic relationships in modules.
    960 960
     For instance, the ``Source`` objects are owned by ``Element``
    
    961 961
     objects in the BuildStream data model, and as such the ``Element``
    
    962 962
     will delegate some activities to the ``Source`` objects in its
    
    963
    -possesion. The ``Source`` objects should however never call functions
    
    963
    +possession. The ``Source`` objects should however never call functions
    
    964 964
     on the ``Element`` object, nor should the ``Source`` object itself
    
    965 965
     have any understanding of what an ``Element`` is.
    
    966 966
     
    
    ... ... @@ -1223,7 +1223,7 @@ For further information about using the reStructuredText with sphinx, please see
    1223 1223
     Building Docs
    
    1224 1224
     ~~~~~~~~~~~~~
    
    1225 1225
 Before you can build the docs, you will need to ensure that you have installed
    
    1226
    -the required :ref:`buid dependencies <contributing_build_deps>` as mentioned
    
    1226
    +the required :ref:`build dependencies <contributing_build_deps>` as mentioned
    
    1227 1227
     in the testing section above.
    
    1228 1228
     
    
    1229 1229
     To build the documentation, just run the following::
    
    ... ... @@ -1365,7 +1365,7 @@ Structure of an example
    1365 1365
     '''''''''''''''''''''''
    
    1366 1366
     The :ref:`tutorial <tutorial>` and the :ref:`examples <examples>` sections
    
    1367 1367
     of the documentation contain a series of sample projects, each chapter in
    
    1368
    -the tutoral, or standalone example uses a sample project.
    
    1368
    +the tutorial, or standalone example uses a sample project.
    
    1369 1369
     
    
    1370 1370
 Here is the structure for adding new examples and tutorial chapters.
    
    1371 1371
     
    
    ... ... @@ -1471,8 +1471,8 @@ Installing build dependencies
    1471 1471
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    
    1472 1472
     Some of BuildStream's dependencies have non-python build dependencies. When
    
    1473 1473
     running tests with ``tox``, you will first need to install these dependencies.
    
    1474
    -Exact steps to install these will depend on your oprtation systemm. Commands
    
    1475
    -for installing them for some common distributions are lised below.
    
    1474
    +Exact steps to install these will depend on your operating system. Commands
    
    1475
    +for installing them for some common distributions are listed below.
    
    1476 1476
     
    
    1477 1477
     For Fedora-based systems::
    
    1478 1478
     
    
    ... ... @@ -1498,6 +1498,13 @@ option when running tox::
    1498 1498
     
    
    1499 1499
       tox -e py37
    
    1500 1500
     
    
    1501
    +If you would like to test and lint at the same time, or if you do have multiple
    
    1502
    +python versions installed and would like to test against multiple versions, then
    
    1503
    +we recommend using `detox <https://github.com/tox-dev/detox>`_, just run it with
    
    1504
    +the same arguments you would give `tox`::
    
    1505
    +
    
    1506
    +  detox -e lint,py36,py37
    
    1507
    +
    
    1501 1508
     Linting is performed separately from testing. In order to run the linting step which
    
    1502 1509
     consists of running the ``pycodestyle`` and ``pylint`` tools, run the following::
    
    1503 1510
     
    
    ... ... @@ -1533,7 +1540,7 @@ the frontend tests you can do::
    1533 1540
     
    
    1534 1541
       tox -- tests/frontend/
    
    1535 1542
     
    
    1536
    -Specific tests can be chosen by using the :: delimeter after the test module.
    
    1543
    +Specific tests can be chosen by using the :: delimiter after the test module.
    
    1537 1544
     If you wanted to run the test_build_track test within frontend/buildtrack.py you could do::
    
    1538 1545
     
    
    1539 1546
       tox -- tests/frontend/buildtrack.py::test_build_track
    
    ... ... @@ -1553,7 +1560,7 @@ can run ``tox`` with ``-r`` or ``--recreate`` option.
    1553 1560
     .. note::
    
    1554 1561
     
    
    1555 1562
        By default, we do not allow use of site packages in our ``tox``
    
    1556
    -   confguration to enable running the tests in an isolated environment.
    
    1563
    +   configuration to enable running the tests in an isolated environment.
    
    1557 1564
        If you need to enable use of site packages for whatever reason, you can
    
    1558 1565
        do so by passing the ``--sitepackages`` option to ``tox``. Also, you will
    
    1559 1566
        not need to install any of the build dependencies mentioned above if you
    
    ... ... @@ -1574,10 +1581,23 @@ can run ``tox`` with ``-r`` or ``--recreate`` option.
    1574 1581
          ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'
    
    1575 1582
     
    
    1576 1583
     
    
    1584
    +Observing coverage
    
    1585
    +~~~~~~~~~~~~~~~~~~
    
    1586
    +Once you have run the tests using `tox` (or `detox`), some coverage reports will
    
    1587
    +have been left behind.
    
    1588
    +
    
    1589
    +To view the coverage report of the last test run, simply run::
    
    1590
    +
    
    1591
    +  tox -e coverage
    
    1592
    +
    
    1593
    +This will collate any reports from separate python environments that may be
    
    1594
    +under test before displaying the combined coverage.
    
    1595
    +
    
    1596
    +
    
    1577 1597
     Adding tests
    
    1578 1598
     ~~~~~~~~~~~~
    
    1579 1599
     Tests are found in the tests subdirectory, inside of which
    
    1580
    -there is a separarate directory for each *domain* of tests.
    
    1600
    +there is a separate directory for each *domain* of tests.
    
    1581 1601
     All tests are collected as::
    
    1582 1602
     
    
    1583 1603
       tests/*/*.py
    

  • buildstream/_artifactcache/cascache.py
    ... ... @@ -53,7 +53,7 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
    53 53
         #
    
    54 54
         @staticmethod
    
    55 55
         def _new_from_config_node(spec_node, basedir=None):
    
    56
    -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
    
    56
    +        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
    
    57 57
             url = _yaml.node_get(spec_node, str, 'url')
    
    58 58
             push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
    
    59 59
             if not url:
    
    ... ... @@ -61,7 +61,7 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
    61 61
                 raise LoadError(LoadErrorReason.INVALID_DATA,
    
    62 62
                                 "{}: empty artifact cache URL".format(provenance))
    
    63 63
     
    
    64
    -        instance_name = _yaml.node_get(spec_node, str, 'instance_name', default_value=None)
    
    64
    +        instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)
    
    65 65
     
    
    66 66
             server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
    
    67 67
             if server_cert and basedir:
    

  • buildstream/_context.py
    ... ... @@ -34,6 +34,7 @@ from ._artifactcache import ArtifactCache
    34 34
     from ._artifactcache.cascache import CASCache
    
    35 35
     from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
    
    36 36
     from .plugin import _plugin_lookup
    
    37
    +from .sandbox import SandboxRemote
    
    37 38
     
    
    38 39
     
    
    39 40
     # Context()
    
    ... ... @@ -72,6 +73,9 @@ class Context():
    72 73
             # The locations from which to push and pull prebuilt artifacts
    
    73 74
             self.artifact_cache_specs = None
    
    74 75
     
    
    76
    +        # The global remote execution configuration
    
    77
    +        self.remote_execution_specs = None
    
    78
    +
    
    75 79
             # The directory to store build logs
    
    76 80
             self.logdir = None
    
    77 81
     
    
    ... ... @@ -187,7 +191,7 @@ class Context():
    187 191
             _yaml.node_validate(defaults, [
    
    188 192
                 'sourcedir', 'builddir', 'artifactdir', 'logdir',
    
    189 193
                 'scheduler', 'artifacts', 'logging', 'projects',
    
    190
    -            'cache', 'prompt', 'workspacedir',
    
    194
    +            'cache', 'prompt', 'workspacedir', 'remote-execution'
    
    191 195
             ])
    
    192 196
     
    
    193 197
             for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
    
    ... ... @@ -212,6 +216,8 @@ class Context():
    212 216
             # Load artifact share configuration
    
    213 217
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
    
    214 218
     
    
    219
    +        self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
    
    220
    +
    
    215 221
             # Load pull build trees configuration
    
    216 222
             self.pull_buildtrees = _yaml.node_get(cache, bool, 'pull-buildtrees')
    
    217 223
     
    
    ... ... @@ -271,7 +277,8 @@ class Context():
    271 277
             # Shallow validation of overrides, parts of buildstream which rely
    
    272 278
             # on the overrides are expected to validate elsewhere.
    
    273 279
             for _, overrides in _yaml.node_items(self._project_overrides):
    
    274
    -            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror'])
    
    280
    +            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror',
    
    281
    +                                            'remote-execution'])
    
    275 282
     
    
    276 283
             profile_end(Topics.LOAD_CONTEXT, 'load')
    
    277 284
     
    

  • buildstream/_frontend/cli.py
    ... ... @@ -2,6 +2,7 @@ import os
    2 2
     import sys
    
    3 3
     from contextlib import ExitStack
    
    4 4
     from fnmatch import fnmatch
    
    5
    +from functools import partial
    
    5 6
     from tempfile import TemporaryDirectory
    
    6 7
     
    
    7 8
     import click
    
    ... ... @@ -111,11 +112,14 @@ def complete_target(args, incomplete):
    111 112
         return complete_list
    
    112 113
     
    
    113 114
     
    
    114
    -def complete_artifact(args, incomplete):
    
    115
    +def complete_artifact(orig_args, args, incomplete):
    
    115 116
         from .._context import Context
    
    116 117
         ctx = Context()
    
    117 118
     
    
    118 119
         config = None
    
    120
    +    for i, arg in enumerate(orig_args):
    
    121
    +        if arg in ('-c', '--config'):
    
    122
    +            config = orig_args[i + 1]
    
    119 123
         for i, arg in enumerate(args):
    
    120 124
             if arg in ('-c', '--config'):
    
    121 125
                 config = args[i + 1]
    
    ... ... @@ -128,8 +132,9 @@ def complete_artifact(args, incomplete):
    128 132
         return complete_list
    
    129 133
     
    
    130 134
     
    
    131
    -def override_completions(cmd, cmd_param, args, incomplete):
    
    135
    +def override_completions(orig_args, cmd, cmd_param, args, incomplete):
    
    132 136
         """
    
    137
    +    :param orig_args: original, non-completion args
    
    133 138
         :param cmd_param: command definition
    
    134 139
         :param args: full list of args typed before the incomplete arg
    
    135 140
         :param incomplete: the incomplete text to autocomplete
    
    ... ... @@ -150,7 +155,7 @@ def override_completions(cmd, cmd_param, args, incomplete):
    150 155
                     cmd_param.opts == ['--track-except']):
    
    151 156
                 return complete_target(args, incomplete)
    
    152 157
             if cmd_param.name == 'artifacts':
    
    153
    -            return complete_artifact(args, incomplete)
    
    158
    +            return complete_artifact(orig_args, args, incomplete)
    
    154 159
     
    
    155 160
         raise CompleteUnhandled()
    
    156 161
     
    
    ... ... @@ -161,7 +166,7 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
    161 166
         # Hook for the Bash completion.  This only activates if the Bash
    
    162 167
         # completion is actually enabled, otherwise this is quite a fast
    
    163 168
         # noop.
    
    164
    -    if main_bashcomplete(self, prog_name, override_completions):
    
    169
    +    if main_bashcomplete(self, prog_name, partial(override_completions, args)):
    
    165 170
     
    
    166 171
             # If we're running tests we cant just go calling exit()
    
    167 172
             # from the main process.
    

  • buildstream/_options/optionarch.py
    ... ... @@ -17,6 +17,8 @@
    17 17
     #  Authors:
    
    18 18
     #        Tristan Van Berkom <tristan vanberkom codethink co uk>
    
    19 19
     
    
    20
    +from .. import _yaml
    
    21
    +from .._exceptions import LoadError, LoadErrorReason, PlatformError
    
    20 22
     from .._platform import Platform
    
    21 23
     from .optionenum import OptionEnum
    
    22 24
     
    
    ... ... @@ -41,7 +43,34 @@ class OptionArch(OptionEnum):
    41 43
             super(OptionArch, self).load(node, allow_default_definition=False)
    
    42 44
     
    
    43 45
         def load_default_value(self, node):
    
    44
    -        return Platform.get_host_arch()
    
    46
    +        arch = Platform.get_host_arch()
    
    47
    +
    
    48
    +        default_value = None
    
    49
    +
    
    50
    +        for index, value in enumerate(self.values):
    
    51
    +            try:
    
    52
    +                canonical_value = Platform.canonicalize_arch(value)
    
    53
    +                if default_value is None and canonical_value == arch:
    
    54
    +                    default_value = value
    
    55
    +                    # Do not terminate the loop early to ensure we validate
    
    56
    +                    # all values in the list.
    
    57
    +            except PlatformError as e:
    
    58
    +                provenance = _yaml.node_get_provenance(node, key='values', indices=[index])
    
    59
    +                prefix = ""
    
    60
    +                if provenance:
    
    61
    +                    prefix = "{}: ".format(provenance)
    
    62
    +                raise LoadError(LoadErrorReason.INVALID_DATA,
    
    63
    +                                "{}Invalid value for {} option '{}': {}"
    
    64
    +                                .format(prefix, self.OPTION_TYPE, self.name, e))
    
    65
    +
    
    66
    +        if default_value is None:
    
    67
    +            # Host architecture is not supported by the project.
    
    68
    +            # Do not raise an error here as the user may override it.
    
    69
    +            # If the user does not override it, an error will be raised
    
    70
    +            # by resolve()/validate().
    
    71
    +            default_value = arch
    
    72
    +
    
    73
    +        return default_value
    
    45 74
     
    
    46 75
         def resolve(self):
    
    47 76
     
    

  • buildstream/_platform/platform.py
    ... ... @@ -77,20 +77,17 @@ class Platform():
    77 77
         def get_host_os():
    
    78 78
             return os.uname()[0]
    
    79 79
     
    
    80
    -    # get_host_arch():
    
    80
    +    # canonicalize_arch():
    
    81 81
         #
    
    82
    -    # This returns the architecture of the host machine. The possible values
    
    83
    -    # map from uname -m in order to be a OS independent list.
    
    82
    +    # This returns the canonical, OS-independent architecture name
    
    83
    +    # or raises a PlatformError if the architecture is unknown.
    
    84 84
         #
    
    85
    -    # Returns:
    
    86
    -    #    (string): String representing the architecture
    
    87 85
         @staticmethod
    
    88
    -    def get_host_arch():
    
    89
    -        # get the hardware identifier from uname
    
    90
    -        uname_machine = os.uname()[4]
    
    91
    -        uname_to_arch = {
    
    86
    +    def canonicalize_arch(arch):
    
    87
    +        aliases = {
    
    88
    +            "aarch32": "aarch32",
    
    92 89
                 "aarch64": "aarch64",
    
    93
    -            "aarch64_be": "aarch64-be",
    
    90
    +            "aarch64-be": "aarch64-be",
    
    94 91
                 "amd64": "x86-64",
    
    95 92
                 "arm": "aarch32",
    
    96 93
                 "armv8l": "aarch64",
    
    ... ... @@ -99,17 +96,34 @@ class Platform():
    99 96
                 "i486": "x86-32",
    
    100 97
                 "i586": "x86-32",
    
    101 98
                 "i686": "x86-32",
    
    99
    +            "power-isa-be": "power-isa-be",
    
    100
    +            "power-isa-le": "power-isa-le",
    
    102 101
                 "ppc64": "power-isa-be",
    
    103 102
                 "ppc64le": "power-isa-le",
    
    104 103
                 "sparc": "sparc-v9",
    
    105 104
                 "sparc64": "sparc-v9",
    
    106
    -            "x86_64": "x86-64"
    
    105
    +            "sparc-v9": "sparc-v9",
    
    106
    +            "x86-32": "x86-32",
    
    107
    +            "x86-64": "x86-64"
    
    107 108
             }
    
    109
    +
    
    108 110
             try:
    
    109
    -            return uname_to_arch[uname_machine]
    
    111
    +            return aliases[arch.replace('_', '-')]
    
    110 112
             except KeyError:
    
    111
    -            raise PlatformError("uname gave unsupported machine architecture: {}"
    
    112
    -                                .format(uname_machine))
    
    113
    +            raise PlatformError("Unknown architecture: {}".format(arch))
    
    114
    +
    
    115
    +    # get_host_arch():
    
    116
    +    #
    
    117
    +    # This returns the architecture of the host machine. The possible values
    
    118
    +    # map from uname -m in order to be a OS independent list.
    
    119
    +    #
    
    120
    +    # Returns:
    
    121
    +    #    (string): String representing the architecture
    
    122
    +    @staticmethod
    
    123
    +    def get_host_arch():
    
    124
    +        # get the hardware identifier from uname
    
    125
    +        uname_machine = os.uname()[4]
    
    126
    +        return Platform.canonicalize_arch(uname_machine)
    
    113 127
     
    
    114 128
         ##################################################################
    
    115 129
         #                        Sandbox functions                       #
    

  • buildstream/_project.py
    ... ... @@ -507,7 +507,16 @@ class Project():
    507 507
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
    
    508 508
     
    
    509 509
             # Load remote-execution configuration for this project
    
    510
    -        self.remote_execution_specs = SandboxRemote.specs_from_config_node(config, self.directory)
    
    510
    +        project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
    
    511
    +        override_specs = SandboxRemote.specs_from_config_node(
    
    512
    +            self._context.get_overrides(self.name), self.directory)
    
    513
    +
    
    514
    +        if override_specs is not None:
    
    515
    +            self.remote_execution_specs = override_specs
    
    516
    +        elif project_specs is not None:
    
    517
    +            self.remote_execution_specs = project_specs
    
    518
    +        else:
    
    519
    +            self.remote_execution_specs = self._context.remote_execution_specs
    
    511 520
     
    
    512 521
             # Load sandbox environment variables
    
    513 522
             self.base_environment = _yaml.node_get(config, Mapping, 'environment')
    

  • buildstream/element.py
    ... ... @@ -2441,11 +2441,17 @@ class Element(Plugin):
    2441 2441
             # Sandbox config, unlike others, has fixed members so we should validate them
    
    2442 2442
             _yaml.node_validate(sandbox_config, ['build-uid', 'build-gid', 'build-os', 'build-arch'])
    
    2443 2443
     
    
    2444
    +        build_arch = self.node_get_member(sandbox_config, str, 'build-arch', default=None)
    
    2445
    +        if build_arch:
    
    2446
    +            build_arch = Platform.canonicalize_arch(build_arch)
    
    2447
    +        else:
    
    2448
    +            build_arch = host_arch
    
    2449
    +
    
    2444 2450
             return SandboxConfig(
    
    2445 2451
                 self.node_get_member(sandbox_config, int, 'build-uid'),
    
    2446 2452
                 self.node_get_member(sandbox_config, int, 'build-gid'),
    
    2447 2453
                 self.node_get_member(sandbox_config, str, 'build-os', default=host_os),
    
    2448
    -            self.node_get_member(sandbox_config, str, 'build-arch', default=host_arch))
    
    2454
    +            build_arch)
    
    2449 2455
     
    
    2450 2456
         # This makes a special exception for the split rules, which
    
    2451 2457
    # elements may extend but whose defaults are defined in the project.
    

  • buildstream/plugins/elements/script.py
    ... ... @@ -42,6 +42,9 @@ import buildstream
    42 42
     class ScriptElement(buildstream.ScriptElement):
    
    43 43
         # pylint: disable=attribute-defined-outside-init
    
    44 44
     
    
    45
    +    # This plugin has been modified to avoid the use of Sandbox.get_directory
    
    46
    +    BST_VIRTUAL_DIRECTORY = True
    
    47
    +
    
    45 48
         def configure(self, node):
    
    46 49
             for n in self.node_get_member(node, list, 'layout', []):
    
    47 50
                 dst = self.node_subst_member(n, 'destination')
    

  • buildstream/sandbox/_sandboxremote.py
    ... ... @@ -62,10 +62,32 @@ class SandboxRemote(Sandbox):
    62 62
             self.storage_url = config.storage_service['url']
    
    63 63
             self.exec_url = config.exec_service['url']
    
    64 64
     
    
    65
    +        exec_certs = {}
    
    66
    +        for key in ['client-cert', 'client-key', 'server-cert']:
    
    67
    +            if key in config.exec_service:
    
    68
    +                with open(config.exec_service[key], 'rb') as f:
    
    69
    +                    exec_certs[key] = f.read()
    
    70
    +
    
    71
    +        self.exec_credentials = grpc.ssl_channel_credentials(
    
    72
    +            root_certificates=exec_certs.get('server-cert'),
    
    73
    +            private_key=exec_certs.get('client-key'),
    
    74
    +            certificate_chain=exec_certs.get('client-cert'))
    
    75
    +
    
    76
    +        action_certs = {}
    
    77
    +        for key in ['client-cert', 'client-key', 'server-cert']:
    
    78
    +            if key in config.action_service:
    
    79
    +                with open(config.action_service[key], 'rb') as f:
    
    80
    +                    action_certs[key] = f.read()
    
    81
    +
    
    65 82
             if config.action_service:
    
    66 83
                 self.action_url = config.action_service['url']
    
    84
    +            self.action_credentials = grpc.ssl_channel_credentials(
    
    85
    +                root_certificates=action_certs.get('server-cert'),
    
    86
    +                private_key=action_certs.get('client-key'),
    
    87
    +                certificate_chain=action_certs.get('client-cert'))
    
    67 88
             else:
    
    68 89
                 self.action_url = None
    
    90
    +            self.action_credentials = None
    
    69 91
     
    
    70 92
             self.server_instance = config.exec_service.get('instance', None)
    
    71 93
             self.storage_instance = config.storage_service.get('instance', None)
    
    ... ... @@ -81,7 +103,7 @@ class SandboxRemote(Sandbox):
    81 103
             self._get_context().message(Message(None, MessageType.INFO, msg))
    
    82 104
     
    
    83 105
         @staticmethod
    
    84
    -    def specs_from_config_node(config_node, basedir):
    
    106
    +    def specs_from_config_node(config_node, basedir=None):
    
    85 107
     
    
    86 108
             def require_node(config, keyname):
    
    87 109
                 val = config.get(keyname)
    
    ... ... @@ -109,10 +131,10 @@ class SandboxRemote(Sandbox):
    109 131
             remote_exec_storage_config = require_node(remote_config, 'storage-service')
    
    110 132
             remote_exec_action_config = remote_config.get('action-cache-service', {})
    
    111 133
     
    
    112
    -        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'])
    
    134
    +        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'] + tls_keys)
    
    113 135
             _yaml.node_validate(remote_exec_storage_config, ['url', 'instance'] + tls_keys)
    
    114 136
             if remote_exec_action_config:
    
    115
    -            _yaml.node_validate(remote_exec_action_config, ['url'])
    
    137
    +            _yaml.node_validate(remote_exec_action_config, ['url'] + tls_keys)
    
    116 138
             else:
    
    117 139
                 remote_config['action-service'] = None
    
    118 140
     
    
    ... ... @@ -135,6 +157,19 @@ class SandboxRemote(Sandbox):
    135 157
                                           "remote-execution configuration. Your config is missing '{}'."
    
    136 158
                                           .format(str(provenance), tls_keys, key))
    
    137 159
     
    
    160
    +        def resolve_path(path):
    
    161
    +            if basedir and path:
    
    162
    +                return os.path.join(basedir, path)
    
    163
    +            else:
    
    164
    +                return path
    
    165
    +
    
    166
    +        for key in tls_keys:
    
    167
    +            for d in (remote_config['execution-service'],
    
    168
    +                      remote_config['storage-service'],
    
    169
    +                      remote_exec_action_config):
    
    170
    +                if key in d:
    
    171
    +                    d[key] = resolve_path(d[key])
    
    172
    +
    
    138 173
             spec = RemoteExecutionSpec(remote_config['execution-service'],
    
    139 174
                                        remote_config['storage-service'],
    
    140 175
                                        remote_exec_action_config)
    
    ... ... @@ -295,6 +330,8 @@ class SandboxRemote(Sandbox):
    295 330
                                    "for example: http://buildservice:50051.")
    
    296 331
             if url.scheme == 'http':
    
    297 332
                 channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
    
    333
    +        elif url.scheme == 'https':
    
    334
    +            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.exec_credentials)
    
    298 335
             else:
    
    299 336
                 raise SandboxError("Remote execution currently only supports the 'http' protocol "
    
    300 337
                                    "and '{}' was supplied.".format(url.scheme))
    
    ... ... @@ -352,11 +389,11 @@ class SandboxRemote(Sandbox):
    352 389
             if not url.port:
    
    353 390
                 raise SandboxError("You must supply a protocol and port number in the action-cache-service url, "
    
    354 391
                                    "for example: http://buildservice:50051.")
    
    355
    -        if not url.scheme == "http":
    
    356
    -            raise SandboxError("Currently only support http for the action cache"
    
    357
    -                               "and {} was supplied".format(url.scheme))
    
    392
    +        if url.scheme == 'http':
    
    393
    +            channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
    
    394
    +        elif url.scheme == 'https':
    
    395
    +            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.action_credentials)
    
    358 396
     
    
    359
    -        channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
    
    360 397
             request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
    
    361 398
             stub = remote_execution_pb2_grpc.ActionCacheStub(channel)
    
    362 399
             try:
    

  • doc/source/format_project.rst
    ... ... @@ -218,6 +218,7 @@ The use of ports are required to distinguish between pull only access and
    218 218
     push/pull access. For information regarding the server/client certificates
    
    219 219
     and keys, please see: :ref:`Key pair for the server <server_authentication>`.
    
    220 220
     
    
    221
    +.. _project_remote_execution:
    
    221 222
     
    
    222 223
     Remote execution
    
    223 224
     ~~~~~~~~~~~~~~~~
    
    ... ... @@ -243,9 +244,6 @@ using the `remote-execution` option:
    243 244
         action-cache-service:
    
    244 245
           url: http://bar.action.com:50052
    
    245 246
     
    
    246
    -The execution-service part of remote execution does not support encrypted
    
    247
    -connections yet, so the protocol must always be http.
    
    248
    -
    
    249 247
     storage-service specifies a remote CAS store and the parameters are the
    
    250 248
     same as those used to specify an :ref:`artifact server <artifacts>`.
    
    251 249
     
    
    ... ... @@ -268,6 +266,9 @@ instance names.
    268 266
     
    
    269 267
     The Remote Execution API can be found via https://github.com/bazelbuild/remote-apis.
    
    270 268
     
    
    269
    +Remote execution configuration can also be provided in the `user
    
    270
    +configuration <user_config_remote_execution>`.
    
    271
    +
    
    271 272
     .. _project_essentials_mirrors:
    
    272 273
     
    
    273 274
     Mirrors
    

  • doc/source/using_config.rst
    ... ... @@ -100,6 +100,54 @@ pull only access and push/pull access. For information regarding this and the
    100 100
     server/client certificates and keys, please see:
    
    101 101
     :ref:`Key pair for the server <server_authentication>`.
    
    102 102
     
    
    103
    +.. _user_config_remote_execution:
    
    104
    +
    
    105
    +Remote execution
    
    106
    +~~~~~~~~~~~~~~~~
    
    107
    +
    
    108
    +The same configuration for :ref:`remote execution <project_remote_execution>`
    
    109
    +in ``project.conf`` can be provided in the user configuration.
    
    110
    +
    
    111
    +There is only one remote execution configuration used per project.
    
    112
    +
    
    113
    +Project-specific overrides take priority. The global
    
    114
    +configuration will be used as fallback.
    
    115
    +
    
    116
    +1. Global remote execution fallback:
    
    117
    +
    
    118
    +.. code:: yaml
    
    119
    +
    
    120
    +  remote-execution:
    
    121
    +    execution-service:
    
    122
    +      url: http://execution.fallback.example.com:50051
    
    123
    +      instance-name: main
    
    124
    +    storage-service:
    
    125
    +      url: https://storage.fallback.example.com:11002/
    
    126
    +      server-cert: /keys/server.crt
    
    127
    +      client-cert: /keys/client.crt
    
    128
    +      client-key: /keys/client.key
    
    129
    +      instance-name: main
    
    130
    +    action-cache-service:
    
    131
    +      url: http://action.fallback.example.com:50052
    
    132
    +
    
    133
    +2. Project override:
    
    134
    +
    
    135
    +.. code:: yaml
    
    136
    +
    
    137
    +  projects:
    
    138
    +    some_project:
    
    139
    +      remote-execution:
    
    140
    +        execution-service:
    
    141
    +          url: http://execution.some_project.example.com:50051
    
    142
    +          instance-name: main
    
    143
    +        storage-service:
    
    144
    +          url: https://storage.some_project.example.com:11002/
    
    145
    +          server-cert: /some_project_keys/server.crt
    
    146
    +          client-cert: /some_project_keys/client.crt
    
    147
    +          client-key: /some_project_keys/client.key
    
    148
    +          instance-name: main
    
    149
    +        action-cache-service:
    
    150
    +          url: http://action.some_project.example.com:50052
    
    103 151
     
    
    104 152
     
    
    105 153
     Strict build plan
    

  • tests/completions/completions.py
    ... ... @@ -281,3 +281,44 @@ def test_argument_element_invalid(datafiles, cli, project, cmd, word_idx, expect
    281 281
     ])
    
    282 282
     def test_help_commands(cli, cmd, word_idx, expected):
    
    283 283
         assert_completion(cli, cmd, word_idx, expected)
    
    284
    +
    
    285
    +
    
    286
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'project'))
    
    287
    +def test_argument_artifact(cli, tmpdir, datafiles):
    
    288
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    289
    +
    
    290
    +    # Build an import element with no dependencies (as there will only be ONE cache key)
    
    291
    +    result = cli.run(project=project, args=['build', 'import-bin.bst'])  # Has no dependencies
    
    292
    +    result.assert_success()
    
    293
    +
    
    294
    +    # Get the key and the artifact ref ($project/$element_name/$key)
    
    295
    +    key = cli.get_element_key(project, 'import-bin.bst')
    
    296
    +    artifact = os.path.join('test', 'import-bin', key)
    
    297
    +
    
    298
    +    # Test autocompletion of the artifact
    
    299
    +    cmds = [
    
    300
    +        'bst artifact log ',
    
    301
    +        'bst artifact log t',
    
    302
    +        'bst artifact log test/'
    
    303
    +    ]
    
    304
    +
    
    305
    +    for i, cmd in enumerate(cmds):
    
    306
    +        word_idx = 3
    
    307
    +        result = cli.run(project=project, cwd=project, env={
    
    308
    +            '_BST_COMPLETION': 'complete',
    
    309
    +            'COMP_WORDS': cmd,
    
    310
    +            'COMP_CWORD': str(word_idx)
    
    311
    +        })
    
    312
    +        words = []
    
    313
    +        if result.output:
    
    314
    +            words = result.output.splitlines()  # This leaves an extra space on each e.g. 'foo.bst ']
    
    315
    +            words = [word.strip() for word in words]
    
    316
    +
    
    317
    +            if i == 0:
    
    318
    +                expected = PROJECT_ELEMENTS + [artifact]  # We should now be able to see the artifact
    
    319
    +            elif i == 1:
    
    320
    +                expected = ['target.bst', artifact]
    
    321
    +            elif i == 2:
    
    322
    +                expected = [artifact]
    
    323
    +
    
    324
    +            assert expected == words

  • tests/format/option-arch-alias/element.bst
    1
    +kind: autotools
    
    2
    +variables:
    
    3
    +  result: "Nothing"
    
    4
    +  (?):
    
    5
    +  - machine_arch == "arm":
    
    6
    +      result: "Army"
    
    7
    +  - machine_arch == "x86_64":
    
    8
    +      result: "X86-64y"

  • tests/format/option-arch-alias/project.conf
    1
    +name: test
    
    2
    +
    
    3
    +options:
    
    4
    +  machine_arch:
    
    5
    +    type: arch
    
    6
    +    description: The machine architecture
    
    7
    +    values:
    
    8
    +    - arm
    
    9
    +    - x86_64

  • tests/format/option-arch-unknown/element.bst
    1
    +kind: autotools
    
    2
    +variables:
    
    3
    +  result: "Nothing"
    
    4
    +  (?):
    
    5
    +  - machine_arch == "aarch32":
    
    6
    +      result: "Army"
    
    7
    +  - machine_arch == "aarch64":
    
    8
    +      result: "Aarchy"
    
    9
    +  - machine_arch == "x86-128":
    
    10
    +      result: "X86-128y"

  • tests/format/option-arch-unknown/project.conf
    1
    +name: test
    
    2
    +
    
    3
    +options:
    
    4
    +  machine_arch:
    
    5
    +    type: arch
    
    6
    +    description: The machine architecture
    
    7
    +    values:
    
    8
    +    - aarch32
    
    9
    +    - aarch64
    
    10
    +    - x86-128

  • tests/format/optionarch.py
    ... ... @@ -75,3 +75,47 @@ def test_unsupported_arch(cli, datafiles):
    75 75
             ])
    
    76 76
     
    
    77 77
             result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    
    78
    +
    
    79
    +
    
    80
    +@pytest.mark.datafiles(DATA_DIR)
    
    81
    +def test_alias(cli, datafiles):
    
    82
    +
    
    83
    +    with override_uname_arch("arm"):
    
    84
    +        project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-alias')
    
    85
    +        result = cli.run(project=project, silent=True, args=[
    
    86
    +            'show',
    
    87
    +            '--deps', 'none',
    
    88
    +            '--format', '%{vars}',
    
    89
    +            'element.bst'
    
    90
    +        ])
    
    91
    +
    
    92
    +        result.assert_success()
    
    93
    +
    
    94
    +
    
    95
    +@pytest.mark.datafiles(DATA_DIR)
    
    96
    +def test_unknown_host_arch(cli, datafiles):
    
    97
    +
    
    98
    +    with override_uname_arch("x86_128"):
    
    99
    +        project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch')
    
    100
    +        result = cli.run(project=project, silent=True, args=[
    
    101
    +            'show',
    
    102
    +            '--deps', 'none',
    
    103
    +            '--format', '%{vars}',
    
    104
    +            'element.bst'
    
    105
    +        ])
    
    106
    +
    
    107
    +        result.assert_main_error(ErrorDomain.PLATFORM, None)
    
    108
    +
    
    109
    +
    
    110
    +@pytest.mark.datafiles(DATA_DIR)
    
    111
    +def test_unknown_project_arch(cli, datafiles):
    
    112
    +
    
    113
    +    project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-unknown')
    
    114
    +    result = cli.run(project=project, silent=True, args=[
    
    115
    +        'show',
    
    116
    +        '--deps', 'none',
    
    117
    +        '--format', '%{vars}',
    
    118
    +        'element.bst'
    
    119
    +    ])
    
    120
    +
    
    121
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)

  • tox.ini
    1
    +#
    
    2
    +# Tox global configuration
    
    3
    +#
    
    1 4
     [tox]
    
    2 5
     envlist = py35,py36,py37
    
    3 6
     skip_missing_interpreters = true
    
    4 7
     
    
    8
    +#
    
    9
    +# Defaults for all environments
    
    10
    +#
    
    11
    +# Anything specified here is inherited by the sections
    
    12
    +#
    
    5 13
     [testenv]
    
    6
    -commands = pytest {posargs}
    
    14
    +commands =
    
    15
    +    pytest --basetemp {envtmpdir} {posargs}
    
    16
    +    mkdir -p .coverage-reports
    
    17
    +    mv {envtmpdir}/.coverage {toxinidir}/.coverage-reports/.coverage.{env:COVERAGE_PREFIX:}{envname}
    
    7 18
     deps =
    
    8 19
         -rrequirements/requirements.txt
    
    9 20
         -rrequirements/dev-requirements.txt
    
    ... ... @@ -13,6 +24,32 @@ passenv =
    13 24
         GI_TYPELIB_PATH
    
    14 25
         INTEGRATION_CACHE
    
    15 26
     
    
    27
    +#
    
    28
    +# These keys are not inherited by any other sections
    
    29
    +#
    
    30
    +setenv =
    
    31
    +    py{35,36,37}: COVERAGE_FILE = {envtmpdir}/.coverage
    
    32
    +whitelist_externals =
    
    33
    +    py{35,36,37}:
    
    34
    +        mv
    
    35
    +        mkdir
    
    36
    +
    
    37
    +#
    
    38
    +# Coverage reporting
    
    39
    +#
    
    40
    +[testenv:coverage]
    
    41
    +commands =
    
    42
    +    - coverage combine --rcfile={toxinidir}/.coveragerc {toxinidir}/.coverage-reports/
    
    43
    +    coverage report --rcfile={toxinidir}/.coveragerc -m
    
    44
    +deps =
    
    45
    +    -rrequirements/requirements.txt
    
    46
    +    -rrequirements/dev-requirements.txt
    
    47
    +setenv =
    
    48
    +    COVERAGE_FILE = {toxinidir}/.coverage-reports/.coverage
    
    49
    +
    
    50
    +#
    
    51
    +# Running linters
    
    52
    +#
    
    16 53
     [testenv:lint]
    
    17 54
     commands =
    
    18 55
         pycodestyle
    
    ... ... @@ -22,6 +59,9 @@ deps =
    22 59
         -rrequirements/dev-requirements.txt
    
    23 60
         -rrequirements/plugin-requirements.txt
    
    24 61
     
    
    62
    +#
    
    63
    +# Building documentation
    
    64
    +#
    
    25 65
     [testenv:docs]
    
    26 66
     commands =
    
    27 67
         make -C doc
    



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]