[Notes] [Git][BuildStream/buildstream][tpollard/752] 7 commits: _platform/linux.py: Move get_bwrap_version into _site.py




Tom Pollard pushed to branch tpollard/752 at BuildStream / buildstream

Commits:

16 changed files:

  • NEWS
  • buildstream/_artifactcache/artifactcache.py
  • buildstream/_context.py
  • buildstream/_frontend/app.py
  • buildstream/_frontend/cli.py
  • buildstream/_platform/linux.py
  • buildstream/_site.py
  • buildstream/sandbox/_sandboxbwrap.py
  • tests/completions/completions.py
  • tests/frontend/pull.py
  • tests/frontend/push.py
  • tests/integration/project/elements/sandbox-bwrap/break-shell.bst
  • tests/integration/project/elements/sandbox-bwrap/command-exit-42.bst
  • tests/integration/project/elements/sandbox-bwrap/non-executable-shell.bst
  • tests/integration/sandbox-bwrap.py
  • tests/testutils/site.py

Changes:

  • NEWS
    @@ -45,6 +45,15 @@ buildstream 1.3.1
         instead of just a specially-formatted build-root with a `root` and `scratch`
         subdirectory.
     
    +  o bst interaction with defined artifact servers can be controlled more granularly.
    +    This can be done via the user configuration option `useremotes` or via the bst cli
    +    main option '--use-remotes'. It can be set to 'none', 'user' or the default value
    +    'all'. Unless specifically overridden, this option is consulted whenever bst
    +    considers whether to pull from or push to the available artifact servers (be they
    +    user or project defined). Setting the value to 'user', for example, and performing
    +    a build would lead to any project or junction defined artifact servers being
    +    ignored, whilst still attempting to pull from and push to any user defined remotes.
    +
     
     =================
     buildstream 1.1.5
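
    For illustration, the new behaviour can be selected either in the user configuration
    or per invocation; a minimal sketch (the config file path and element name below are
    only examples, not taken from the commit):

        # e.g. in ~/.config/buildstream.conf
        useremotes: user

        # or equivalently for a single invocation
        bst --use-remotes user build element.bst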
    

  • buildstream/_artifactcache/artifactcache.py
    @@ -156,7 +156,7 @@ class ArtifactCache():
         # Sets up which remotes to use
         #
         # Args:
    -    #    use_config (bool): Whether to use project configuration
    +    #    use_config (bool): Whether to use configuration
         #    remote_url (str): Remote artifact cache URL
         #
         # This requires that all of the projects which are to be processed in the session
    @@ -175,11 +175,16 @@ class ArtifactCache():
                 self._set_remotes([ArtifactCacheSpec(remote_url, push=True)])
                 has_remote_caches = True
             if use_config:
    -            for project in self.context.get_projects():
    -                artifact_caches = _configured_remote_artifact_cache_specs(self.context, project)
    -                if artifact_caches:  # artifact_caches is a list of ArtifactCacheSpec instances
    -                    self._set_remotes(artifact_caches, project=project)
    -                    has_remote_caches = True
    +            if self.context.use_remotes == 'all':
    +                for project in self.context.get_projects():
    +                    artifact_caches = _configured_remote_artifact_cache_specs(self.context, project)
    +                    if artifact_caches:  # artifact_caches is a list of ArtifactCacheSpec instances
    +                        self._set_remotes(artifact_caches, project=project)
    +                        has_remote_caches = True
    +            # If configured to only use user configured remotes, pass existing user cache spec
    +            elif self.context.use_remotes == 'user' and self.context.artifact_cache_specs:
    +                self._set_remotes(self.context.artifact_cache_specs)
    +                has_remote_caches = True
             if has_remote_caches:
                 self._initialize_remotes()
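
    In effect, the three settings select remotes as follows; a simplified sketch for
    illustration only (the function and variable names are hypothetical, this is not
    code from the commit):

        # 'all' keeps user plus project/junction declared remotes, 'user' keeps only
        # the user-configured ones, and 'none' configures no remote artifact servers.
        def select_remotes(use_remotes, user_specs, project_specs):
            if use_remotes == 'all':
                return user_specs + project_specs
            if use_remotes == 'user':
                return user_specs
            return []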
     
    

  • buildstream/_context.py
    @@ -110,6 +110,9 @@ class Context():
             # Make sure the XDG vars are set in the environment before loading anything
             self._init_xdg()
     
    +        # Which remote artifact servers to interact with. all, user or none
    +        self.use_remotes = 'all'
    +
             # Private variables
             self._cache_key = None
             self._message_handler = None
    @@ -160,7 +163,7 @@ class Context():
             _yaml.node_validate(defaults, [
                 'sourcedir', 'builddir', 'artifactdir', 'logdir',
                 'scheduler', 'artifacts', 'logging', 'projects',
    -            'cache'
    +            'cache', 'useremotes'
             ])
     
             for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
    @@ -185,6 +188,13 @@ class Context():
             # Load artifact share configuration
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
     
    +        # Load remote artifact server usage
    +        self.use_remotes = _yaml.node_get(defaults, str, 'useremotes', default_value='all')
    +        valid_actions = ['all', 'user', 'none']
    +        if self.use_remotes not in valid_actions:
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "useremotes should be one of: {}".format(", ".join(valid_actions)))
    +
             # Load logging config
             logging = _yaml.node_get(defaults, Mapping, 'logging')
             _yaml.node_validate(logging, [
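
    As a quick illustration of the validation added above (the value is hypothetical),
    a user configuration containing:

        useremotes: project

    would be rejected at load time with a LoadError reading "useremotes should be one
    of: all, user, none", whereas omitting the key falls back to the default of 'all'.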
    

  • buildstream/_frontend/app.py
    @@ -182,7 +182,8 @@ class App():
                 'fetchers': 'sched_fetchers',
                 'builders': 'sched_builders',
                 'pushers': 'sched_pushers',
    -            'network_retries': 'sched_network_retries'
    +            'network_retries': 'sched_network_retries',
    +            'use_remotes': 'use_remotes'
             }
             for cli_option, context_attr in override_map.items():
                 option_value = self._main_options.get(cli_option)
    

  • buildstream/_frontend/cli.py
    @@ -219,6 +219,9 @@ def print_version(ctx, param, value):
                   help="Specify a project option")
     @click.option('--default-mirror', default=None,
                   help="The mirror to fetch from first, before attempting other mirrors")
    +@click.option('--use-remotes', default='all',
    +              type=click.Choice(['all', 'user', 'none']),
    +              help='The remote artifact caches to interact with (default: all)')
     @click.pass_context
     def cli(context, **kwargs):
         """Build and manipulate BuildStream projects
    

  • buildstream/_platform/linux.py
    @@ -18,9 +18,9 @@
     #        Tristan Maat <tristan maat codethink co uk>
     
     import os
    -import shutil
     import subprocess
     
    +from .. import _site
     from .. import utils
     from ..sandbox import SandboxDummy
     
    @@ -38,16 +38,18 @@ class Linux(Platform):
     
             self._have_fuse = os.path.exists("/dev/fuse")
     
    -        bwrap_version = self._get_bwrap_version()
    +        bwrap_version = _site.get_bwrap_version()
     
             if bwrap_version is None:
                 self._bwrap_exists = False
                 self._have_good_bwrap = False
                 self._die_with_parent_available = False
    +            self._json_status_available = False
             else:
                 self._bwrap_exists = True
                 self._have_good_bwrap = (0, 1, 2) <= bwrap_version
                 self._die_with_parent_available = (0, 1, 8) <= bwrap_version
    +            self._json_status_available = (0, 3, 2) <= bwrap_version
     
             self._local_sandbox_available = self._have_fuse and self._have_good_bwrap
     
    @@ -97,6 +99,7 @@ class Linux(Platform):
             # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
             kwargs['user_ns_available'] = self._user_ns_available
             kwargs['die_with_parent_available'] = self._die_with_parent_available
    +        kwargs['json_status_available'] = self._json_status_available
             return SandboxBwrap(*args, **kwargs)
     
         def _check_user_ns_available(self):
    @@ -119,21 +122,3 @@ class Linux(Platform):
                 output = ''
     
             return output == 'root'
    -
    -    def _get_bwrap_version(self):
    -        # Get the current bwrap version
    -        #
    -        # returns None if no bwrap was found
    -        # otherwise returns a tuple of 3 int: major, minor, patch
    -        bwrap_path = shutil.which('bwrap')
    -
    -        if not bwrap_path:
    -            return None
    -
    -        cmd = [bwrap_path, "--version"]
    -        try:
    -            version = str(subprocess.check_output(cmd).split()[1], "utf-8")
    -        except subprocess.CalledProcessError:
    -            return None
    -
    -        return tuple(int(x) for x in version.split("."))
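
    The feature checks above rely on Python's element-wise tuple comparison; a small
    illustrative snippet (the detected version below is hypothetical, not output from
    the commit):

        # get_bwrap_version() yields a tuple such as (0, 3, 1), or None when bwrap is
        # missing; tuples compare element by element, so "<=" reads as "at least".
        bwrap_version = (0, 3, 1)
        print((0, 1, 8) <= bwrap_version)   # True:  --die-with-parent can be used
        print((0, 3, 2) <= bwrap_version)   # False: --json-status-fd is not available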

  • buildstream/_site.py
    @@ -18,6 +18,8 @@
     #        Tristan Van Berkom <tristan vanberkom codethink co uk>
     
     import os
    +import shutil
    +import subprocess
     
     #
     # Private module declaring some info about where the buildstream
    @@ -44,3 +46,22 @@ build_all_template = os.path.join(root, 'data', 'build-all.sh.in')
     
     # Module building script template
     build_module_template = os.path.join(root, 'data', 'build-module.sh.in')
    +
    +
    +def get_bwrap_version():
    +    # Get the current bwrap version
    +    #
    +    # returns None if no bwrap was found
    +    # otherwise returns a tuple of 3 int: major, minor, patch
    +    bwrap_path = shutil.which('bwrap')
    +
    +    if not bwrap_path:
    +        return None
    +
    +    cmd = [bwrap_path, "--version"]
    +    try:
    +        version = str(subprocess.check_output(cmd).split()[1], "utf-8")
    +    except subprocess.CalledProcessError:
    +        return None
    +
    +    return tuple(int(x) for x in version.split("."))

  • buildstream/sandbox/_sandboxbwrap.py
    @@ -17,6 +17,8 @@
     #  Authors:
     #        Andrew Leeming <andrew leeming codethink co uk>
     #        Tristan Van Berkom <tristan vanberkom codethink co uk>
    +import collections
    +import json
     import os
     import sys
     import time
    @@ -24,7 +26,8 @@ import errno
     import signal
     import subprocess
     import shutil
    -from contextlib import ExitStack
    +from contextlib import ExitStack, suppress
    +from tempfile import TemporaryFile
     
     import psutil
     
    @@ -53,6 +56,7 @@ class SandboxBwrap(Sandbox):
             super().__init__(*args, **kwargs)
             self.user_ns_available = kwargs['user_ns_available']
             self.die_with_parent_available = kwargs['die_with_parent_available']
    +        self.json_status_available = kwargs['json_status_available']
     
         def run(self, command, flags, *, cwd=None, env=None):
             stdout, stderr = self._get_output()
    @@ -160,24 +164,31 @@ class SandboxBwrap(Sandbox):
                     gid = self._get_config().build_gid
                     bwrap_command += ['--uid', str(uid), '--gid', str(gid)]
     
    -        # Add the command
    -        bwrap_command += command
    -
    -        # bwrap might create some directories while being suid
    -        # and may give them to root gid, if it does, we'll want
    -        # to clean them up after, so record what we already had
    -        # there just in case so that we can safely cleanup the debris.
    -        #
    -        existing_basedirs = {
    -            directory: os.path.exists(os.path.join(root_directory, directory))
    -            for directory in ['tmp', 'dev', 'proc']
    -        }
    -
    -        # Use the MountMap context manager to ensure that any redirected
    -        # mounts through fuse layers are in context and ready for bwrap
    -        # to mount them from.
    -        #
             with ExitStack() as stack:
    +            pass_fds = ()
    +            # Improve error reporting with json-status if available
    +            if self.json_status_available:
    +                json_status_file = stack.enter_context(TemporaryFile())
    +                pass_fds = (json_status_file.fileno(),)
    +                bwrap_command += ['--json-status-fd', str(json_status_file.fileno())]
    +
    +            # Add the command
    +            bwrap_command += command
    +
    +            # bwrap might create some directories while being suid
    +            # and may give them to root gid, if it does, we'll want
    +            # to clean them up after, so record what we already had
    +            # there just in case so that we can safely cleanup the debris.
    +            #
    +            existing_basedirs = {
    +                directory: os.path.exists(os.path.join(root_directory, directory))
    +                for directory in ['tmp', 'dev', 'proc']
    +            }
    +
    +            # Use the MountMap context manager to ensure that any redirected
    +            # mounts through fuse layers are in context and ready for bwrap
    +            # to mount them from.
    +            #
                 stack.enter_context(mount_map.mounted(self))
     
                 # If we're interactive, we want to inherit our stdin,
    @@ -190,7 +201,7 @@ class SandboxBwrap(Sandbox):
     
                 # Run bubblewrap !
                 exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr,
    -                                       (flags & SandboxFlags.INTERACTIVE))
    +                                       (flags & SandboxFlags.INTERACTIVE), pass_fds)
     
                 # Cleanup things which bwrap might have left behind, while
                 # everything is still mounted because bwrap can be creating
    @@ -238,10 +249,27 @@ class SandboxBwrap(Sandbox):
                             # a bug, bwrap mounted a tempfs here and when it exits, that better be empty.
                             pass
     
    +            if self.json_status_available:
    +                json_status_file.seek(0, 0)
    +                child_exit_code = None
    +                # The JSON status file's output is a JSON object per line
    +                # with the keys present identifying the type of message.
    +                # The only message relevant to us now is the exit-code of the subprocess.
    +                for line in json_status_file:
    +                    with suppress(json.decoder.JSONDecodeError):
    +                        o = json.loads(line)
    +                        if isinstance(o, collections.abc.Mapping) and 'exit-code' in o:
    +                            child_exit_code = o['exit-code']
    +                            break
    +                if child_exit_code is None:
    +                    raise SandboxError("`bwrap' terminated during sandbox setup with exitcode {}".format(exit_code),
    +                                       reason="bwrap-sandbox-fail")
    +                exit_code = child_exit_code
    +
             self._vdir._mark_changed()
             return exit_code
     
    -    def run_bwrap(self, argv, stdin, stdout, stderr, interactive):
    +    def run_bwrap(self, argv, stdin, stdout, stderr, interactive, pass_fds):
             # Wrapper around subprocess.Popen() with common settings.
             #
             # This function blocks until the subprocess has terminated.
    @@ -317,6 +345,7 @@ class SandboxBwrap(Sandbox):
                     # The default is to share file descriptors from the parent process
                     # to the subprocess, which is rarely good for sandboxing.
                     close_fds=True,
    +                pass_fds=pass_fds,
                     stdin=stdin,
                     stdout=stdout,
                     stderr=stderr,
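
    To make the parsing loop above concrete: bwrap writes one small JSON object per
    line to the --json-status-fd descriptor, and only the object carrying the child's
    exit status matters here. A minimal sketch of how such a stream reduces to an exit
    code (the example lines are illustrative, not captured bwrap output):

        import json
        from contextlib import suppress

        # Hypothetical content of the json-status file after a failed build command:
        status_lines = [
            '{"child-pid": 12345}',
            '{"exit-code": 42}',
        ]

        child_exit_code = None
        for line in status_lines:
            with suppress(json.JSONDecodeError):
                o = json.loads(line)
                if isinstance(o, dict) and 'exit-code' in o:
                    child_exit_code = o['exit-code']
                    break

        # A missing 'exit-code' message means bwrap died during sandbox setup, which
        # the commit surfaces as a SandboxError with reason "bwrap-sandbox-fail".
        print(child_exit_code)  # -> 42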
    

  • tests/completions/completions.py
    @@ -44,6 +44,7 @@ MAIN_OPTIONS = [
         "--on-error ",
         "--pushers ",
         "--strict ",
    +    "--use-remotes ",
         "--verbose ",
         "--version ",
     ]
    @@ -117,6 +118,7 @@ def test_options(cli, cmd, word_idx, expected):
     
     @pytest.mark.parametrize("cmd,word_idx,expected", [
         ('bst --on-error ', 2, ['continue ', 'quit ', 'terminate ']),
    +    ('bst --use-remotes ', 2, ['all ', 'user ', 'none ']),
         ('bst show --deps ', 3, ['all ', 'build ', 'none ', 'plan ', 'run ']),
         ('bst show --deps=', 2, ['all ', 'build ', 'none ', 'plan ', 'run ']),
         ('bst show --deps b', 3, ['build ']),
    

  • tests/frontend/pull.py
    @@ -358,3 +358,75 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
     
             assert "INFO    Remote ({}) does not have".format(share.repo) in result.stderr
             assert "SKIPPED Pull" in result.stderr
    +
    +
    +# Tests that:
    +#
    +#  * The bst main option --use-remotes limits remote action
    +#    as expected for pull jobs
    +#
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_useremotes_cli_options(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as shareuser,\
    +        create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as shareproject:
    +
    +        # Add shareproject repo url to project.conf
    +        with open(os.path.join(project, "project.conf"), "a") as projconf:
    +            projconf.write("artifacts:\n  url: {}\n  push: True".format(shareproject.repo))
    +
    +        # First build the target element and push to the remotes.
    +        # We need the artifact available in the remotes to test against.
    +        cli.configure({
    +            'artifacts': {'url': shareuser.repo, 'push': True}
    +        })
    +        result = cli.run(project=project, args=['build', 'target.bst'])
    +        result.assert_success()
    +        assert cli.get_element_state(project, 'target.bst') == 'cached'
    +
    +        # Assert that everything is now cached in the remotes.
    +        all_elements = ['target.bst', 'import-bin.bst', 'compose-all.bst']
    +        for element_name in all_elements:
    +            assert_shared(cli, shareuser, project, element_name)
    +            assert_shared(cli, shareproject, project, element_name)
    +
    +        # Now we've pushed, delete the user's local artifact cache
    +        artifacts = os.path.join(cli.directory, 'artifacts')
    +        shutil.rmtree(artifacts)
    +
    +        # Assert that nothing is cached locally anymore
    +        for element_name in all_elements:
    +            assert cli.get_element_state(project, element_name) != 'cached'
    +
    +        # Attempt bst build with --use-remotes set as none, this should lead to
    +        # a complete rebuild without pulling from either artifact remote cache
    +        result = cli.run(project=project, args=['--use-remotes', 'none', 'build', 'target.bst'])
    +        result.assert_success()
    +        for element_name in all_elements:
    +            assert element_name not in result.get_pulled_elements()
    +
    +        # Delete local cache again
    +        artifacts = os.path.join(cli.directory, 'artifacts')
    +        shutil.rmtree(artifacts)
    +
    +        # Attempt bst build with --use-remotes set as user, as the shareuser is
    +        # passed in as user config and not via a project, assert project remote
    +        # was not attempted by it not being in the output
    +        result = cli.run(project=project, args=['--use-remotes', 'user', 'build', 'target.bst'])
    +        result.assert_success()
    +        for element_name in all_elements:
    +            assert element_name in result.get_pulled_elements()
    +        assert shareproject.repo not in result.stderr
    +
    +        # Delete local cache again
    +        artifacts = os.path.join(cli.directory, 'artifacts')
    +        shutil.rmtree(artifacts)
    +
    +        # Attempt bst build with --use-remotes set as all, this time
    +        # assert that project remote is attempted and in the output
    +        result = cli.run(project=project, args=['--use-remotes', 'all', 'build', 'target.bst'])
    +        result.assert_success()
    +        for element_name in all_elements:
    +            assert element_name in result.get_pulled_elements()
    +        assert shareproject.repo in result.stderr

  • tests/frontend/push.py
    @@ -409,3 +409,68 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
             assert not result.get_pushed_elements(), "No elements should have been pushed since the cache was populated"
             assert "INFO    Remote ({}) already has ".format(share.repo) in result.stderr
             assert "SKIPPED Push" in result.stderr
    +
    +
    +# Tests that:
    +#
    +#  * The bst main option --use-remotes limits remote action
    +#    as expected for push jobs
    +#
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_useremotes_cli_options(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as shareuser,\
    +        create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as shareproject:
    +
    +        # Add shareproject repo url to project.conf
    +        with open(os.path.join(project, "project.conf"), "a") as projconf:
    +            projconf.write("artifacts:\n  url: {}\n  push: True".format(shareproject.repo))
    +
    +        # Configure shareuser remote in user conf
    +        cli.configure({
    +            'artifacts': {'url': shareuser.repo, 'push': True}
    +        })
    +
    +        # First build the target element with --use-remotes set as none.
    +        # This should lead to a complete build without pushing to either artifact
    +        # remote cache
    +        result = cli.run(project=project, args=['--use-remotes', 'none', 'build', 'target.bst'])
    +        result.assert_success()
    +        assert not result.get_pushed_elements()
    +        assert cli.get_element_state(project, 'target.bst') == 'cached'
    +
    +        # Delete the artifacts from the local artifact cache
    +        all_elements = ['target.bst', 'import-bin.bst', 'compose-all.bst']
    +        for element_name in all_elements:
    +            cli.remove_artifact_from_cache(project, element_name)
    +
    +        # Assert that nothing is cached locally anymore
    +        for element_name in all_elements:
    +            assert cli.get_element_state(project, element_name) != 'cached'
    +
    +        # Attempt bst build with --use-remotes set as user, this should lead to
    +        # a complete rebuild, with artifacts pushed to the shareuser remote artifact cache
    +        # only. Assert project remote was not attempted by it not being in the output
    +        result = cli.run(project=project, args=['--use-remotes', 'user', 'build', 'target.bst'])
    +        result.assert_success()
    +        for element_name in all_elements:
    +            assert element_name in result.get_pushed_elements()
    +        for element_name in all_elements:
    +            assert_shared(cli, shareuser, project, element_name)
    +        assert shareproject.repo not in result.stderr
    +
    +        # Delete the artifacts from the local artifact cache
    +        all_elements = ['target.bst', 'import-bin.bst', 'compose-all.bst']
    +        for element_name in all_elements:
    +            cli.remove_artifact_from_cache(project, element_name)
    +
    +        # Attempt bst build with --use-remotes set as all, this should lead to
    +        # a complete rebuild, with artifacts pushed to both the shareuser and
    +        # shareproject remote artifacts caches
    +        result = cli.run(project=project, args=['--use-remotes', 'all', 'build', 'target.bst'])
    +        result.assert_success()
    +        for element_name in all_elements:
    +            assert element_name in result.get_pushed_elements()
    +        for element_name in all_elements:
    +            assert_shared(cli, shareproject, project, element_name)

  • tests/integration/project/elements/sandbox-bwrap/break-shell.bst (new file)
    +kind: manual
    +depends:
    +  - base/base-alpine.bst
    +
    +public:
    +  bst:
    +    integration-commands:
    +    - |
    +      chmod a-x /bin/sh

  • tests/integration/project/elements/sandbox-bwrap/command-exit-42.bst (new file)
    +kind: manual
    +depends:
    +  - base/base-alpine.bst
    +
    +config:
    +  build-commands:
    +  - |
    +    exit 42

  • tests/integration/project/elements/sandbox-bwrap/non-executable-shell.bst (new file)
    +kind: manual
    +
    +depends:
    +  - sandbox-bwrap/break-shell.bst
    +
    +config:
    +  build-commands:
    +  - |
    +    exit 42

  • tests/integration/sandbox-bwrap.py
     import os
     import pytest
     
    +from buildstream._exceptions import ErrorDomain
    +
     from tests.testutils import cli_integration as cli
     from tests.testutils.integration import assert_contains
    -from tests.testutils.site import HAVE_BWRAP
    +from tests.testutils.site import HAVE_BWRAP, HAVE_BWRAP_JSON_STATUS
     
     
     pytestmark = pytest.mark.integration
    @@ -29,3 +31,32 @@ def test_sandbox_bwrap_cleanup_build(cli, tmpdir, datafiles):
         # Here, BuildStream should not attempt any rmdir etc.
         result = cli.run(project=project, args=['build', element_name])
         assert result.exit_code == 0
    +
    +
    +@pytest.mark.skipif(not HAVE_BWRAP, reason='Only available with bubblewrap')
    +@pytest.mark.skipif(not HAVE_BWRAP_JSON_STATUS, reason='Only available with bubblewrap supporting --json-status-fd')
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_sandbox_bwrap_distinguish_setup_error(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    element_name = 'sandbox-bwrap/non-executable-shell.bst'
    +
    +    result = cli.run(project=project, args=['build', element_name])
    +    result.assert_task_error(error_domain=ErrorDomain.SANDBOX, error_reason="bwrap-sandbox-fail")
    +
    +
    +@pytest.mark.integration
    +@pytest.mark.skipif(not HAVE_BWRAP, reason='Only available with bubblewrap')
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_sandbox_bwrap_return_subprocess(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    element_name = 'sandbox-bwrap/command-exit-42.bst'
    +
    +    cli.configure({
    +        "logging": {
    +            "message-format": "%{element}|%{message}",
    +        },
    +    })
    +
    +    result = cli.run(project=project, args=['build', element_name])
    +    result.assert_task_error(error_domain=ErrorDomain.ELEMENT, error_reason=None)
    +    assert "sandbox-bwrap/command-exit-42.bst|Command 'exit 42' failed with exitcode 42" in result.stderr

  • tests/testutils/site.py
    @@ -4,7 +4,7 @@
     import os
     import sys
     
    -from buildstream import utils, ProgramNotFoundError
    +from buildstream import _site, utils, ProgramNotFoundError
     
     try:
         utils.get_host_tool('bzr')
    @@ -33,8 +33,10 @@ except (ImportError, ValueError):
     try:
         utils.get_host_tool('bwrap')
         HAVE_BWRAP = True
    +    HAVE_BWRAP_JSON_STATUS = _site.get_bwrap_version() >= (0, 3, 2)
     except ProgramNotFoundError:
         HAVE_BWRAP = False
    +    HAVE_BWRAP_JSON_STATUS = False
     
     try:
         utils.get_host_tool('lzip')
    


