[Notes] [Git][BuildStream/buildstream][richardmaw/fix-chroot-sandbox-devices] 13 commits: _project.py: Fix option resolution in element & source overrides

Tristan Van Berkom pushed to branch richardmaw/fix-chroot-sandbox-devices at BuildStream / buildstream

Commits:

17 changed files:

Changes:

  • buildstream/_artifactcache/artifactcache.py
    @@ -51,7 +51,7 @@ class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert cl
             url = _yaml.node_get(spec_node, str, 'url')
             push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
             if not url:
    -            provenance = _yaml.node_get_provenance(spec_node)
    +            provenance = _yaml.node_get_provenance(spec_node, 'url')
                 raise LoadError(LoadErrorReason.INVALID_DATA,
                                 "{}: empty artifact cache URL".format(provenance))

    @@ -67,6 +67,16 @@ class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert cl
             if client_cert and basedir:
                 client_cert = os.path.join(basedir, client_cert)

    +        if client_key and not client_cert:
    +            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
    +
    +        if client_cert and not client_key:
    +            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
    +
             return ArtifactCacheSpec(url, push, server_cert, client_key, client_cert)

    @@ -91,6 +101,7 @@ class ArtifactCache():
             self._cache_size = None               # The current cache size, sometimes it's an estimate
             self._cache_quota = None              # The cache quota
             self._cache_lower_threshold = None    # The target cache size for a cleanup
    +        self._remotes_setup = False           # Check to prevent double-setup of remotes

             os.makedirs(self.extractdir, exist_ok=True)
             os.makedirs(self.tmpdir, exist_ok=True)

    @@ -143,6 +154,10 @@ class ArtifactCache():
         #
         def setup_remotes(self, *, use_config=False, remote_url=None):

    +        # Ensure we do not double-initialise since this can be expensive
    +        assert(not self._remotes_setup)
    +        self._remotes_setup = True
    +
             # Initialize remote artifact caches. We allow the commandline to override
             # the user config in some cases (for example `bst push --remote=...`).
             has_remote_caches = False

  • buildstream/_fuse/hardlinks.py
    @@ -42,9 +42,10 @@ from .mount import Mount
     #
     class SafeHardlinks(Mount):

    -    def __init__(self, directory, tempdir):
    +    def __init__(self, directory, tempdir, fuse_mount_options={}):
             self.directory = directory
             self.tempdir = tempdir
    +        super().__init__(fuse_mount_options=fuse_mount_options)

         def create_operations(self):
             return SafeHardlinkOps(self.directory, self.tempdir)

    @@ -121,7 +122,7 @@ class SafeHardlinkOps(Operations):
             st = os.lstat(full_path)
             return dict((key, getattr(st, key)) for key in (
                 'st_atime', 'st_ctime', 'st_gid', 'st_mode',
    -            'st_mtime', 'st_nlink', 'st_size', 'st_uid'))
    +            'st_mtime', 'st_nlink', 'st_size', 'st_uid', 'st_rdev'))

         def readdir(self, path, fh):
             full_path = self._full_path(path)

  • buildstream/_fuse/mount.py
    @@ -87,6 +87,9 @@ class Mount():
         #               User Facing API                #
         ################################################

    +    def __init__(self, fuse_mount_options={}):
    +        self._fuse_mount_options = fuse_mount_options
    +
         # mount():
         #
         # User facing API for mounting a fuse subclass implementation

    @@ -184,7 +187,8 @@ class Mount():
             # Run fuse in foreground in this child process, internally libfuse
             # will handle SIGTERM and gracefully exit it's own little main loop.
             #
    -        FUSE(self.__operations, self.__mountpoint, nothreads=True, foreground=True, nonempty=True)
    +        FUSE(self.__operations, self.__mountpoint, nothreads=True, foreground=True, nonempty=True,
    +             **self._fuse_mount_options)

             # Explicit 0 exit code, if the operations crashed for some reason, the exit
             # code will not be 0, and we want to know about it.

  • buildstream/_project.py
    @@ -598,7 +598,10 @@ class Project():
             # any conditionals specified for project option declarations,
             # or conditionally specifying the project name; will be ignored.
             #
    +        # Don't forget to also resolve options in the element and source overrides.
             output.options.process_node(config)
    +        output.options.process_node(output.element_overrides)
    +        output.options.process_node(output.source_overrides)

             # Load base variables
             output.base_variables = _yaml.node_get(config, Mapping, 'variables')

  • buildstream/sandbox/_mount.py
    @@ -30,7 +30,7 @@ from .._fuse import SafeHardlinks
     # Helper data object representing a single mount point in the mount map
     #
     class Mount():
    -    def __init__(self, sandbox, mount_point, safe_hardlinks):
    +    def __init__(self, sandbox, mount_point, safe_hardlinks, fuse_mount_options={}):
             scratch_directory = sandbox._get_scratch_directory()
             # Getting _get_underlying_directory() here is acceptable as
             # we're part of the sandbox code. This will fail if our

    @@ -39,6 +39,7 @@ class Mount():

             self.mount_point = mount_point
             self.safe_hardlinks = safe_hardlinks
    +        self._fuse_mount_options = fuse_mount_options

             # FIXME: When the criteria for mounting something and it's parent
             #        mount is identical, then there is no need to mount an additional

    @@ -82,7 +83,7 @@ class Mount():
         @contextmanager
         def mounted(self, sandbox):
             if self.safe_hardlinks:
    -            mount = SafeHardlinks(self.mount_origin, self.mount_tempdir)
    +            mount = SafeHardlinks(self.mount_origin, self.mount_tempdir, self._fuse_mount_options)
                 with mount.mounted(self.mount_source):
                     yield
             else:

    @@ -100,12 +101,12 @@
     #
     class MountMap():

    -    def __init__(self, sandbox, root_readonly):
    +    def __init__(self, sandbox, root_readonly, fuse_mount_options={}):
             # We will be doing the mounts in the order in which they were declared.
             self.mounts = OrderedDict()

             # We want safe hardlinks on rootfs whenever root is not readonly
    -        self.mounts['/'] = Mount(sandbox, '/', not root_readonly)
    +        self.mounts['/'] = Mount(sandbox, '/', not root_readonly, fuse_mount_options)

             for mark in sandbox._get_marked_directories():
                 directory = mark['directory']

    @@ -113,7 +114,7 @@

                 # We want safe hardlinks for any non-root directory where
                 # artifacts will be staged to
    -            self.mounts[directory] = Mount(sandbox, directory, artifact)
    +            self.mounts[directory] = Mount(sandbox, directory, artifact, fuse_mount_options)

         # get_mount_source()
         #

  • buildstream/sandbox/_sandboxchroot.py
    @@ -35,6 +35,9 @@ from . import Sandbox, SandboxFlags


     class SandboxChroot(Sandbox):
    +
    +    _FUSE_MOUNT_OPTIONS = {'dev': True}
    +
         def __init__(self, *args, **kwargs):
             super().__init__(*args, **kwargs)

    @@ -67,7 +70,8 @@ class SandboxChroot(Sandbox):

             # Create the mount map, this will tell us where
             # each mount point needs to be mounted from and to
    -        self.mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY)
    +        self.mount_map = MountMap(self, flags & SandboxFlags.ROOT_READ_ONLY,
    +                                  self._FUSE_MOUNT_OPTIONS)
             root_mount_source = self.mount_map.get_mount_source('/')

             # Create a sysroot and run the command inside it

  • buildstream/sandbox/_sandboxremote.py
    @@ -27,7 +27,7 @@ from . import Sandbox
     from ..storage._filebaseddirectory import FileBasedDirectory
     from ..storage._casbaseddirectory import CasBasedDirectory
     from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
    -from .._artifactcache.cascache import CASCache
    +from .._platform import Platform


     class SandboxError(Exception):

    @@ -43,7 +43,6 @@ class SandboxRemote(Sandbox):

         def __init__(self, *args, **kwargs):
             super().__init__(*args, **kwargs)
    -        self.cascache = None

             url = urlparse(kwargs['server_url'])
             if not url.scheme or not url.hostname or not url.port:

    @@ -56,12 +55,6 @@

             self.server_url = '{}:{}'.format(url.hostname, url.port)

    -    def _get_cascache(self):
    -        if self.cascache is None:
    -            self.cascache = CASCache(self._get_context())
    -            self.cascache.setup_remotes(use_config=True)
    -        return self.cascache
    -
         def run_remote_command(self, command, input_root_digest, working_directory, environment):
             # Sends an execution request to the remote execution server.
             #

    @@ -78,8 +71,8 @@
                                                       output_files=[],
                                                       output_directories=[self._output_directory],
                                                       platform=None)
    -
    -        cascache = self._get_cascache()
    +        platform = Platform.get_platform()
    +        cascache = platform.artifactcache
             # Upload the Command message to the remote CAS server
             command_digest = cascache.push_message(self._get_project(), remote_command)
             if not command_digest or not cascache.verify_digest_pushed(self._get_project(), command_digest):

    @@ -141,7 +134,8 @@
             if tree_digest is None or not tree_digest.hash:
                 raise SandboxError("Output directory structure had no digest attached.")

    -        cascache = self._get_cascache()
    +        platform = Platform.get_platform()
    +        cascache = platform.artifactcache
             # Now do a pull to ensure we have the necessary parts.
             dir_digest = cascache.pull_tree(self._get_project(), tree_digest)
             if dir_digest is None or not dir_digest.hash or not dir_digest.size_bytes:

    @@ -176,7 +170,8 @@

             upload_vdir.recalculate_hash()

    -        cascache = self._get_cascache()
    +        platform = Platform.get_platform()
    +        cascache = platform.artifactcache
             # Now, push that key (without necessarily needing a ref) to the remote.
             vdir_digest = cascache.push_directory(self._get_project(), upload_vdir)
             if not vdir_digest or not cascache.verify_digest_pushed(self._get_project(), vdir_digest):

  • tests/artifactcache/config.py
    @@ -9,8 +9,12 @@ from buildstream._context import Context
     from buildstream._project import Project
     from buildstream.utils import _deduplicate
     from buildstream import _yaml
    +from buildstream._exceptions import ErrorDomain, LoadErrorReason

    +from tests.testutils.runcli import cli

    +
    +DATA_DIR = os.path.dirname(os.path.realpath(__file__))
     cache1 = ArtifactCacheSpec(url='https://example.com/cache1', push=True)
     cache2 = ArtifactCacheSpec(url='https://example.com/cache2', push=False)
     cache3 = ArtifactCacheSpec(url='https://example.com/cache3', push=False)

    @@ -106,3 +110,33 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
         # Verify that it was correctly read.
         expected_cache_specs = list(_deduplicate(itertools.chain(override_caches, project_caches, user_caches)))
         assert parsed_cache_specs == expected_cache_specs
    +
    +
    +# Assert that if either the client key or client cert is specified
    +# without specifying it's counterpart, we get a comprehensive LoadError
    +# instead of an unhandled exception.
    +@pytest.mark.datafiles(DATA_DIR)
    +@pytest.mark.parametrize('config_key, config_value', [
    +    ('client-cert', 'client.crt'),
    +    ('client-key', 'client.key')
    +])
    +def test_missing_certs(cli, datafiles, config_key, config_value):
    +    project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
    +
    +    project_conf = {
    +        'name': 'test',
    +
    +        'artifacts': {
    +            'url': 'https://cache.example.com:12345',
    +            'push': 'true',
    +            config_key: config_value
    +        }
    +    }
    +    project_conf_file = os.path.join(project, 'project.conf')
    +    _yaml.dump(project_conf, project_conf_file)
    +
    +    # Use `pull` here to ensure we try to initialize the remotes, triggering the error
    +    #
    +    # This does not happen for a simple `bst show`.
    +    result = cli.run(project=project, args=['pull', 'element.bst'])
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)

  • tests/artifactcache/missing-certs/certificates/client.crt

  • tests/artifactcache/missing-certs/certificates/client.key

  • tests/artifactcache/missing-certs/element.bst
    +kind: autotools

  • tests/artifactcache/pull.py
    @@ -137,7 +137,6 @@ def _test_pull(user_config_file, project_dir, artifact_dir,

         # Manually setup the CAS remote
         cas.setup_remotes(use_config=True)
    -    cas.initialize_remotes()

         if cas.has_push_remotes(element=element):
             # Push the element's artifact

    @@ -274,7 +273,6 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest

         # Manually setup the CAS remote
         cas.setup_remotes(use_config=True)
    -    cas.initialize_remotes()

         if cas.has_push_remotes():
             directory = remote_execution_pb2.Directory()

    @@ -310,7 +308,6 @@ def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest

         # Manually setup the CAS remote
         cas.setup_remotes(use_config=True)
    -    cas.initialize_remotes()

         if cas.has_push_remotes():
             # Pull the artifact using the Tree object

  • tests/format/option-overrides/element.bst
    +kind: autotools

  • tests/format/option-overrides/project.conf
    +# Test case ensuring that we can use options
    +# in the element overrides.
    +#
    +name: test
    +
    +options:
    +  arch:
    +    type: arch
    +    description: architecture
    +    values: [i686, x86_64]
    +
    +elements:
    +  autotools:
    +    variables:
    +      (?):
    +      - arch == 'i686':
    +          conf-global: --host=i686-unknown-linux-gnu
    +      - arch == 'x86_64':
    +          conf-global: --host=x86_64-unknown-linux-gnu

  • tests/format/optionoverrides.py
    +import os
    +import pytest
    +from buildstream import _yaml
    +from tests.testutils.runcli import cli
    +
    +# Project directory
    +DATA_DIR = os.path.dirname(os.path.realpath(__file__))
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +@pytest.mark.parametrize("arch", [('i686'), ('x86_64')])
    +def test_override(cli, datafiles, arch):
    +    project = os.path.join(datafiles.dirname, datafiles.basename, 'option-overrides')
    +
    +    bst_args = ['--option', 'arch', arch]
    +    bst_args += [
    +        'show',
    +        '--deps', 'none',
    +        '--format', '%{vars}',
    +        'element.bst'
    +    ]
    +    result = cli.run(project=project, silent=True, args=bst_args)
    +    result.assert_success()
    +
    +    # See the associated project.conf for the expected values
    +    expected_value = '--host={}-unknown-linux-gnu'.format(arch)
    +
    +    loaded = _yaml.load_data(result.output)
    +    assert loaded['conf-global'] == expected_value

  • tests/integration/project/elements/integration.bst
    +kind: manual
    +depends:
    +- base.bst
    +
    +public:
    +  bst:
    +    integration-commands:
    +    - |
    +      echo noise >/dev/null

  • tests/integration/shell.py
    @@ -342,3 +342,13 @@ def test_sysroot_workspace_visible(cli, tmpdir, datafiles):
         ])
         assert result.exit_code == 0
         assert result.output == workspace_hello
    +
    +
    +# Test system integration commands can access devices in /dev
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_integration_devices(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    element_name = 'integration.bst'
    +
    +    result = execute_shell(cli, project, ["true"], element=element_name)
    +    assert result.exit_code == 0
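
For readers skimming the diff, the following is an illustrative sketch (not part of the pushed changes) of how the new fuse_mount_options argument threads from SandboxChroot through MountMap, Mount and SafeHardlinks down to the FUSE() call in _fuse/mount.py. The class bodies below are simplified stand-ins for the real BuildStream implementations; the real constructors take more arguments.

    # Simplified illustration: fuse_mount_options flowing down to libfuse.
    class Mount:
        def __init__(self, fuse_mount_options={}):
            # The base class stores the options; the real mount() forwards them
            # as keyword arguments to fusepy's FUSE(), roughly:
            #   FUSE(ops, mountpoint, nothreads=True, foreground=True,
            #        nonempty=True, **self._fuse_mount_options)
            self._fuse_mount_options = fuse_mount_options

        def mount(self, mountpoint):
            # Stand-in for the real FUSE mount in a child process
            print("would mount {} with options {}".format(mountpoint, self._fuse_mount_options))


    class SafeHardlinks(Mount):
        def __init__(self, directory, tempdir, fuse_mount_options={}):
            self.directory = directory
            self.tempdir = tempdir
            super().__init__(fuse_mount_options=fuse_mount_options)


    # SandboxChroot passes {'dev': True}, so the FUSE-backed rootfs is not
    # mounted 'nodev' and integration commands can use nodes such as /dev/null.
    FUSE_MOUNT_OPTIONS = {'dev': True}
    mount = SafeHardlinks('/path/to/rootfs', '/path/to/tempdir', FUSE_MOUNT_OPTIONS)
    mount.mount('/path/to/mountpoint')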
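The artifact cache change above rejects configurations that supply only one half of a TLS client credential pair at load time. Below is a rough standalone sketch of that pairing check, again not part of the push: a plain dict stands in for the parsed YAML spec node and ValueError stands in for BuildStream's LoadError.

    # Standalone sketch of the client-key/client-cert pairing check added to
    # ArtifactCacheSpec parsing ('spec' is a stand-in for the YAML node).
    def check_client_credentials(spec):
        client_key = spec.get('client-key')
        client_cert = spec.get('client-cert')

        if client_key and not client_cert:
            raise ValueError("'client-key' was specified without 'client-cert'")
        if client_cert and not client_key:
            raise ValueError("'client-cert' was specified without 'client-key'")


    # A remote configured with only a client key is now reported as a load
    # error instead of failing later with an unhandled exception.
    try:
        check_client_credentials({'url': 'https://cache.example.com:12345',
                                  'push': True,
                                  'client-key': 'client.key'})
    except ValueError as e:
        print("configuration rejected:", e)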


