[Notes] [Git][BuildStream/buildstream][537-mirror-fallback-does-not-work-for-git] 20 commits: doc: Mention the Fedora packages



Title: GitLab

Jonathan Maw pushed to branch 537-mirror-fallback-does-not-work-for-git at BuildStream / buildstream

Commits:

12 changed files:

Changes:

  • .gitlab/issue_templates/bst_task.md
    ... ... @@ -6,9 +6,9 @@
    6 6
     
    
    7 7
     [//]: # (Short summary of the action to be executed)
    
    8 8
     
    
    9
    -* [  ] Action 1
    
    10
    -* [  ] Action 2
    
    11
    -* [  ] Action 3
    
    9
    +* [ ] Action 1
    
    10
    +* [ ] Action 2
    
    11
    +* [ ] Action 3
    
    12 12
     
    
    13 13
     ## Acceptance Criteria
    
    14 14
     
    

  • buildstream/_scheduler/queues/pullqueue.py
    ... ... @@ -29,7 +29,7 @@ class PullQueue(Queue):
    29 29
     
    
    30 30
         action_name = "Pull"
    
    31 31
         complete_name = "Pulled"
    
    32
    -    resources = [ResourceType.UPLOAD]
    
    32
    +    resources = [ResourceType.DOWNLOAD]
    
    33 33
     
    
    34 34
         def process(self, element):
    
    35 35
             # returns whether an artifact was downloaded or not
    

  • buildstream/_versions.py
    ... ... @@ -23,7 +23,7 @@
    23 23
     # This version is bumped whenever enhancements are made
    
    24 24
     # to the `project.conf` format or the core element format.
    
    25 25
     #
    
    26
    -BST_FORMAT_VERSION = 12
    
    26
    +BST_FORMAT_VERSION = 13
    
    27 27
     
    
    28 28
     
    
    29 29
     # The base BuildStream artifact version
    

  • buildstream/data/userconfig.yaml
    ... ... @@ -35,13 +35,13 @@ cache:
    35 35
     #
    
    36 36
     scheduler:
    
    37 37
     
    
    38
    -  # Maximum number of simultaneous source downloading tasks.
    
    38
    +  # Maximum number of simultaneous downloading tasks.
    
    39 39
       fetchers: 10
    
    40 40
     
    
    41 41
       # Maximum number of simultaneous build tasks.
    
    42 42
       builders: 4
    
    43 43
     
    
    44
    -  # Maximum number of simultaneous artifact uploading tasks.
    
    44
    +  # Maximum number of simultaneous uploading tasks.
    
    45 45
       pushers: 4
    
    46 46
     
    
    47 47
       # Maximum number of retries for network tasks.
    

  • buildstream/plugins/sources/git.py
    ... ... @@ -91,16 +91,18 @@ GIT_MODULES = '.gitmodules'
    91 91
     #
    
    92 92
     class GitMirror(SourceFetcher):
    
    93 93
     
    
    94
    -    def __init__(self, source, path, url, ref):
    
    94
    +    def __init__(self, source, path, url, ref, *, parent=None):
    
    95 95
     
    
    96 96
             super().__init__()
    
    97 97
             self.source = source
    
    98
    -        self.path = path
    
    98
    +        self.parent = parent
    
    99 99
             self.url = url
    
    100
    -        self.ref = ref
    
    101 100
             self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
    
    102 101
             self.mark_download_url(url)
    
    103 102
     
    
    103
    +        self._path = path
    
    104
    +        self._ref = ref
    
    105
    +
    
    104 106
         # Ensures that the mirror exists
    
    105 107
         def ensure(self, alias_override=None):
    
    106 108
     
    
    ... ... @@ -223,8 +225,7 @@ class GitMirror(SourceFetcher):
    223 225
                              fail="Failed to checkout git ref {}".format(self.ref),
    
    224 226
                              cwd=fullpath)
    
    225 227
     
    
    226
    -    # List the submodules (path/url tuples) present at the given ref of this repo
    
    227
    -    def submodule_list(self):
    
    228
    +    def _read_gitmodules(self):
    
    228 229
             modules = "{}:{}".format(self.ref, GIT_MODULES)
    
    229 230
             exit_code, output = self.source.check_output(
    
    230 231
                 [self.source.host_git, 'show', modules], cwd=self.mirror)
    
    ... ... @@ -247,10 +248,15 @@ class GitMirror(SourceFetcher):
    247 248
             for section in parser.sections():
    
    248 249
                 # validate section name against the 'submodule "foo"' pattern
    
    249 250
                 if re.match(r'submodule "(.*)"', section):
    
    250
    -                path = parser.get(section, 'path')
    
    251
    -                url = parser.get(section, 'url')
    
    251
    +                yield (parser, section)
    
    252 252
     
    
    253
    -                yield (path, url)
    
    253
    +    # List the submodules (path/url tuples) present at the given ref of this repo
    
    254
    +    def submodule_list(self):
    
    255
    +        for parser, section in self._read_gitmodules():
    
    256
    +            path = parser.get(section, 'path')
    
    257
    +            url = parser.get(section, 'url')
    
    258
    +
    
    259
    +            yield (path, url)
    
    254 260
     
    
    255 261
         # Fetch the ref which this mirror requires its submodule to have,
    
    256 262
         # at the given ref of this mirror.
    
    ... ... @@ -287,6 +293,33 @@ class GitMirror(SourceFetcher):
    287 293
     
    
    288 294
                 return None
    
    289 295
     
    
    296
    +    def get_submodule_path(self, url):
    
    297
    +        for parser, section in self._read_gitmodules():
    
    298
    +            parsed_url = parser.get(section, 'url')
    
    299
    +            if parsed_url == url:
    
    300
    +                return parser.get(section, 'path')
    
    301
    +
    
    302
    +        raise SourceError("{}: No submodule found with url '{}'".format(self.source, url))
    
    303
    +
    
    304
    +    @property
    
    305
    +    def path(self):
    
    306
    +        if self._path is None:
    
    307
    +            self._path = self.parent.get_submodule_path()
    
    308
    +
    
    309
    +        return self._path
    
    310
    +
    
    311
    +    @property
    
    312
    +    def ref(self):
    
    313
    +        # The top-level GitMirror may have ref as None, submodules don't.
    
    314
    +        if self._ref is None and self.parent:
    
    315
    +            self._ref = self.parent.submodule_ref(self.path)
    
    316
    +
    
    317
    +        return self._ref
    
    318
    +
    
    319
    +    @ref.setter
    
    320
    +    def ref(self, ref):
    
    321
    +        self._ref = ref
    
    322
    +
    
    290 323
     
    
    291 324
     class GitSource(Source):
    
    292 325
         # pylint: disable=attribute-defined-outside-init
    
    ... ... @@ -303,6 +336,8 @@ class GitSource(Source):
    303 336
             self.checkout_submodules = self.node_get_member(node, bool, 'checkout-submodules', True)
    
    304 337
             self.submodules = []
    
    305 338
     
    
    339
    +        self.using_source_fetchers = (self.original_url != self.translate_url(self.original_url))
    
    340
    +
    
    306 341
             # Parse a dict of submodule overrides, stored in the submodule_overrides
    
    307 342
             # and submodule_checkout_overrides dictionaries.
    
    308 343
             self.submodule_overrides = {}
    
    ... ... @@ -311,6 +346,11 @@ class GitSource(Source):
    311 346
             for path, _ in self.node_items(modules):
    
    312 347
                 submodule = self.node_get_member(modules, Mapping, path)
    
    313 348
                 url = self.node_get_member(submodule, str, 'url', None)
    
    349
    +
    
    350
    +            if self.using_source_fetchers:
    
    351
    +                submodule_mirror = GitMirror(self, None, url, None, parent=self.mirror)
    
    352
    +                self.submodules.append(submodule_mirror)
    
    353
    +
    
    314 354
                 self.submodule_overrides[path] = url
    
    315 355
                 if 'checkout' in submodule:
    
    316 356
                     checkout = self.node_get_member(submodule, bool, 'checkout')
    
    ... ... @@ -376,6 +416,24 @@ class GitSource(Source):
    376 416
     
    
    377 417
             return ret
    
    378 418
     
    
    419
    +    def fetch(self):
    
    420
    +
    
    421
    +        with self.timed_activity("Fetching {}".format(self.mirror.url), silent_nested=True):
    
    422
    +
    
    423
    +            # Here we are only interested in ensuring that our mirror contains
    
    424
    +            # the self.mirror.ref commit.
    
    425
    +            self.mirror.ensure()
    
    426
    +            if not self.mirror.has_ref():
    
    427
    +                self.mirror.fetch()
    
    428
    +
    
    429
    +            self.mirror.assert_ref()
    
    430
    +
    
    431
    +            # Here after performing any fetches, we need to also ensure that
    
    432
    +            # we've cached the desired refs in our mirrors of submodules.
    
    433
    +            #
    
    434
    +            self.refresh_submodules()
    
    435
    +            self.fetch_submodules()
    
    436
    +
    
    379 437
         def init_workspace(self, directory):
    
    380 438
             # XXX: may wish to refactor this as some code dupe with stage()
    
    381 439
             self.refresh_submodules()
    
    ... ... @@ -408,8 +466,11 @@ class GitSource(Source):
    408 466
                         mirror.stage(directory)
    
    409 467
     
    
    410 468
         def get_source_fetchers(self):
    
    411
    -        self.refresh_submodules()
    
    412
    -        return [self.mirror] + self.submodules
    
    469
    +        # If the url does not contain an alias, then it does not need SourceFetchers
    
    470
    +        if self.mirror.url == self.translate_url(self.mirror.url):
    
    471
    +            return []
    
    472
    +        else:
    
    473
    +            return [self.mirror] + self.submodules
    
    413 474
     
    
    414 475
         ###########################################################
    
    415 476
         #                     Local Functions                     #
    
    ... ... @@ -432,6 +493,11 @@ class GitSource(Source):
    432 493
         # Assumes that we have our mirror and we have the ref which we point to
    
    433 494
         #
    
    434 495
         def refresh_submodules(self):
    
    496
    +
    
    497
    +        # When using source fetchers, the submodule list is defined by the 'submodules' config field
    
    498
    +        if self.using_source_fetchers:
    
    499
    +            return
    
    500
    +
    
    435 501
             self.mirror.ensure()
    
    436 502
             submodules = []
    
    437 503
     
    
    ... ... @@ -454,6 +520,19 @@ class GitSource(Source):
    454 520
     
    
    455 521
             self.submodules = submodules
    
    456 522
     
    
    523
    +    # Ensures that we have mirrored git repositories for all
    
    524
    +    # the submodules existing at the given commit of the main git source.
    
    525
    +    #
    
    526
    +    # Also ensure that these mirrors have the required commits
    
    527
    +    # referred to at the given commit of the main git source.
    
    528
    +    #
    
    529
    +    def fetch_submodules(self):
    
    530
    +        for mirror in self.submodules:
    
    531
    +            mirror.ensure()
    
    532
    +            if not mirror.has_ref():
    
    533
    +                mirror.fetch()
    
    534
    +                mirror.assert_ref()
    
    535
    +
    
    457 536
     
    
    458 537
     # Plugin entry point
    
    459 538
     def setup():
    

  • buildstream/plugins/sources/remote.py
    ... ... @@ -35,6 +35,10 @@ remote - stage files from remote urls
    35 35
        # If not specified, the basename of the url will be used.
    
    36 36
        # filename: customfilename
    
    37 37
     
    
    38
    +   # Optionally specify whether the downloaded file should be
    
    39
    +   # marked executable.
    
    40
    +   # executable: true
    
    41
    +
    
    38 42
        # Specify the url. Using an alias defined in your project
    
    39 43
        # configuration is encouraged. 'bst track' will update the
    
    40 44
        # sha256sum in 'ref' to the downloaded file's sha256sum.
    
    ... ... @@ -43,6 +47,8 @@ remote - stage files from remote urls
    43 47
        # Specify the ref. It's a sha256sum of the file you download.
    
    44 48
        ref: 6c9f6f68a131ec6381da82f2bff978083ed7f4f7991d931bfa767b7965ebc94b
    
    45 49
     
    
    50
    +
    
    51
    +
    
    46 52
     .. note::
    
    47 53
     
    
    48 54
        The ``remote`` plugin is available since :ref:`format version 10 <project_format_version>`
    
    ... ... @@ -60,22 +66,31 @@ class RemoteSource(DownloadableFileSource):
    60 66
             super().configure(node)
    
    61 67
     
    
    62 68
             self.filename = self.node_get_member(node, str, 'filename', os.path.basename(self.url))
    
    69
    +        self.executable = self.node_get_member(node, bool, 'executable', False)
    
    63 70
     
    
    64 71
             if os.sep in self.filename:
    
    65 72
                 raise SourceError('{}: filename parameter cannot contain directories'.format(self),
    
    66 73
                                   reason="filename-contains-directory")
    
    67
    -        self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename'])
    
    74
    +        self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename', 'executable'])
    
    68 75
     
    
    69 76
         def get_unique_key(self):
    
    70
    -        return super().get_unique_key() + [self.filename]
    
    77
    +        return super().get_unique_key() + [self.filename, self.executable]
    
    71 78
     
    
    72 79
         def stage(self, directory):
    
    73 80
             # Same as in local plugin, don't use hardlinks to stage sources, they
    
    74 81
             # are not write protected in the sandbox.
    
    75 82
             dest = os.path.join(directory, self.filename)
    
    76 83
             with self.timed_activity("Staging remote file to {}".format(dest)):
    
    84
    +
    
    77 85
                 utils.safe_copy(self._get_mirror_file(), dest)
    
    78 86
     
    
    87
    +            # To prevent user's umask introducing variability here, explicitly set
    
    88
    +            # file modes.
    
    89
    +            if self.executable:
    
    90
    +                os.chmod(dest, 0o755)
    
    91
    +            else:
    
    92
    +                os.chmod(dest, 0o644)
    
    93
    +
    
    79 94
     
    
    80 95
     def setup():
    
    81 96
         return RemoteSource

  • buildstream/source.py
    ... ... @@ -393,8 +393,8 @@ class Source(Plugin):
    393 393
             """Get the objects that are used for fetching
    
    394 394
     
    
    395 395
             If this source doesn't download from multiple URLs,
    
    396
    -        returning None and falling back on the default behaviour
    
    397
    -        is recommended.
    
    396
    +        returning an empty list and falling back on the default
    
    397
    +        behaviour is recommended.
    
    398 398
     
    
    399 399
             Returns:
    
    400 400
                list: A list of SourceFetchers. If SourceFetchers are not supported,
    

  • doc/source/install_artifacts.rst
    ... ... @@ -143,6 +143,50 @@ Instance with push and requiring client authentication:
    143 143
     
    
    144 144
         bst-artifact-server --port 11002 --server-key server.key --server-cert server.crt --client-certs authorized.crt --enable-push /home/artifacts/artifacts
    
    145 145
     
    
    146
    +Managing the cache with systemd
    
    147
    +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    
    148
    +
    
    149
   +It is better to run the cache as a systemd service, especially if it is running on a dedicated server, as this will allow systemd to manage the cache, in case the server ever encounters any issues.
    
    150
    +
    
    151
    +Below are two examples of how to run the cache server as a systemd service, one is for pull only and the other is configured for push & pull.
    
    152
    +
    
    153
    +.. code:: ini
    
    154
    +
    
    155
    +   #
    
    156
    +   # Pull
    
    157
    +   #
    
    158
    +   [Unit]
    
    159
    +   Description=Buildstream Artifact pull server
    
    160
    +   After=remote-fs.target network-online.target
    
    161
    +
    
    162
    +   [Service]
    
    163
    +   Environment="LC_ALL=C.UTF-8"
    
    164
    +   ExecStart=/usr/local/bin/bst-artifact-server --port 11001 --server-key {{certs_path}}/privkey.pem --
    
    165
    +   server-cert {{certs_path}}/fullchain.pem {{artifacts_path}}
    
    166
    +   User=artifacts
    
    167
    +
    
    168
    +   [Install]
    
    169
    +   WantedBy=multi-user.target
    
    170
    +
    
    171
    +
    
    172
    +   #
    
    173
    +   # Pull/Push
    
    174
    +   #
    
    175
    +   [Unit]
    
    176
    +   Description=Buildstream Artifact pull/push server
    
    177
    +   After=remote-fs.target network-online.target
    
    178
    +
    
    179
    +   [Service]
    
    180
    +   Environment="LC_ALL=C.UTF-8"
    
    181
    +   ExecStart=/usr/local/bin/bst-artifact-server --port 11002 --server-key {{certs_path}}/privkey.pem --
    
    182
    +   server-cert {{certs_path}}/fullchain.pem --client-certs /home/artifacts/authorized.crt --enable-push /
    
    183
    +   {{artifacts_path}}
    
    184
    +   User=artifacts
    
    185
    +
    
    186
    +   [Install]
    
    187
    +   WantedBy=multi-user.target
    
    188
    +
    
    189
   +Here we define when systemd should start the service, which is after the networking stack has been started. We then define how to run the cache with the desired configuration, under the artifacts user. The {{ }} placeholders denote where you should edit these files to point to your desired locations.
    
    146 190
     
    
    147 191
     User configuration
    
    148 192
     ~~~~~~~~~~~~~~~~~~
    

  • doc/source/install_linux_distro.rst
    ... ... @@ -57,9 +57,20 @@ Install the dependencies with::
    57 57
     For the default plugins::
    
    58 58
     
    
    59 59
       sudo pacman -S \
    
    60
    -      bzr git lzip ostree patch python-arpy python-gobject
    
    60
    +      bzr git lzip ostree patch python-gobject
    
    61 61
     
    
    62 62
     
    
    63
    +The package *python-arpy* is required by the deb source plugin. This is not
    
    64
    +obtainable via `pacman`, you must get *python-arpy* from AUR:
    
    65
    +https://aur.archlinux.org/packages/python-arpy/
    
    66
    +
    
    67
    +To install::
    
    68
    +
    
    69
    +  wget https://aur.archlinux.org/cgit/aur.git/snapshot/python-arpy.tar.gz
    
    70
    +  tar -xvf python-arpy.tar.gz
    
    71
    +  cd python-arpy
    
    72
    +  makepkg -si
    
    73
    +
    
    63 74
     Debian
    
    64 75
     ++++++
    
    65 76
     Install the dependencies with::
    
    ... ... @@ -132,8 +143,8 @@ For the default plugins::
    132 143
     
    
    133 144
     Ubuntu 16.04 LTS
    
    134 145
     ^^^^^^^^^^^^^^^^
    
    135
    -On Ubuntu 16.04, neither `bubblewrap<https://github.com/projectatomic/bubblewrap/>`
    
    136
    -or `ostree<https://github.com/ostreedev/ostree>` are available in the official repositories.
    
    146
    +On Ubuntu 16.04, neither `bubblewrap <https://github.com/projectatomic/bubblewrap/>`_
    
    147
    +or `ostree <https://github.com/ostreedev/ostree>`_ are available in the official repositories.
    
    137 148
     You will need to install them in whichever way you see fit. Refer to the upstream documentation
    
    138 149
     for advice on this.
    
    139 150
     
    
    ... ... @@ -216,8 +227,21 @@ Installing from distro packages
    216 227
     
    
    217 228
     Arch Linux
    
    218 229
     ~~~~~~~~~~
    
    219
    -Install `buildstream <https://aur.archlinux.org/packages/buildstream>`_
    
    220
    -from `AUR <https://wiki.archlinux.org/index.php/Arch_User_Repository#Installing_packages>`_.
    
    221
    -Alternatively, use
    
    222
    -`buildstream-git <https://aur.archlinux.org/packages/buildstream-git>`_
    
    223
    -for the lastest version of the development branch.
    230
    +Packages for Arch exist in `AUR <https://wiki.archlinux.org/index.php/Arch_User_Repository#Installing_packages>`_.
    
    231
    +Two different package versions are available:
    
    232
    +
    
    233
    +* Latest release: `buildstream <https://aur.archlinux.org/packages/buildstream>`_
    
    234
    +* Latest development snapshot: `buildstream-git <https://aur.archlinux.org/packages/buildstream-git>`_
    
    235
    +
    
    236
    +
    
    237
    +Fedora
    
    238
    +~~~~~~
    
    239
    +
    
    240
    +BuildStream is not yet in the official Fedora repositories, but you can
    
    241
    +install it from a Copr::
    
    242
    +
    
    243
    +  sudo dnf copr enable bochecha/buildstream
    
    244
    +  sudo dnf install buildstream
    
    245
    +
    
    246
    +Optionally, install the ``buildstream-docs`` package to have the BuildStream
    
    247
    +documentation in Devhelp or GNOME Builder.

  • tests/frontend/mirror.py
    ... ... @@ -139,6 +139,63 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind):
    139 139
         result.assert_success()
    
    140 140
     
    
    141 141
     
    
    142
    +@pytest.mark.datafiles(DATA_DIR)
    
    143
    +@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
    
    144
    +def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
    
    145
    +    bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
    
    146
    +    dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
    
    147
    +    upstream_repodir = os.path.join(str(tmpdir), 'upstream')
    
    148
    +    mirror_repodir = os.path.join(str(tmpdir), 'mirror')
    
    149
    +    project_dir = os.path.join(str(tmpdir), 'project')
    
    150
    +    os.makedirs(project_dir)
    
    151
    +    element_dir = os.path.join(project_dir, 'elements')
    
    152
    +
    
    153
    +    # Create repo objects of the upstream and mirror
    
    154
    +    upstream_repo = create_repo(kind, upstream_repodir)
    
    155
    +    ref = upstream_repo.create(dev_files_path)
    
    156
    +    mirror_repo = upstream_repo.copy(mirror_repodir)
    
    157
    +
    
    158
    +    element = {
    
    159
    +        'kind': 'import',
    
    160
    +        'sources': [
    
    161
    +            upstream_repo.source_config(ref=ref)
    
    162
    +        ]
    
    163
    +    }
    
    164
    +
    
    165
    +    element_name = 'test.bst'
    
    166
    +    element_path = os.path.join(element_dir, element_name)
    
    167
    +    full_repo = element['sources'][0]['url']
    
    168
    +    upstream_map, repo_name = os.path.split(full_repo)
    
    169
    +    alias = 'foo-' + kind
    
    170
    +    aliased_repo = alias + ':' + repo_name
    
    171
    +    element['sources'][0]['url'] = aliased_repo
    
    172
    +    full_mirror = mirror_repo.source_config()['url']
    
    173
    +    mirror_map, _ = os.path.split(full_mirror)
    
    174
    +    os.makedirs(element_dir)
    
    175
    +    _yaml.dump(element, element_path)
    
    176
    +
    
    177
    +    project = {
    
    178
    +        'name': 'test',
    
    179
    +        'element-path': 'elements',
    
    180
    +        'aliases': {
    
    181
    +            alias: 'http://www.example.com/'
    
    182
    +        },
    
    183
    +        'mirrors': [
    
    184
    +            {
    
    185
    +                'name': 'middle-earth',
    
    186
    +                'aliases': {
    
    187
    +                    alias: [mirror_map + "/"],
    
    188
    +                },
    
    189
    +            },
    
    190
    +        ]
    
    191
    +    }
    
    192
    +    project_file = os.path.join(project_dir, 'project.conf')
    
    193
    +    _yaml.dump(project, project_file)
    
    194
    +
    
    195
    +    result = cli.run(project=project_dir, args=['fetch', element_name])
    
    196
    +    result.assert_success()
    
    197
    +
    
    198
    +
    
    142 199
     @pytest.mark.datafiles(DATA_DIR)
    
    143 200
     def test_mirror_fetch_multi(cli, tmpdir, datafiles):
    
    144 201
         output_file = os.path.join(str(tmpdir), "output.txt")
    

  • tests/sources/remote.py
    1 1
     import os
    
    2
    +import stat
    
    2 3
     import pytest
    
    3 4
     
    
    4 5
     from buildstream._exceptions import ErrorDomain
    
    ... ... @@ -82,7 +83,14 @@ def test_simple_file_build(cli, tmpdir, datafiles):
    82 83
         result.assert_success()
    
    83 84
         # Note that the url of the file in target.bst is actually /dir/file
    
    84 85
         # but this tests confirms we take the basename
    
    85
    -    assert(os.path.exists(os.path.join(checkoutdir, 'file')))
    
    86
    +    checkout_file = os.path.join(checkoutdir, 'file')
    
    87
    +    assert(os.path.exists(checkout_file))
    
    88
    +
    
    89
    +    mode = os.stat(checkout_file).st_mode
    
    90
    +    # Assert not executable by anyone
    
    91
    +    assert(not (mode & (stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)))
    
    92
    +    # Assert not writeable by anyone other than me
    
    93
    +    assert(not (mode & (stat.S_IWGRP | stat.S_IWOTH)))
    
    86 94
     
    
    87 95
     
    
    88 96
     @pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file-custom-name'))
    
    ... ... @@ -119,6 +127,7 @@ def test_unique_key(cli, tmpdir, datafiles):
    119 127
         generate_project(project, tmpdir)
    
    120 128
         assert cli.get_element_state(project, 'target.bst') == "fetch needed"
    
    121 129
         assert cli.get_element_state(project, 'target-custom.bst') == "fetch needed"
    
    130
    +    assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
    
    122 131
         # Try to fetch it
    
    123 132
         result = cli.run(project=project, args=[
    
    124 133
             'fetch', 'target.bst'
    
    ... ... @@ -127,7 +136,31 @@ def test_unique_key(cli, tmpdir, datafiles):
    127 136
         # We should download the file only once
    
    128 137
         assert cli.get_element_state(project, 'target.bst') == 'buildable'
    
    129 138
         assert cli.get_element_state(project, 'target-custom.bst') == 'buildable'
    
    139
    +    assert cli.get_element_state(project, 'target-custom-executable.bst') == 'buildable'
    
    130 140
     
    
    131 141
         # But the cache key is different because the 'filename' is different.
    
    132 142
         assert cli.get_element_key(project, 'target.bst') != \
    
    133
    -        cli.get_element_key(project, 'target-custom.bst')
    143
    +        cli.get_element_key(project, 'target-custom.bst') != \
    
    144
    +        cli.get_element_key(project, 'target-custom-executable.bst')
    
    145
    +
    
    146
    +
    
    147
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'unique-keys'))
    
    148
    +def test_executable(cli, tmpdir, datafiles):
    
    149
    +    '''This test confirms that the 'executable' parameter is honoured.
    
    150
    +    '''
    
    151
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    152
    +    generate_project(project, tmpdir)
    
    153
    +    checkoutdir = os.path.join(str(tmpdir), "checkout")
    
    154
    +    assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
    
    155
    +    # Try to fetch it
    
    156
    +    result = cli.run(project=project, args=[
    
    157
    +        'build', 'target-custom-executable.bst'
    
    158
    +    ])
    
    159
    +
    
    160
    +    result = cli.run(project=project, args=[
    
    161
    +        'checkout', 'target-custom-executable.bst', checkoutdir
    
    162
    +    ])
    
    163
    +    mode = os.stat(os.path.join(checkoutdir, 'some-custom-file')).st_mode
    
    164
    +    assert (mode & stat.S_IEXEC)
    
    165
    +    # Assert executable by anyone
    
    166
    +    assert(mode & (stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH))

  • tests/sources/remote/unique-keys/target-custom-executable.bst
    1
    +kind: import
    
    2
    +description: test
    
    3
    +sources:
    
    4
    +- kind: remote
    
    5
    +  url: tmpdir:/dir/file
    
    6
    +  ref: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
    
    7
    +  filename: some-custom-file
    
    8
    +  executable: true



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]