[Notes] [Git][BuildStream/buildstream][tpollard/566] 16 commits: source.py: Add new delegate method validate_cache()




Tom Pollard pushed to branch tpollard/566 at BuildStream / buildstream

Commits:

15 changed files:

  • buildstream/_artifactcache/artifactcache.py
  • buildstream/_artifactcache/cascache.py
  • buildstream/_stream.py
  • buildstream/_versions.py
  • buildstream/_yaml.py
  • buildstream/element.py
  • buildstream/plugin.py
  • buildstream/plugins/sources/git.py
  • buildstream/source.py
  • tests/format/project.py
  • tests/format/project/empty-depends/manual.bst
  • tests/format/project/empty-depends/project.conf
  • tests/frontend/source_checkout.py
  • tests/sources/git.py
  • tests/testutils/repo/git.py

Changes:

  • buildstream/_artifactcache/artifactcache.py

    @@ -74,6 +74,7 @@ class ArtifactCache():
     
             self._has_fetch_remotes = False
             self._has_push_remotes = False
    +        self._has_partial_push_remotes = False
     
             os.makedirs(self.extractdir, exist_ok=True)
     
    @@ -398,6 +399,8 @@ class ArtifactCache():
                     self._has_fetch_remotes = True
                     if remote_spec.push:
                         self._has_push_remotes = True
    +                    if remote_spec.partial_push:
    +                        self._has_partial_push_remotes = True
     
                     remotes[remote_spec.url] = CASRemote(remote_spec)
     
    @@ -596,6 +599,31 @@ class ArtifactCache():
                 remotes_for_project = self._remotes[element._get_project()]
                 return any(remote.spec.push for remote in remotes_for_project)
     
    +    # has_partial_push_remotes():
    +    #
    +    # Check whether any remote repositories are available for pushing
    +    # incomplete artifacts
    +    #
    +    # Args:
    +    #     element (Element): The Element to check
    +    #
    +    # Returns:
    +    #   (bool): True if any remote repository is configured for optional
    +    #           partial pushes, False otherwise
    +    #
    +    def has_partial_push_remotes(self, *, element=None):
    +        # If there are no partial push remotes available, we can't partial push at all
    +        if not self._has_partial_push_remotes:
    +            return False
    +        elif element is None:
    +            # At least one remote is set to allow partial pushes
    +            return True
    +        else:
    +            # Check whether the specified element's project has push remotes
    +            # configured to accept partial artifact pushes
    +            remotes_for_project = self._remotes[element._get_project()]
    +            return any(remote.spec.partial_push for remote in remotes_for_project)
    +
         # push():
         #
         # Push committed artifact to remote repository.
    @@ -603,6 +631,8 @@ class ArtifactCache():
         # Args:
         #     element (Element): The Element whose artifact is to be pushed
         #     keys (list): The cache keys to use
    +    #     partial (bool): If the artifact is cached in a partial state
    +    #     subdir (string): Optional subdir to exclude from the push
         #
         # Returns:
         #   (bool): True if any remote was updated, False if no pushes were required
    @@ -610,12 +640,23 @@ class ArtifactCache():
         # Raises:
         #   (ArtifactError): if there was an error
         #
    -    def push(self, element, keys):
    +    def push(self, element, keys, partial=False, subdir=None):
             refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
     
             project = element._get_project()
     
    -        push_remotes = [r for r in self._remotes[project] if r.spec.push]
    +        push_remotes = []
    +        partial_remotes = []
    +
    +        # Create the list of remotes to push to, given the current element and partial push config
    +        if not partial:
    +            push_remotes = [r for r in self._remotes[project] if r.spec.push]
    +
    +        if self._has_partial_push_remotes:
    +            # Build the specific list of remotes expecting the artifact to be pushed
    +            # in a partial state, i.e. without the optional subdir even when it
    +            # exists locally
    +            partial_remotes = [r for r in self._remotes[project] if (r.spec.partial_push and r.spec.push)]
     
             pushed = False
     
    @@ -632,6 +673,19 @@ class ArtifactCache():
                         remote.spec.url, element._get_brief_display_key()
                     ))
     
    +        for remote in partial_remotes:
    +            remote.init()
    +            display_key = element._get_brief_display_key()
    +            element.status("Pushing partial artifact {} -> {}".format(display_key, remote.spec.url))
    +
    +            if self.cas.push(refs, remote, subdir=subdir):
    +                element.info("Pushed partial artifact {} -> {}".format(display_key, remote.spec.url))
    +                pushed = True
    +            else:
    +                element.info("Remote ({}) already has partial artifact {} cached".format(
    +                    remote.spec.url, display_key
    +                ))
    +
             return pushed
     
         # pull():
    
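Taken together, these hunks let an artifact server opt in to receiving artifacts without their buildtree. A sketch of what enabling this might look like, written the way the test suite generates configs (the 'artifacts' section layout, URL and port are illustrative assumptions; only the 'allow-partial-push' key itself is introduced by this branch):

    # Hypothetical fragment in the style of the tests below; 'project',
    # 'os' and '_yaml' come from the surrounding test harness.
    project_conf = {
        'name': 'example',
        'artifacts': {
            'url': 'https://cache.example.com:11002',
            'push': True,
            'allow-partial-push': True,
        },
    }
    _yaml.dump(project_conf, os.path.join(project, 'project.conf'))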

  • buildstream/_artifactcache/cascache.py

    @@ -45,7 +45,7 @@ from .. import _yaml
     _MAX_PAYLOAD_BYTES = 1024 * 1024
     
     
    -class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert')):
    +class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push partial_push server_cert client_key client_cert')):
     
         # _new_from_config_node
         #
    @@ -53,9 +53,12 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
         #
         @staticmethod
         def _new_from_config_node(spec_node, basedir=None):
    -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert'])
    +        _yaml.node_validate(spec_node,
    +                            ['url', 'push', 'allow-partial-push', 'server-cert', 'client-key', 'client-cert'])
             url = _yaml.node_get(spec_node, str, 'url')
             push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
    +        partial_push = _yaml.node_get(spec_node, bool, 'allow-partial-push', default_value=False)
    +
             if not url:
                 provenance = _yaml.node_get_provenance(spec_node, 'url')
                 raise LoadError(LoadErrorReason.INVALID_DATA,
    @@ -83,10 +86,10 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
                 raise LoadError(LoadErrorReason.INVALID_DATA,
                                 "{}: 'client-cert' was specified without 'client-key'".format(provenance))
     
    -        return CASRemoteSpec(url, push, server_cert, client_key, client_cert)
    +        return CASRemoteSpec(url, push, partial_push, server_cert, client_key, client_cert)
     
     
    -CASRemoteSpec.__new__.__defaults__ = (None, None, None)
    +CASRemoteSpec.__new__.__defaults__ = (False, None, None, None)
     
     
     class BlobNotFound(CASError):
    @@ -353,6 +356,7 @@ class CASCache():
         # Args:
         #     refs (list): The refs to push
         #     remote (CASRemote): The remote to push to
    +    #     subdir (string): Optional specific subdir to exempt from the push
         #
         # Returns:
         #   (bool): True if any remote was updated, False if no pushes were required
    @@ -360,7 +364,7 @@ class CASCache():
         # Raises:
         #   (CASError): if there was an error
         #
    -    def push(self, refs, remote):
    +    def push(self, refs, remote, subdir=None):
             skipped_remote = True
             try:
                 for ref in refs:
    @@ -382,7 +386,7 @@ class CASCache():
                             # Intentionally re-raise RpcError for outer except block.
                             raise
     
    -                self._send_directory(remote, tree)
    +                self._send_directory(remote, tree, excluded_dir=subdir)
     
                     request = buildstream_pb2.UpdateReferenceRequest()
                     request.keys.append(ref)
    @@ -886,7 +890,7 @@ class CASCache():
             for dirnode in directory.directories:
                 self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime)
     
    -    def _required_blobs(self, directory_digest):
    +    def _required_blobs(self, directory_digest, excluded_dir=None):
             # parse directory, and recursively add blobs
             d = remote_execution_pb2.Digest()
             d.hash = directory_digest.hash
    @@ -905,7 +909,8 @@ class CASCache():
                 yield d
     
             for dirnode in directory.directories:
    -            yield from self._required_blobs(dirnode.digest)
    +            if dirnode.name != excluded_dir:
    +                yield from self._required_blobs(dirnode.digest)
     
         def _fetch_blob(self, remote, digest, stream):
             resource_name = '/'.join(['blobs', digest.hash, str(digest.size_bytes)])
    @@ -1091,8 +1096,8 @@ class CASCache():
     
             assert response.committed_size == digest.size_bytes
     
    -    def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
    -        required_blobs = self._required_blobs(digest)
    +    def _send_directory(self, remote, digest, u_uid=uuid.uuid4(), excluded_dir=None):
    +        required_blobs = self._required_blobs(digest, excluded_dir=excluded_dir)
     
             missing_blobs = dict()
             # Limit size of FindMissingBlobs request
    
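The widened __defaults__ tuple is what keeps existing callers working: namedtuple defaults bind to the rightmost fields, so partial_push quietly defaults to False. A standalone sketch of the mechanism:

    from collections import namedtuple

    # Same field layout as CASRemoteSpec after this change
    Spec = namedtuple('Spec', 'url push partial_push server_cert client_key client_cert')

    # Defaults apply right-to-left: the last four fields get
    # (False, None, None, None), so url and push remain required.
    Spec.__new__.__defaults__ = (False, None, None, None)

    spec = Spec('https://cache.example.com', True)
    assert spec.partial_push is False
    assert spec.client_cert is None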

  • buildstream/_stream.py

    @@ -1199,7 +1199,7 @@ class Stream():
                 element_source_dir = self._get_element_dirname(directory, element)
                 if list(element.sources()):
                     os.makedirs(element_source_dir)
    -                element._stage_sources_at(element_source_dir)
    +                element._stage_sources_at(element_source_dir, mount_workspaces=False)
     
         # Write a master build script to the sandbox
         def _write_build_script(self, directory, elements):
    

  • buildstream/_versions.py

    @@ -23,7 +23,7 @@
     # This version is bumped whenever enhancements are made
     # to the `project.conf` format or the core element format.
     #
    -BST_FORMAT_VERSION = 19
    +BST_FORMAT_VERSION = 20
     
     
     # The base BuildStream artifact version
    

  • buildstream/_yaml.py

    @@ -352,6 +352,7 @@ _sentinel = object()
     #    key (str): The key to get a value for in node
     #    indices (list of ints): Optionally descend into lists of lists
     #    default_value: Optionally return this value if the key is not found
    +#    allow_none (bool): Allow None to be a valid value
     #
     # Returns:
     #    The value if found in node, otherwise default_value is returned
    @@ -362,7 +363,7 @@ _sentinel = object()
     # Note:
     #    Returned strings are stripped of leading and trailing whitespace
     #
    -def node_get(node, expected_type, key, indices=None, default_value=_sentinel):
    +def node_get(node, expected_type, key, indices=None, *, default_value=_sentinel, allow_none=False):
         value = node.get(key, default_value)
         provenance = node_get_provenance(node)
         if value is _sentinel:
    @@ -377,8 +378,8 @@ def node_get(node, expected_type, key, indices=None, default_value=_sentinel):
             value = value[index]
             path += '[{:d}]'.format(index)
     
    -    # We want to allow None as a valid value for any type
    -    if value is None:
    +    # Optionally allow None as a valid value for any type
    +    if value is None and (allow_none or default_value is None):
             return None
     
         if not isinstance(value, expected_type):
    
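The behavioural change here is subtle: previously an explicit `key: ~` in the YAML returned None for any expected type; now None is only accepted when the caller opts in with allow_none=True or supplies default_value=None. A self-contained sketch of the new check, assuming a plain dict in place of a provenance-tracking node:

    _sentinel = object()

    def node_get(node, expected_type, key, *, default_value=_sentinel, allow_none=False):
        value = node.get(key, default_value)
        if value is _sentinel:
            raise KeyError(key)        # the real code raises a LoadError here
        # Optionally allow None as a valid value for any type
        if value is None and (allow_none or default_value is None):
            return None
        if not isinstance(value, expected_type):
            raise TypeError("expected {} for '{}'".format(expected_type.__name__, key))
        return value

    assert node_get({'ref': None}, str, 'ref', allow_none=True) is None
    assert node_get({}, str, 'ref', default_value=None) is None
    # Without opting in, an explicit None now fails the type check:
    # node_get({'ref': None}, str, 'ref')  ->  TypeError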

  • buildstream/element.py

    @@ -1800,13 +1800,19 @@ class Element(Plugin):
         #   (bool): True if this element does not need a push job to be created
         #
         def _skip_push(self):
    +
             if not self.__artifacts.has_push_remotes(element=self):
                 # No push remotes for this element's project
                 return True
     
             # Do not push elements that aren't cached, or that are cached with a dangling buildtree
    -        # artifact unless element type is expected to have an an empty buildtree directory
    -        if not self._cached_buildtree():
    +        # artifact, unless the element type is expected to have an empty buildtree directory. Check
    +        # that this default behaviour is not overridden via a remote configured to allow pushing
    +        # artifacts without their corresponding buildtree.
    +        if not self._cached():
    +            return True
    +
    +        if not self._cached_buildtree() and not self.__artifacts.has_partial_push_remotes(element=self):
                 return True
     
             # Do not push tainted artifact
    @@ -1817,7 +1823,8 @@ class Element(Plugin):
     
         # _push():
         #
    -    # Push locally cached artifact to remote artifact repository.
    +    # Push the locally cached artifact to the remote artifact repository. An attempt
    +    # will be made to push partial artifacts if the current configuration allows it
         #
         # Returns:
         #   (bool): True if the remote was updated, False if it already existed
    @@ -1830,8 +1837,20 @@ class Element(Plugin):
                 self.warn("Not pushing tainted artifact.")
                 return False
     
    -        # Push all keys used for local commit
    -        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit())
    +        # Push all keys used for the local commit; this could be a full or a
    +        # partial push, given the preceding _skip_push() logic. If the buildtree
    +        # isn't cached, set up a partial push.
    +
    +        partial = False
    +        subdir = 'buildtree'
    +        if not self._cached_buildtree():
    +            partial = True
    +            subdir = ''
    +
    +        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit(), partial=partial, subdir=subdir)
    +
    +        # The artifact might be partially cached on the server, with only the top level ref existing.
    +        # Check whether we need to attempt a push of a locally cached buildtree given the current config
             if not pushed:
                 return False
     
    
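The net effect on scheduling, leaving the taint check aside: a cached element without its buildtree is no longer skipped outright, but pushed partially if and only if some remote allows it. A small sketch with hypothetical boolean stand-ins for the cache queries:

    def push_plan(cached, buildtree_cached, has_partial_push_remotes):
        """Return (skip, partial, subdir), mirroring _skip_push() and _push()."""
        if not cached:
            return True, False, None
        if not buildtree_cached and not has_partial_push_remotes:
            return True, False, None
        if buildtree_cached:
            # Full push; partial-push remotes still receive it without 'buildtree'
            return False, False, 'buildtree'
        return False, True, ''

    assert push_plan(True, True, False) == (False, False, 'buildtree')
    assert push_plan(True, False, True) == (False, True, '')
    assert push_plan(True, False, False) == (True, False, None)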

  • buildstream/plugin.py

    @@ -323,7 +323,7 @@ class Plugin():
             provenance = _yaml.node_get_provenance(node, key=member_name)
             return str(provenance)
     
    -    def node_get_member(self, node, expected_type, member_name, default=_yaml._sentinel):
    +    def node_get_member(self, node, expected_type, member_name, default=_yaml._sentinel, *, allow_none=False):
             """Fetch the value of a node member, raising an error if the value is
             missing or incorrectly typed.
     
    @@ -332,6 +332,7 @@ class Plugin():
                expected_type (type): The expected type of the node member
                member_name (str): The name of the member to fetch
                default (expected_type): A value to return when *member_name* is not specified in *node*
    +           allow_none (bool): Allow explicitly set None values in the YAML (*Since: 1.4*)
     
             Returns:
                The value of *member_name* in *node*, otherwise *default*
    @@ -352,7 +353,7 @@ class Plugin():
               # Fetch an optional integer
               level = self.node_get_member(node, int, 'level', -1)
             """
    -        return _yaml.node_get(node, expected_type, member_name, default_value=default)
    +        return _yaml.node_get(node, expected_type, member_name, default_value=default, allow_none=allow_none)
     
         def node_get_project_path(self, node, key, *,
                                   check_is_file=False, check_is_dir=False):
    
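A hypothetical plugin-side use of the new keyword, in the spirit of the docstring's existing `level` example (the 'ref' member name is illustrative):

    # Mandatory member, but an explicit `ref: ~` in the element YAML now
    # maps to None instead of raising a type error (*Since: 1.4*)
    ref = self.node_get_member(node, str, 'ref', allow_none=True)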

  • buildstream/plugins/sources/git.py

    @@ -133,7 +133,22 @@ details on common configuration options for sources.
     
     This plugin provides the following :ref:`configurable warnings <configurable_warnings>`:
     
    -- ``git:inconsistent-submodule`` - A submodule was found to be missing from the underlying git repository.
    +- ``git:inconsistent-submodule`` - A submodule present in the git repository's .gitmodules was never
    +  added with `git submodule add`.
    +
    +- ``git:unlisted-submodule`` - A submodule is present in the git repository but was not specified in
    +  the source configuration and was not disabled for checkout.
    +
    +  .. note::
    +
    +     The ``git:unlisted-submodule`` warning is available since :ref:`format version 20 <project_format_version>`
    +
    +- ``git:invalid-submodule`` - A submodule is specified in the source configuration but does not exist
    +  in the repository.
    +
    +  .. note::
    +
    +     The ``git:invalid-submodule`` warning is available since :ref:`format version 20 <project_format_version>`
     
     This plugin also utilises the following configurable :class:`core warnings <buildstream.types.CoreWarnings>`:
     
    @@ -158,6 +173,8 @@ GIT_MODULES = '.gitmodules'
     
     # Warnings
     WARN_INCONSISTENT_SUBMODULE = "inconsistent-submodule"
    +WARN_UNLISTED_SUBMODULE = "unlisted-submodule"
    +WARN_INVALID_SUBMODULE = "invalid-submodule"
     
     
     # Because of handling of submodules, we maintain a GitMirror
    @@ -305,7 +322,7 @@ class GitMirror(SourceFetcher):
     
             return ref, list(tags)
     
    -    def stage(self, directory, track=None):
    +    def stage(self, directory):
             fullpath = os.path.join(directory, self.path)
     
             # Using --shared here avoids copying the objects into the checkout, in any
    @@ -324,11 +341,7 @@ class GitMirror(SourceFetcher):
     
             self._rebuild_git(fullpath)
     
    -        # Check that the user specified ref exists in the track if provided & not already tracked
    -        if track:
    -            self.assert_ref_in_track(fullpath, track)
    -
    -    def init_workspace(self, directory, track=None):
    +    def init_workspace(self, directory):
             fullpath = os.path.join(directory, self.path)
             url = self.source.translate_url(self.url)
     
    @@ -344,10 +357,6 @@ class GitMirror(SourceFetcher):
                              fail="Failed to checkout git ref {}".format(self.ref),
                              cwd=fullpath)
     
    -        # Check that the user specified ref exists in the track if provided & not already tracked
    -        if track:
    -            self.assert_ref_in_track(fullpath, track)
    -
         # List the submodules (path/url tuples) present at the given ref of this repo
         def submodule_list(self):
             modules = "{}:{}".format(self.ref, GIT_MODULES)
    @@ -413,28 +422,6 @@ class GitMirror(SourceFetcher):
     
                 return None
     
    -    # Assert that ref exists in track, if track has been specified.
    -    def assert_ref_in_track(self, fullpath, track):
    -        _, branch = self.source.check_output([self.source.host_git, 'branch', '--list', track,
    -                                              '--contains', self.ref],
    -                                             cwd=fullpath,)
    -        if branch:
    -            return
    -        else:
    -            _, tag = self.source.check_output([self.source.host_git, 'tag', '--list', track,
    -                                               '--contains', self.ref],
    -                                              cwd=fullpath,)
    -            if tag:
    -                return
    -
    -        detail = "The ref provided for the element does not exist locally in the provided track branch / tag " + \
    -                 "'{}'.\nYou may wish to track the element to update the ref from '{}' ".format(track, track) + \
    -                 "with `bst track`,\nor examine the upstream at '{}' for the specific ref.".format(self.url)
    -
    -        self.source.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
    -                         .format(self.source, self.ref, track, self.url),
    -                         detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
    -
         def _rebuild_git(self, fullpath):
             if not self.tags:
                 return
    @@ -563,7 +550,6 @@ class GitSource(Source):
                     self.submodule_checkout_overrides[path] = checkout
     
             self.mark_download_url(self.original_url)
    -        self.tracked = False
     
         def preflight(self):
             # Check if git is installed, get the binary at the same time
    @@ -653,8 +639,6 @@ class GitSource(Source):
                 # Update self.mirror.ref and node.ref from the self.tracking branch
                 ret = self.mirror.latest_commit_with_tags(self.tracking, self.track_tags)
     
    -        # Set tracked attribute, parameter for if self.mirror.assert_ref_in_track is needed
    -        self.tracked = True
             return ret
     
         def init_workspace(self, directory):
    @@ -662,7 +646,7 @@ class GitSource(Source):
             self.refresh_submodules()
     
             with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
    -            self.mirror.init_workspace(directory, track=(self.tracking if not self.tracked else None))
    +            self.mirror.init_workspace(directory)
                 for mirror in self.submodules:
                     mirror.init_workspace(directory)
     
    @@ -678,15 +662,9 @@ class GitSource(Source):
             # Stage the main repo in the specified directory
             #
             with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
    -            self.mirror.stage(directory, track=(self.tracking if not self.tracked else None))
    +            self.mirror.stage(directory)
                 for mirror in self.submodules:
    -                if mirror.path in self.submodule_checkout_overrides:
    -                    checkout = self.submodule_checkout_overrides[mirror.path]
    -                else:
    -                    checkout = self.checkout_submodules
    -
    -                if checkout:
    -                    mirror.stage(directory)
    +                mirror.stage(directory)
     
         def get_source_fetchers(self):
             yield self.mirror
    @@ -694,6 +672,74 @@ class GitSource(Source):
             for submodule in self.submodules:
                 yield submodule
     
    +    def validate_cache(self):
    +        discovered_submodules = {}
    +        unlisted_submodules = []
    +        invalid_submodules = []
    +
    +        for path, url in self.mirror.submodule_list():
    +            discovered_submodules[path] = url
    +            if self.ignore_submodule(path):
    +                continue
    +
    +            override_url = self.submodule_overrides.get(path)
    +            if not override_url:
    +                unlisted_submodules.append((path, url))
    +
    +        # Warn about submodules which are explicitly configured but do not exist
    +        for path, url in self.submodule_overrides.items():
    +            if path not in discovered_submodules:
    +                invalid_submodules.append((path, url))
    +
    +        if invalid_submodules:
    +            detail = []
    +            for path, url in invalid_submodules:
    +                detail.append("  Submodule URL '{}' at path '{}'".format(url, path))
    +
    +            self.warn("{}: Invalid submodules specified".format(self),
    +                      warning_token=WARN_INVALID_SUBMODULE,
    +                      detail="The following submodules are specified in the source "
    +                      "description but do not exist according to the repository\n\n" +
    +                      "\n".join(detail))
    +
    +        # Warn about submodules which exist but have not been explicitly configured
    +        if unlisted_submodules:
    +            detail = []
    +            for path, url in unlisted_submodules:
    +                detail.append("  Submodule URL '{}' at path '{}'".format(url, path))
    +
    +            self.warn("{}: Unlisted submodules exist".format(self),
    +                      warning_token=WARN_UNLISTED_SUBMODULE,
    +                      detail="The following submodules exist but are not specified " +
    +                      "in the source description\n\n" +
    +                      "\n".join(detail))
    +
    +        # Assert that the ref exists in the track tag/branch, if track has been specified.
    +        ref_in_track = False
    +        if self.tracking:
    +            _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
    +                                           '--contains', self.mirror.ref],
    +                                          cwd=self.mirror.mirror)
    +            if branch:
    +                ref_in_track = True
    +            else:
    +                _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
    +                                            '--contains', self.mirror.ref],
    +                                           cwd=self.mirror.mirror)
    +                if tag:
    +                    ref_in_track = True
    +
    +            if not ref_in_track:
    +                detail = "The ref provided for the element does not exist locally " + \
    +                         "in the provided track branch / tag '{}'.\n".format(self.tracking) + \
    +                         "You may wish to track the element to update the ref from '{}' ".format(self.tracking) + \
    +                         "with `bst track`,\n" + \
    +                         "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
    +
    +                self.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
    +                          .format(self, self.mirror.ref, self.tracking, self.mirror.url),
    +                          detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
    +
         ###########################################################
         #                     Local Functions                     #
         ###########################################################
    @@ -718,12 +764,12 @@ class GitSource(Source):
             self.mirror.ensure()
             submodules = []
     
    -        # XXX Here we should issue a warning if either:
    -        #   A.) A submodule exists but is not defined in the element configuration
    -        #   B.) The element configuration configures submodules which dont exist at the current ref
    -        #
             for path, url in self.mirror.submodule_list():
     
    +            # Completely ignore submodules which are disabled for checkout
    +            if self.ignore_submodule(path):
    +                continue
    +
                 # Allow configuration to override the upstream
                 # location of the submodules.
                 override_url = self.submodule_overrides.get(path)
    @@ -747,6 +793,16 @@ class GitSource(Source):
                 tags.append((tag, commit_ref, annotated))
             return tags
     
    +    # Checks whether the plugin configuration has explicitly
    +    # configured this submodule to be ignored
    +    def ignore_submodule(self, path):
    +        try:
    +            checkout = self.submodule_checkout_overrides[path]
    +        except KeyError:
    +            checkout = self.checkout_submodules
    +
    +        return not checkout
    +
     
     # Plugin entry point
     def setup():
    
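For context, the checkout overrides consulted by the new ignore_submodule() helper come from the source configuration. A sketch in the dict form the tests use (URL and submodule path are placeholders):

    # Hypothetical git source entry: the submodule at 'other' is disabled
    # for checkout, so ignore_submodule('other') returns True and the
    # submodule is skipped both when staging and in validate_cache() warnings.
    gitsource = {
        'kind': 'git',
        'url': 'https://example.com/repo.git',
        'track': 'master',
        'submodules': {
            'other': {
                'checkout': False
            }
        }
    }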

  • buildstream/source.py

    @@ -102,6 +102,11 @@ these methods are mandatory to implement.
       submodules). For details on how to define a SourceFetcher, see
       :ref:`SourceFetcher <core_source_fetcher>`.
     
    +* :func:`Source.validate_cache() <buildstream.source.Source.validate_cache>`
    +
    +  Perform any validations which require the sources to be cached.
    +
    +  **Optional**: This is completely optional and will do nothing if left unimplemented.
     
     Accessing previous sources
     --------------------------
    @@ -480,9 +485,22 @@ class Source(Plugin):
     
             *Since: 1.2*
             """
    -
             return []
     
    +    def validate_cache(self):
    +        """Implement any validations once we know the sources are cached
    +
    +        This is guaranteed to be called only once for a given session
    +        once the sources are known to be
    +        :attr:`Consistency.CACHED <buildstream.types.Consistency.CACHED>`.
    +        If source tracking is enabled in the session for this source,
    +        then this will only be called if the sources become cached after
    +        tracking completes.
    +
    +        *Since: 1.4*
    +        """
    +        pass
    +
         #############################################################
         #                       Public Methods                      #
         #############################################################
    @@ -659,6 +677,11 @@ class Source(Plugin):
                 with context.silence():
                     self.__consistency = self.get_consistency()  # pylint: disable=assignment-from-no-return
     
    +                # Give the Source an opportunity to validate the cached
    +                # sources as soon as the Source becomes Consistency.CACHED.
    +                if self.__consistency == Consistency.CACHED:
    +                    self.validate_cache()
    +
         # Return cached consistency
         #
         def _get_consistency(self):
    
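A minimal sketch of a source plugin adopting the new delegate method (the class, attribute and warning token are hypothetical; only the validate_cache() hook itself comes from this branch):

    from buildstream import Source

    class ExampleSource(Source):
        # configure(), preflight(), get_consistency() etc. omitted

        def validate_cache(self):
            # Runs at most once per session, and only after this source is
            # known to be Consistency.CACHED, so the cached files can safely
            # be inspected here. The token 'example:stale' is hypothetical.
            if getattr(self, 'stale', False):
                self.warn("{}: cached source looks stale".format(self),
                          warning_token="example:stale")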

  • tests/format/project.py

    @@ -200,3 +200,10 @@ def test_element_path_project_path_contains_symlinks(cli, datafiles, tmpdir):
             f.write("kind: manual\n")
         result = cli.run(project=linked_project, args=['show', 'element.bst'])
         result.assert_success()
    +
    +
    +@pytest.mark.datafiles(os.path.join(DATA_DIR))
    +def test_empty_depends(cli, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename, "empty-depends")
    +    result = cli.run(project=project, args=['show', 'manual.bst'])
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)

  • tests/format/project/empty-depends/manual.bst

    +kind: manual
    +
    +depends:

  • tests/format/project/empty-depends/project.conf

    +name: test

  • tests/frontend/source_checkout.py

    @@ -28,10 +28,17 @@ def generate_remote_import_element(input_path, output_path):
     
     
     @pytest.mark.datafiles(DATA_DIR)
    -def test_source_checkout(datafiles, cli):
    +@pytest.mark.parametrize('with_workspace', [('workspace'), ('no-workspace')])
    +def test_source_checkout(datafiles, tmpdir_factory, cli, with_workspace):
    +    tmpdir = tmpdir_factory.mktemp("")
         project = os.path.join(datafiles.dirname, datafiles.basename)
         checkout = os.path.join(cli.directory, 'source-checkout')
         target = 'checkout-deps.bst'
    +    workspace = os.path.join(str(tmpdir), 'workspace')
    +
    +    if with_workspace == "workspace":
    +        result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, target])
    +        result.assert_success()
     
         result = cli.run(project=project, args=['source-checkout', target, '--deps', 'none', checkout])
         result.assert_success()
    

  • tests/sources/git.py

    @@ -455,6 +455,274 @@ def test_ref_not_in_track(cli, tmpdir, datafiles, fail):
             assert "ref-not-in-track" in result.stderr
     
     
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    +@pytest.mark.parametrize("fail", ['warn', 'error'])
    +def test_unlisted_submodule(cli, tmpdir, datafiles, fail):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    # Make the warning an error if we're testing errors
    +    if fail == 'error':
    +        project_template = {
    +            "name": "foo",
    +            "fatal-warnings": ['git:unlisted-submodule']
    +        }
    +        _yaml.dump(project_template, os.path.join(project, 'project.conf'))
    +
    +    # Create the submodule first from the 'subrepofiles' subdir
    +    subrepo = create_repo('git', str(tmpdir), 'subrepo')
    +    subrepo.create(os.path.join(project, 'subrepofiles'))
    +
    +    # Create the repo from the 'repofiles' subdir
    +    repo = create_repo('git', str(tmpdir))
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    +
    +    # Add a submodule pointing to the one we created
    +    ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
    +
    +    # Create the source, and delete the explicit configuration
    +    # of the submodules.
    +    #
    +    # We expect this to cause an unlisted submodule warning
    +    # after the source has been fetched.
    +    #
    +    gitsource = repo.source_config(ref=ref)
    +    del gitsource['submodules']
    +
    +    # Write out our test target
    +    element = {
    +        'kind': 'import',
    +        'sources': [
    +            gitsource
    +        ]
    +    }
    +    _yaml.dump(element, os.path.join(project, 'target.bst'))
    +
    +    # We will not see the warning or error before the first fetch, because
    +    # we don't have the repository yet and so we have no knowledge of
    +    # the unlisted submodule.
    +    result = cli.run(project=project, args=['show', 'target.bst'])
    +    result.assert_success()
    +    assert "git:unlisted-submodule" not in result.stderr
    +
    +    # We will notice this directly in fetch, as it will try to fetch
    +    # the submodules it discovers as a result of fetching the primary repo.
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    +
    +    # Assert a warning or an error depending on what we're checking
    +    if fail == 'error':
    +        result.assert_main_error(ErrorDomain.STREAM, None)
    +        result.assert_task_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
    +    else:
    +        result.assert_success()
    +        assert "git:unlisted-submodule" in result.stderr
    +
    +    # Now that we've fetched it, `bst show` will discover the unlisted submodule too
    +    result = cli.run(project=project, args=['show', 'target.bst'])
    +
    +    # Assert a warning or an error depending on what we're checking
    +    if fail == 'error':
    +        result.assert_main_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
    +    else:
    +        result.assert_success()
    +        assert "git:unlisted-submodule" in result.stderr
    +
    +
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    +@pytest.mark.parametrize("fail", ['warn', 'error'])
    +def test_track_unlisted_submodule(cli, tmpdir, datafiles, fail):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    # Make the warning an error if we're testing errors
    +    if fail == 'error':
    +        project_template = {
    +            "name": "foo",
    +            "fatal-warnings": ['git:unlisted-submodule']
    +        }
    +        _yaml.dump(project_template, os.path.join(project, 'project.conf'))
    +
    +    # Create the submodule first from the 'subrepofiles' subdir
    +    subrepo = create_repo('git', str(tmpdir), 'subrepo')
    +    subrepo.create(os.path.join(project, 'subrepofiles'))
    +
    +    # Create the repo from the 'repofiles' subdir
    +    repo = create_repo('git', str(tmpdir))
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    +
    +    # Add a submodule pointing to the one we created, but use
    +    # the original ref, letting the submodule appear only after tracking
    +    repo.add_submodule('subdir', 'file://' + subrepo.repo)
    +
    +    # Create the source, and delete the explicit configuration
    +    # of the submodules.
    +    gitsource = repo.source_config(ref=ref)
    +    del gitsource['submodules']
    +
    +    # Write out our test target
    +    element = {
    +        'kind': 'import',
    +        'sources': [
    +            gitsource
    +        ]
    +    }
    +    _yaml.dump(element, os.path.join(project, 'target.bst'))
    +
    +    # Fetch the repo, we will not see the warning because we
    +    # are still pointing to a ref which predates the submodules
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    +    result.assert_success()
    +    assert "git:unlisted-submodule" not in result.stderr
    +
    +    # We won't get a warning/error when tracking either, the source
    +    # has not become Consistency.CACHED so the opportunity to check
    +    # for the warning has not yet arisen.
    +    result = cli.run(project=project, args=['track', 'target.bst'])
    +    result.assert_success()
    +    assert "git:unlisted-submodule" not in result.stderr
    +
    +    # Fetching the repo at the new ref will finally reveal the warning
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    +    if fail == 'error':
    +        result.assert_main_error(ErrorDomain.STREAM, None)
    +        result.assert_task_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
    +    else:
    +        result.assert_success()
    +        assert "git:unlisted-submodule" in result.stderr
    +
    +
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    +@pytest.mark.parametrize("fail", ['warn', 'error'])
    +def test_invalid_submodule(cli, tmpdir, datafiles, fail):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    # Make the warning an error if we're testing errors
    +    if fail == 'error':
    +        project_template = {
    +            "name": "foo",
    +            "fatal-warnings": ['git:invalid-submodule']
    +        }
    +        _yaml.dump(project_template, os.path.join(project, 'project.conf'))
    +
    +    # Create the repo from the 'repofiles' subdir
    +    repo = create_repo('git', str(tmpdir))
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    +
    +    # Create the source without any submodules, and add
    +    # an invalid submodule configuration to it.
    +    #
    +    # We expect this to cause an invalid submodule warning
    +    # after the source has been fetched and we know what
    +    # the real submodules actually are.
    +    #
    +    gitsource = repo.source_config(ref=ref)
    +    gitsource['submodules'] = {
    +        'subdir': {
    +            'url': 'https://pony.org/repo.git'
    +        }
    +    }
    +
    +    # Write out our test target
    +    element = {
    +        'kind': 'import',
    +        'sources': [
    +            gitsource
    +        ]
    +    }
    +    _yaml.dump(element, os.path.join(project, 'target.bst'))
    +
    +    # We will not see the warning or error before the first fetch, because
    +    # we don't have the repository yet and so we have no knowledge of
    +    # the invalid submodule.
    +    result = cli.run(project=project, args=['show', 'target.bst'])
    +    result.assert_success()
    +    assert "git:invalid-submodule" not in result.stderr
    +
    +    # We will notice this directly in fetch, as it will try to fetch
    +    # the submodules it discovers as a result of fetching the primary repo.
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    +
    +    # Assert a warning or an error depending on what we're checking
    +    if fail == 'error':
    +        result.assert_main_error(ErrorDomain.STREAM, None)
    +        result.assert_task_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
    +    else:
    +        result.assert_success()
    +        assert "git:invalid-submodule" in result.stderr
    +
    +    # Now that we've fetched it, `bst show` will discover the invalid submodule too
    +    result = cli.run(project=project, args=['show', 'target.bst'])
    +
    +    # Assert a warning or an error depending on what we're checking
    +    if fail == 'error':
    +        result.assert_main_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
    +    else:
    +        result.assert_success()
    +        assert "git:invalid-submodule" in result.stderr
    +
    +
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    +@pytest.mark.parametrize("fail", ['warn', 'error'])
    +def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    # Make the warning an error if we're testing errors
    +    if fail == 'error':
    +        project_template = {
    +            "name": "foo",
    +            "fatal-warnings": ['git:invalid-submodule']
    +        }
    +        _yaml.dump(project_template, os.path.join(project, 'project.conf'))
    +
    +    # Create the submodule first from the 'subrepofiles' subdir
    +    subrepo = create_repo('git', str(tmpdir), 'subrepo')
    +    subrepo.create(os.path.join(project, 'subrepofiles'))
    +
    +    # Create the repo from the 'repofiles' subdir
    +    repo = create_repo('git', str(tmpdir))
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    +
    +    # Add a submodule pointing to the one we created
    +    ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
    +
    +    # Add a commit beyond the ref which *removes* the submodule we've added
    +    repo.remove_path('subdir')
    +
    +    # Create the source, this will keep the submodules so initially
    +    # the configuration is valid for the ref we're using
    +    gitsource = repo.source_config(ref=ref)
    +
    +    # Write out our test target
    +    element = {
    +        'kind': 'import',
    +        'sources': [
    +            gitsource
    +        ]
    +    }
    +    _yaml.dump(element, os.path.join(project, 'target.bst'))
    +
    +    # Fetch the repo, we will not see the warning yet because the
    +    # submodule configuration is still valid for the ref we're using
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    +    result.assert_success()
    +    assert "git:invalid-submodule" not in result.stderr
    +
    +    # In this case, we will get the error directly after tracking,
    +    # since the new HEAD does not require any submodules which are
    +    # not locally cached, the Source will be CACHED directly after
    +    # tracking and the validations will occur as a result.
    +    #
    +    result = cli.run(project=project, args=['track', 'target.bst'])
    +    if fail == 'error':
    +        result.assert_main_error(ErrorDomain.STREAM, None)
    +        result.assert_task_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
    +    else:
    +        result.assert_success()
    +        assert "git:invalid-submodule" in result.stderr
    +
    +
     @pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
     @pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
     @pytest.mark.parametrize("ref_format", ['sha1', 'git-describe'])

  • tests/testutils/repo/git.py

    @@ -76,6 +76,12 @@ class Git(Repo):
             self._run_git('commit', '-m', 'Added the submodule')
             return self.latest_commit()
     
    +    # This can be used to remove a file or a submodule
    +    def remove_path(self, path):
    +        self._run_git('rm', path)
    +        self._run_git('commit', '-m', 'Removing {}'.format(path))
    +        return self.latest_commit()
    +
         def source_config(self, ref=None, checkout_submodules=None):
             config = {
                 'kind': 'git',
    
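This helper is what test_track_invalid_submodule above relies on; typical usage, mirroring that test:

    # Keep a ref that still contains the submodule, then commit its
    # removal on top of it so tracking moves past the submodule.
    repo = create_repo('git', str(tmpdir))
    ref = repo.create(os.path.join(project, 'repofiles'))
    ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
    new_head = repo.remove_path('subdir')   # commits the removal, returns the new HEAD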


