[Notes] [Git][BuildStream/buildstream][tpollard/566] 19 commits: BREAK:remove unconditional 'are you sure?' prompts




Tom Pollard pushed to branch tpollard/566 at BuildStream / buildstream

Commits:

29 changed files:

Changes:

  • NEWS

    @@ -50,6 +50,11 @@ buildstream 1.3.1
         an error message and a hint instead, to avoid bothering folks that just
         made a mistake.
     
    +  o BREAKING CHANGE: The unconditional 'Are you sure?' prompts have been
    +    removed. These would always ask you if you were sure when running
    +    'bst workspace close --remove-dir' or 'bst workspace reset'. They got in
    +    the way too often.
    +
       o Failed builds are included in the cache as well.
         `bst checkout` will provide anything in `%{install-root}`.
         A build including cached fails will cause any dependant elements

    @@ -87,12 +92,6 @@ buildstream 1.3.1
         instead of just a specially-formatted build-root with a `root` and `scratch`
         subdirectory.
     
    -  o The buildstream.conf file learned new
    -    'prompt.really-workspace-close-remove-dir' and
    -    'prompt.really-workspace-reset-hard' options. These allow users to suppress
    -    certain confirmation prompts, e.g. double-checking that the user meant to
    -    run the command as typed.
    -
       o Due to the element `build tree` being cached in the respective artifact their
         size in some cases has significantly increased. In *most* cases the build trees
         are not utilised when building targets, as such by default bst 'pull' & 'build'

  • buildstream/_artifactcache.py

    @@ -107,6 +107,7 @@ class ArtifactCache():
     
             self._has_fetch_remotes = False
             self._has_push_remotes = False
    +        self._has_partial_push_remotes = False
     
             os.makedirs(self.extractdir, exist_ok=True)
     

    @@ -488,6 +489,9 @@ class ArtifactCache():
                     self._has_fetch_remotes = True
                     if remote_spec.push:
                         self._has_push_remotes = True
    +                    # Partial push requires the generic push option to also be set
    +                    if remote_spec.partial_push:
    +                        self._has_partial_push_remotes = True
     
                     remotes[remote_spec.url] = CASRemote(remote_spec)
     

    @@ -685,6 +689,32 @@ class ArtifactCache():
                 remotes_for_project = self._remotes[element._get_project()]
                 return any(remote.spec.push for remote in remotes_for_project)
     
    +    # has_partial_push_remotes():
    +    #
    +    # Check whether any remote repositories are available for pushing
    +    # non-complete artifacts. This option requires the generic push value
    +    # to also be set.
    +    #
    +    # Args:
    +    #     element (Element): The Element to check
    +    #
    +    # Returns:
    +    #   (bool): True if any remote repository is configured for optional
    +    #           partial pushes, False otherwise
    +    #
    +    def has_partial_push_remotes(self, *, element=None):
    +        # If there are no partial push remotes available, we can't partial push at all
    +        if not self._has_partial_push_remotes:
    +            return False
    +        elif element is None:
    +            # At least one remote is set to allow partial pushes
    +            return True
    +        else:
    +            # Check whether the specified element's project has push remotes configured
    +            # to accept partial artifact pushes
    +            remotes_for_project = self._remotes[element._get_project()]
    +            return any(remote.spec.partial_push for remote in remotes_for_project)
    +
         # push():
         #
         # Push committed artifact to remote repository.

    @@ -692,6 +722,8 @@ class ArtifactCache():
         # Args:
         #     element (Element): The Element whose artifact is to be pushed
         #     keys (list): The cache keys to use
    +    #     partial (bool): If the artifact is cached in a partial state
    +    #     subdir (string): Optional subdir to not push
         #
         # Returns:
         #   (bool): True if any remote was updated, False if no pushes were required

    @@ -699,12 +731,25 @@ class ArtifactCache():
         # Raises:
         #   (ArtifactError): if there was an error
         #
    -    def push(self, element, keys):
    +    def push(self, element, keys, partial=False, subdir=None):
             refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
     
             project = element._get_project()
     
    -        push_remotes = [r for r in self._remotes[project] if r.spec.push]
    +        push_remotes = []
    +        partial_remotes = []
    +
    +        # Create the list of remotes to push to, given the current element and partial push config
    +        if not partial:
    +            push_remotes = [r for r in self._remotes[project] if (r.spec.push and not r.spec.partial_push)]
    +
    +        if self._has_partial_push_remotes:
    +            # Create a specific list of the remotes expecting the artifact to be pushed in a
    +            # partial state, without the optional subdir if it exists locally. There is no need
    +            # to attempt pushing a partial artifact to a remote that is queued to also receive
    +            # a full artifact.
    +            partial_remotes = [r for r in self._remotes[project] if (r.spec.partial_push and r.spec.push) and
    +                               r not in push_remotes]
     
             pushed = False
     

    @@ -713,7 +758,9 @@ class ArtifactCache():
                 display_key = element._get_brief_display_key()
                 element.status("Pushing artifact {} -> {}".format(display_key, remote.spec.url))
     
    -            if self.cas.push(refs, remote):
    +            # Passing the optional subdir allows remote artifacts that are cached in a
    +            # 'partial' state to be completed
    +            if self.cas.push(refs, remote, subdir=subdir):
                     element.info("Pushed artifact {} -> {}".format(display_key, remote.spec.url))
                     pushed = True
                 else:

    @@ -721,6 +768,19 @@ class ArtifactCache():
                         remote.spec.url, element._get_brief_display_key()
                     ))
     
    +        for remote in partial_remotes:
    +            remote.init()
    +            display_key = element._get_brief_display_key()
    +            element.status("Pushing partial artifact {} -> {}".format(display_key, remote.spec.url))
    +
    +            if self.cas.push(refs, remote, excluded_subdirs=subdir):
    +                element.info("Pushed partial artifact {} -> {}".format(display_key, remote.spec.url))
    +                pushed = True
    +            else:
    +                element.info("Remote ({}) already has {} partial cached".format(
    +                    remote.spec.url, element._get_brief_display_key()
    +                ))
    +
             return pushed
     
         # pull():

    @@ -748,14 +808,23 @@ class ArtifactCache():
                     element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
     
                     if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
    -                    element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
                         if subdir:
    -                        # Attempt to extract subdir into artifact extract dir if it already exists
    -                        # without containing the subdir. If the respective artifact extract dir does not
    -                        # exist a complete extraction will complete.
    -                        self.extract(element, key, subdir)
    -                    # no need to pull from additional remotes
    -                    return True
    +                        if not self.contains_subdir_artifact(element, key, subdir):
    +                            # The pull was expecting the specific subdir to be present in the remote;
    +                            # attempt to find it in the other available remotes
    +                            element.info("Pulled partial artifact {} <- {}. Attempting to retrieve {} from remotes"
    +                                         .format(display_key, remote.spec.url, subdir))
    +                        else:
    +                            element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    +                            # Attempt to extract subdir into the artifact extract dir if it already exists
    +                            # without containing the subdir. If the respective artifact extract dir does
    +                            # not exist, a complete extraction will be performed.
    +                            self.extract(element, key, subdir)
    +                            # no need to pull from additional remotes
    +                            return True
    +                    else:
    +                        element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    +                        return True
                     else:
                         element.info("Remote ({}) does not have {} cached".format(
                             remote.spec.url, element._get_brief_display_key()
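
    In practice this partitioning means a partial artifact is only offered to
    remotes that have opted in to partial pushes, while remotes expecting
    complete artifacts are skipped until the artifact is complete locally. As a
    rough sketch, assuming the list form of the artifacts configuration and
    hypothetical URLs, a project using both kinds of remote might be configured
    like:

      artifacts:
      - url: https://cache-full.example.com:11001
        push: true
      - url: https://cache-partial.example.com:11002
        push: true
        # Opt in to receiving artifacts without the optional subdir
        allow-partial-push: true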
    

  • buildstream/_cas/cascache.py

    @@ -198,34 +198,47 @@ class CASCache():
         #   (bool): True if pull was successful, False if ref was not available
         #
         def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
    -        try:
    -            remote.init()
     
    -            request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
    -            request.key = ref
    -            response = remote.ref_storage.GetReference(request)
    +        tree_found = False
     
    -            tree = remote_execution_pb2.Digest()
    -            tree.hash = response.digest.hash
    -            tree.size_bytes = response.digest.size_bytes
    +        while True:
    +            try:
    +                if not tree_found:
    +                    remote.init()
     
    -            # Check if the element artifact is present, if so just fetch the subdir.
    -            if subdir and os.path.exists(self.objpath(tree)):
    -                self._fetch_subdir(remote, tree, subdir)
    -            else:
    -                # Fetch artifact, excluded_subdirs determined in pullqueue
    -                self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
    +                    request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
    +                    request.key = ref
    +                    response = remote.ref_storage.GetReference(request)
     
    -            self.set_ref(ref, tree)
    +                    tree = remote_execution_pb2.Digest()
    +                    tree.hash = response.digest.hash
    +                    tree.size_bytes = response.digest.size_bytes
     
    -            return True
    -        except grpc.RpcError as e:
    -            if e.code() != grpc.StatusCode.NOT_FOUND:
    -                raise CASCacheError("Failed to pull ref {}: {}".format(ref, e)) from e
    -            else:
    -                return False
    -        except BlobNotFound as e:
    -            return False
    +                # Check if the element artifact is present, if so just fetch the subdir.
    +                if subdir and os.path.exists(self.objpath(tree)):
    +                    self._fetch_subdir(remote, tree, subdir)
    +                else:
    +                    # Fetch artifact, excluded_subdirs determined in pullqueue
    +                    self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
    +
    +                self.set_ref(ref, tree)
    +
    +                return True
    +            except grpc.RpcError as e:
    +                if e.code() != grpc.StatusCode.NOT_FOUND:
    +                    raise CASCacheError("Failed to pull ref {}: {}".format(ref, e)) from e
    +                else:
    +                    return False
    +            except BlobNotFound:
    +                if not excluded_subdirs and subdir:
    +                    # The remote has the top level digest but could not complete a full pull;
    +                    # attempt partial without the need to initialise and check for the artifact
    +                    # digest. This default behaviour of dropping back to partial pulls could
    +                    # be made a configurable warning given at artifactcache level.
    +                    tree_found = True
    +                    excluded_subdirs, subdir = subdir, excluded_subdirs
    +                else:
    +                    return False
     
         # pull_tree():
         #

    @@ -270,6 +283,8 @@ class CASCache():
         # Args:
         #     refs (list): The refs to push
         #     remote (CASRemote): The remote to push to
    +    #     subdir (string): Optional specific subdir to include in the push
    +    #     excluded_subdirs (list): The optional list of subdirs to not push
         #
         # Returns:
         #   (bool): True if any remote was updated, False if no pushes were required

    @@ -277,7 +292,7 @@ class CASCache():
         # Raises:
         #   (CASCacheError): if there was an error
         #
    -    def push(self, refs, remote):
    +    def push(self, refs, remote, *, subdir=None, excluded_subdirs=None):
             skipped_remote = True
             try:
                 for ref in refs:

    @@ -291,15 +306,18 @@ class CASCache():
                         response = remote.ref_storage.GetReference(request)
     
                         if response.digest.hash == tree.hash and response.digest.size_bytes == tree.size_bytes:
    -                        # ref is already on the server with the same tree
    -                        continue
    +                        # ref is already on the server with the same tree, however it might be partially cached.
    +                        # If the artifact is not set to be pushed partially, attempt to 'complete' the remote
    +                        # artifact if needed, else continue.
    +                        if excluded_subdirs or remote.verify_digest_on_remote(self._get_subdir(tree, subdir)):
    +                            continue
     
                     except grpc.RpcError as e:
                         if e.code() != grpc.StatusCode.NOT_FOUND:
                             # Intentionally re-raise RpcError for outer except block.
                             raise
     
    -                self._send_directory(remote, tree)
    +                self._send_directory(remote, tree, excluded_dir=excluded_subdirs)
     
                     request = buildstream_pb2.UpdateReferenceRequest(instance_name=remote.spec.instance_name)
                     request.keys.append(ref)

    @@ -782,10 +800,17 @@ class CASCache():
                     a += 1
                     b += 1
     
    -    def _reachable_refs_dir(self, reachable, tree, update_mtime=False):
    +    def _reachable_refs_dir(self, reachable, tree, update_mtime=False, subdir=False):
             if tree.hash in reachable:
                 return
     
    +        # If looping through subdir digests, skip processing if
    +        # the ref path does not exist, allowing for partial objects
    +        if subdir and not os.path.exists(self.objpath(tree)):
    +            return
    +
    +        # Raises a FileNotFound exception if the path does not exist,
    +        # which should only be thrown on the top level digest
             if update_mtime:
                 os.utime(self.objpath(tree))
     

    @@ -802,9 +827,9 @@ class CASCache():
                 reachable.add(filenode.digest.hash)
     
             for dirnode in directory.directories:
    -            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime)
    +            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, subdir=True)
     
    -    def _required_blobs(self, directory_digest):
    +    def _required_blobs(self, directory_digest, excluded_dir=None):
             # parse directory, and recursively add blobs
             d = remote_execution_pb2.Digest()
             d.hash = directory_digest.hash

    @@ -823,7 +848,8 @@ class CASCache():
                 yield d
     
             for dirnode in directory.directories:
    -            yield from self._required_blobs(dirnode.digest)
    +            if dirnode.name != excluded_dir:
    +                yield from self._required_blobs(dirnode.digest)
     
         # _ensure_blob():
         #

    @@ -928,6 +954,7 @@ class CASCache():
                 objpath = self._ensure_blob(remote, dir_digest)
     
                 directory = remote_execution_pb2.Directory()
    +
                 with open(objpath, 'rb') as f:
                     directory.ParseFromString(f.read())
     

    @@ -970,8 +997,8 @@ class CASCache():
     
             return dirdigest
     
    -    def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
    -        required_blobs = self._required_blobs(digest)
    +    def _send_directory(self, remote, digest, u_uid=uuid.uuid4(), excluded_dir=None):
    +        required_blobs = self._required_blobs(digest, excluded_dir=excluded_dir)
     
             missing_blobs = dict()
             # Limit size of FindMissingBlobs request
    

  • buildstream/_cas/casremote.py

    @@ -23,7 +23,8 @@ from .. import utils
     _MAX_PAYLOAD_BYTES = 1024 * 1024
     
     
    -class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
    +class CASRemoteSpec(namedtuple('CASRemoteSpec',
    +                               'url push partial_push server_cert client_key client_cert instance_name')):
     
         # _new_from_config_node
         #

    @@ -31,9 +32,18 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
         #
         @staticmethod
         def _new_from_config_node(spec_node, basedir=None):
    -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
    +        _yaml.node_validate(spec_node, ['url', 'push', 'allow-partial-push', 'server-cert', 'client-key',
    +                                        'client-cert', 'instance_name'])
             url = _yaml.node_get(spec_node, str, 'url')
             push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
    +        partial_push = _yaml.node_get(spec_node, bool, 'allow-partial-push', default_value=False)
    +
    +        # partial_push depends on push, raise an error if not configured correctly
    +        if partial_push and not push:
    +            provenance = _yaml.node_get_provenance(spec_node, 'allow-partial-push')
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "{}: allow-partial-push also requires push to be set".format(provenance))
    +
             if not url:
                 provenance = _yaml.node_get_provenance(spec_node, 'url')
                 raise LoadError(LoadErrorReason.INVALID_DATA,

    @@ -63,10 +73,10 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
                 raise LoadError(LoadErrorReason.INVALID_DATA,
                                 "{}: 'client-cert' was specified without 'client-key'".format(provenance))
     
    -        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
    +        return CASRemoteSpec(url, push, partial_push, server_cert, client_key, client_cert, instance_name)
     
     
    -CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
    +CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)
     
     
     class BlobNotFound(CASRemoteError):

  • buildstream/_context.py

    @@ -121,18 +121,10 @@ class Context():
             # Whether or not to attempt to pull build trees globally
             self.pull_buildtrees = None
     
    -        # Boolean, whether we double-check with the user that they meant to
    -        # remove a workspace directory.
    -        self.prompt_workspace_close_remove_dir = None
    -
             # Boolean, whether we double-check with the user that they meant to
             # close the workspace when they're using it to access the project.
             self.prompt_workspace_close_project_inaccessible = None
     
    -        # Boolean, whether we double-check with the user that they meant to do
    -        # a hard reset of a workspace, potentially losing changes.
    -        self.prompt_workspace_reset_hard = None
    -
             # Whether elements must be rebuilt when their dependencies have changed
             self._strict_build_plan = None
     

    @@ -260,16 +252,10 @@ class Context():
             prompt = _yaml.node_get(
                 defaults, Mapping, 'prompt')
             _yaml.node_validate(prompt, [
    -            'really-workspace-close-remove-dir',
                 'really-workspace-close-project-inaccessible',
    -            'really-workspace-reset-hard',
             ])
    -        self.prompt_workspace_close_remove_dir = _node_get_option_str(
    -            prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
             self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
                 prompt, 'really-workspace-close-project-inaccessible', ['ask', 'yes']) == 'ask'
    -        self.prompt_workspace_reset_hard = _node_get_option_str(
    -            prompt, 'really-workspace-reset-hard', ['ask', 'yes']) == 'ask'
     
             # Load per-projects overrides
             self._project_overrides = _yaml.node_get(defaults, Mapping, 'projects', default_value={})

  • buildstream/_frontend/cli.py

    @@ -841,11 +841,6 @@ def workspace_close(app, remove_dir, all_, elements):
             if nonexisting:
                 raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
     
    -        if app.interactive and remove_dir and app.context.prompt_workspace_close_remove_dir:
    -            if not click.confirm('This will remove all your changes, are you sure?'):
    -                click.echo('Aborting', err=True)
    -                sys.exit(-1)
    -
             for element_name in elements:
                 app.stream.workspace_close(element_name, remove_dir=remove_dir)
     

    @@ -879,11 +874,6 @@ def workspace_reset(app, soft, track_, all_, elements):
             if all_ and not app.stream.workspace_exists():
                 raise AppError("No open workspaces to reset")
     
    -        if app.interactive and not soft and app.context.prompt_workspace_reset_hard:
    -            if not click.confirm('This will remove all your changes, are you sure?'):
    -                click.echo('Aborting', err=True)
    -                sys.exit(-1)
    -
             if all_:
                 elements = tuple(element_name for element_name, _ in app.context.get_workspaces().list())
     

  • buildstream/_loader/loadelement.py

    @@ -39,6 +39,20 @@ from .types import Symbol, Dependency
     #    loader (Loader): The Loader object for this element
     #
     class LoadElement():
    +    # Dependency():
    +    #
    +    # A link from a LoadElement to its dependencies.
    +    #
    +    # Keeps a link to one of the current Element's dependencies, together with
    +    # its dependency type.
    +    #
    +    # Args:
    +    #    element (LoadElement): a LoadElement on which there is a dependency
    +    #    dep_type (str): the type of dependency this dependency link is
    +    class Dependency:
    +        def __init__(self, element, dep_type):
    +            self.element = element
    +            self.dep_type = dep_type
     
         def __init__(self, node, filename, loader):
     

    @@ -74,8 +88,11 @@ class LoadElement():
                 'build-depends', 'runtime-depends',
             ])
     
    -        # Extract the Dependencies
    -        self.deps = _extract_depends_from_node(self.node)
    +        self.dependencies = []
    +
    +    @property
    +    def junction(self):
    +        return self._loader.project.junction
     
         # depends():
         #

    @@ -101,8 +118,8 @@ class LoadElement():
                 return
     
             self._dep_cache = {}
    -        for dep in self.deps:
    -            elt = self._loader.get_element_for_dep(dep)
    +        for dep in self.dependencies:
    +            elt = dep.element
     
                 # Ensure the cache of the element we depend on
                 elt._ensure_depends_cache()

  • buildstream/_loader/loader.py

    @@ -19,7 +19,6 @@
     
     import os
     from functools import cmp_to_key
    -from collections import namedtuple
     from collections.abc import Mapping
     import tempfile
     import shutil

    @@ -32,8 +31,8 @@ from .._profile import Topics, profile_start, profile_end
     from .._includes import Includes
     from .._yamlcache import YamlCache
     
    -from .types import Symbol, Dependency
    -from .loadelement import LoadElement
    +from .types import Symbol
    +from .loadelement import LoadElement, _extract_depends_from_node
     from . import MetaElement
     from . import MetaSource
     from ..types import CoreWarnings

    @@ -112,7 +111,7 @@ class Loader():
     
             # First pass, recursively load files and populate our table of LoadElements
             #
    -        deps = []
    +        target_elements = []
     
             # XXX This will need to be changed to the context's top-level project if this method
             # is ever used for subprojects

    @@ -122,10 +121,10 @@ class Loader():
             with YamlCache.open(self._context, cache_file) as yaml_cache:
                 for target in targets:
                     profile_start(Topics.LOAD_PROJECT, target)
    -                junction, name, loader = self._parse_name(target, rewritable, ticker,
    -                                                          fetch_subprojects=fetch_subprojects)
    -                loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
    -                deps.append(Dependency(name, junction=junction))
    +                _junction, name, loader = self._parse_name(target, rewritable, ticker,
    +                                                           fetch_subprojects=fetch_subprojects)
    +                element = loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
    +                target_elements.append(element)
                     profile_end(Topics.LOAD_PROJECT, target)
     

    @@ -134,29 +133,29 @@ class Loader():
     
             # Set up a dummy element that depends on all top-level targets
             # to resolve potential circular dependencies between them
    -        DummyTarget = namedtuple('DummyTarget', ['name', 'full_name', 'deps'])
    -
    -        dummy = DummyTarget(name='', full_name='', deps=deps)
    -        self._elements[''] = dummy
    +        dummy_target = LoadElement("", "", self)
    +        dummy_target.dependencies.extend(
    +            LoadElement.Dependency(element, Symbol.RUNTIME)
    +            for element in target_elements
    +        )
     
             profile_key = "_".join(t for t in targets)
             profile_start(Topics.CIRCULAR_CHECK, profile_key)
    -        self._check_circular_deps('')
    +        self._check_circular_deps(dummy_target)
             profile_end(Topics.CIRCULAR_CHECK, profile_key)
     
             ret = []
             #
             # Sort direct dependencies of elements by their dependency ordering
             #
    -        for target in targets:
    -            profile_start(Topics.SORT_DEPENDENCIES, target)
    -            junction, name, loader = self._parse_name(target, rewritable, ticker,
    -                                                      fetch_subprojects=fetch_subprojects)
    -            loader._sort_dependencies(name)
    -            profile_end(Topics.SORT_DEPENDENCIES, target)
    +        for element in target_elements:
    +            loader = element._loader
    +            profile_start(Topics.SORT_DEPENDENCIES, element.name)
    +            loader._sort_dependencies(element)
    +            profile_end(Topics.SORT_DEPENDENCIES, element.name)
                 # Finally, wrap what we have into LoadElements and return the target
                 #
    -            ret.append(loader._collect_element(name))
    +            ret.append(loader._collect_element(element))
     
             return ret
     

    @@ -184,22 +183,6 @@ class Loader():
                 if os.path.exists(self._tempdir):
                     shutil.rmtree(self._tempdir)
     
    -    # get_element_for_dep():
    -    #
    -    # Gets a cached LoadElement by Dependency object
    -    #
    -    # This is used by LoadElement
    -    #
    -    # Args:
    -    #    dep (Dependency): The dependency to search for
    -    #
    -    # Returns:
    -    #    (LoadElement): The cached LoadElement
    -    #
    -    def get_element_for_dep(self, dep):
    -        loader = self._get_loader_for_dep(dep)
    -        return loader._elements[dep.name]
    -
         ###########################################
         #            Private Methods              #
         ###########################################

    @@ -272,8 +255,10 @@ class Loader():
     
             self._elements[filename] = element
     
    +        dependencies = _extract_depends_from_node(node)
    +
             # Load all dependency files for the new LoadElement
    -        for dep in element.deps:
    +        for dep in dependencies:
                 if dep.junction:
                     self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
                     loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,

    @@ -288,7 +273,9 @@ class Loader():
                                     "{}: Cannot depend on junction"
                                     .format(dep.provenance))
     
    -        deps_names = [dep.name for dep in element.deps]
    +            element.dependencies.append(LoadElement.Dependency(dep_element, dep.dep_type))
    +
    +        deps_names = [dep.name for dep in dependencies]
             self._warn_invalid_elements(deps_names)
     
             return element

    @@ -299,12 +286,12 @@ class Loader():
         # dependencies already resolved.
         #
         # Args:
    -    #    element_name (str): The element-path relative element name to check
    +    #    element (LoadElement): The element to check
         #
         # Raises:
         #    (LoadError): In case there was a circular dependency error
         #
    -    def _check_circular_deps(self, element_name, check_elements=None, validated=None, sequence=None):
    +    def _check_circular_deps(self, element, check_elements=None, validated=None, sequence=None):
     
             if check_elements is None:
                 check_elements = {}

    @@ -313,38 +300,31 @@ class Loader():
             if sequence is None:
                 sequence = []
     
    -        element = self._elements[element_name]
    -
    -        # element name must be unique across projects
    -        # to be usable as key for the check_elements and validated dicts
    -        element_name = element.full_name
    -
             # Skip already validated branches
    -        if validated.get(element_name) is not None:
    +        if validated.get(element) is not None:
                 return
     
    -        if check_elements.get(element_name) is not None:
    +        if check_elements.get(element) is not None:
                 # Create `chain`, the loop of element dependencies from this
                 # element back to itself, by trimming everything before this
                 # element from the sequence under consideration.
    -            chain = sequence[sequence.index(element_name):]
    -            chain.append(element_name)
    +            chain = sequence[sequence.index(element.full_name):]
    +            chain.append(element.full_name)
                 raise LoadError(LoadErrorReason.CIRCULAR_DEPENDENCY,
                                 ("Circular dependency detected at element: {}\n" +
                                  "Dependency chain: {}")
    -                            .format(element.name, " -> ".join(chain)))
    +                            .format(element.full_name, " -> ".join(chain)))
     
             # Push / Check each dependency / Pop
    -        check_elements[element_name] = True
    -        sequence.append(element_name)
    -        for dep in element.deps:
    -            loader = self._get_loader_for_dep(dep)
    -            loader._check_circular_deps(dep.name, check_elements, validated, sequence)
    -        del check_elements[element_name]
    +        check_elements[element] = True
    +        sequence.append(element.full_name)
    +        for dep in element.dependencies:
    +            dep.element._loader._check_circular_deps(dep.element, check_elements, validated, sequence)
    +        del check_elements[element]
             sequence.pop()
     
             # Eliminate duplicate paths
    -        validated[element_name] = True
    +        validated[element] = True
     
         # _sort_dependencies():
         #

    @@ -357,28 +337,21 @@ class Loader():
         # sorts throughout the build process.
         #
         # Args:
    -    #    element_name (str): The element-path relative element name to sort
    +    #    element (LoadElement): The element to sort
         #
    -    def _sort_dependencies(self, element_name, visited=None):
    +    def _sort_dependencies(self, element, visited=None):
             if visited is None:
    -            visited = {}
    +            visited = set()
     
    -        element = self._elements[element_name]
    -
    -        # element name must be unique across projects
    -        # to be usable as key for the visited dict
    -        element_name = element.full_name
    -
    -        if visited.get(element_name) is not None:
    +        if element in visited:
                 return
     
    -        for dep in element.deps:
    -            loader = self._get_loader_for_dep(dep)
    -            loader._sort_dependencies(dep.name, visited=visited)
    +        for dep in element.dependencies:
    +            dep.element._loader._sort_dependencies(dep.element, visited=visited)
     
             def dependency_cmp(dep_a, dep_b):
    -            element_a = self.get_element_for_dep(dep_a)
    -            element_b = self.get_element_for_dep(dep_b)
    +            element_a = dep_a.element
    +            element_b = dep_b.element
     
                 # Sort on inter element dependency first
                 if element_a.depends(element_b):

    @@ -395,21 +368,21 @@ class Loader():
                         return -1
     
                 # All things being equal, string comparison.
    -            if dep_a.name > dep_b.name:
    +            if element_a.name > element_b.name:
                     return 1
    -            elif dep_a.name < dep_b.name:
    +            elif element_a.name < element_b.name:
                     return -1
     
                 # Sort local elements before junction elements
                 # and use string comparison between junction elements
    -            if dep_a.junction and dep_b.junction:
    -                if dep_a.junction > dep_b.junction:
    +            if element_a.junction and element_b.junction:
    +                if element_a.junction > element_b.junction:
                         return 1
    -                elif dep_a.junction < dep_b.junction:
    +                elif element_a.junction < element_b.junction:
                         return -1
    -            elif dep_a.junction:
    +            elif element_a.junction:
                     return -1
    -            elif dep_b.junction:
    +            elif element_b.junction:
                     return 1
     
                 # This wont ever happen

    @@ -418,26 +391,23 @@ class Loader():
             # Now dependency sort, we ensure that if any direct dependency
             # directly or indirectly depends on another direct dependency,
             # it is found later in the list.
    -        element.deps.sort(key=cmp_to_key(dependency_cmp))
    +        element.dependencies.sort(key=cmp_to_key(dependency_cmp))
     
    -        visited[element_name] = True
    +        visited.add(element)
     
         # _collect_element()
         #
         # Collect the toplevel elements we have
         #
         # Args:
    -    #    element_name (str): The element-path relative element name to sort
    +    #    element (LoadElement): The element for which to load a MetaElement
         #
         # Returns:
         #    (MetaElement): A recursively loaded MetaElement
         #
    -    def _collect_element(self, element_name):
    -
    -        element = self._elements[element_name]
    -
    +    def _collect_element(self, element):
             # Return the already built one, if we already built it
    -        meta_element = self._meta_elements.get(element_name)
    +        meta_element = self._meta_elements.get(element.name)
             if meta_element:
                 return meta_element
     

    @@ -461,10 +431,10 @@ class Loader():
                     del source[Symbol.DIRECTORY]
     
                 index = sources.index(source)
    -            meta_source = MetaSource(element_name, index, element_kind, kind, source, directory)
    +            meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
                 meta_sources.append(meta_source)
     
    -        meta_element = MetaElement(self.project, element_name, element_kind,
    +        meta_element = MetaElement(self.project, element.name, element_kind,
                                        elt_provenance, meta_sources,
                                        _yaml.node_get(node, Mapping, Symbol.CONFIG, default_value={}),
                                        _yaml.node_get(node, Mapping, Symbol.VARIABLES, default_value={}),

    @@ -475,12 +445,12 @@ class Loader():
                                        element_kind == 'junction')
     
             # Cache it now, make sure it's already there before recursing
    -        self._meta_elements[element_name] = meta_element
    +        self._meta_elements[element.name] = meta_element
     
             # Descend
    -        for dep in element.deps:
    -            loader = self._get_loader_for_dep(dep)
    -            meta_dep = loader._collect_element(dep.name)
    +        for dep in element.dependencies:
    +            loader = dep.element._loader
    +            meta_dep = loader._collect_element(dep.element)
                 if dep.dep_type != 'runtime':
                     meta_element.build_dependencies.append(meta_dep)
                 if dep.dep_type != 'build':

    @@ -539,7 +509,7 @@ class Loader():
                     return None
     
             # meta junction element
    -        meta_element = self._collect_element(filename)
    +        meta_element = self._collect_element(self._elements[filename])
             if meta_element.kind != 'junction':
                 raise LoadError(LoadErrorReason.INVALID_DATA,
                                 "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))

    @@ -601,23 +571,6 @@ class Loader():
     
             return loader
     
    -    # _get_loader_for_dep():
    -    #
    -    # Gets the appropriate Loader for a Dependency object
    -    #
    -    # Args:
    -    #    dep (Dependency): A Dependency object
    -    #
    -    # Returns:
    -    #    (Loader): The Loader object to use for this Dependency
    -    #
    -    def _get_loader_for_dep(self, dep):
    -        if dep.junction:
    -            # junction dependency, delegate to appropriate loader
    -            return self._loaders[dep.junction]
    -        else:
    -            return self
    -
         # _parse_name():
         #
         # Get junction and base name of element along with loader for the sub-project

  • buildstream/data/userconfig.yaml

    @@ -112,14 +112,6 @@ logging:
     #
     prompt:
     
    -  # Whether to really proceed with 'bst workspace close --remove-dir' removing
    -  # a workspace directory, potentially losing changes.
    -  #
    -  #  ask - Ask the user if they are sure.
    -  #  yes - Always remove, without asking.
    -  #
    -  really-workspace-close-remove-dir: ask
    -
       # Whether to really proceed with 'bst workspace close' when doing so would
       # stop them from running bst commands in this workspace.
       #

    @@ -127,11 +119,3 @@ prompt:
       #  yes - Always close, without asking.
       #
       really-workspace-close-project-inaccessible: ask
    -
    -  # Whether to really proceed with 'bst workspace reset' doing a hard reset of
    -  # a workspace, potentially losing changes.
    -  #
    -  #  ask - Ask the user if they are sure.
    -  #  yes - Always hard reset, without asking.
    -  #
    -  really-workspace-reset-hard: ask
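
    The one remaining prompt can still be suppressed in buildstream.conf, for
    example:

      prompt:
        # Always close without asking, even when the project would become
        # inaccessible afterwards
        really-workspace-close-project-inaccessible: yes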

  • buildstream/element.py
    ... ... @@ -1797,13 +1797,19 @@ class Element(Plugin):
    1797 1797
         #   (bool): True if this element does not need a push job to be created
    
    1798 1798
         #
    
    1799 1799
         def _skip_push(self):
    
    1800
    +
    
    1800 1801
             if not self.__artifacts.has_push_remotes(element=self):
    
    1801 1802
                 # No push remotes for this element's project
    
    1802 1803
                 return True
    
    1803 1804
     
    
    1804 1805
             # Do not push elements that aren't cached, or that are cached with a dangling buildtree
    
    1805
    -        # artifact unless element type is expected to have an an empty buildtree directory
    
    1806
    -        if not self._cached_buildtree():
    
    1806
    +        # artifact unless element type is expected to have an an empty buildtree directory. Check
    
    1807
    +        # that this default behaviour is not overriden via a remote configured to allow pushing
    
    1808
    +        # artifacts without their corresponding buildtree.
    
    1809
    +        if not self._cached():
    
    1810
    +            return True
    
    1811
    +
    
    1812
    +        if not self._cached_buildtree() and not self.__artifacts.has_partial_push_remotes(element=self):
    
    1807 1813
                 return True
    
    1808 1814
     
    
    1809 1815
             # Do not push tainted artifact
    
    ... ... @@ -1814,11 +1820,14 @@ class Element(Plugin):
    1814 1820
     
    
    1815 1821
         # _push():
    
    1816 1822
         #
    
    1817
    -    # Push locally cached artifact to remote artifact repository.
    
    1823
    +    # Push locally cached artifact to remote artifact repository. An attempt
    
    1824
    +    # will be made to push partial artifacts if given current config dictates.
    
    1825
    +    # If a remote set for 'full' artifact pushes is found to be cached partially
    
    1826
    +    # in the remote, an attempt will be made to 'complete' it.
    
    1818 1827
         #
    
    1819 1828
         # Returns:
    
    1820 1829
         #   (bool): True if the remote was updated, False if it already existed
    
    1821
    -    #           and no updated was required
    
    1830
    +    #           and no update was required
    
    1822 1831
         #
    
    1823 1832
         def _push(self):
    
    1824 1833
             self.__assert_cached()
    
    ... ... @@ -1827,8 +1836,17 @@ class Element(Plugin):
    1827 1836
                 self.warn("Not pushing tainted artifact.")
    
    1828 1837
                 return False
    
    1829 1838
     
    
    1830
    -        # Push all keys used for local commit
    
    1831
    -        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit())
    
    1839
+        # Push all keys used for local commit; this could be full or partial,
    
    1840
+        # given the previous _skip_push() logic. If the buildtree isn't cached,
    
    1841
+        # push a partial artifact
    
    1842
    +
    
    1843
    +        partial = False
    
    1844
    +        subdir = 'buildtree'
    
    1845
    +        if not self._cached_buildtree():
    
    1846
    +            partial = True
    
    1847
    +
    
    1848
    +        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit(), partial=partial, subdir=subdir)
    
    1849
    +
    
    1832 1850
             if not pushed:
    
    1833 1851
                 return False
    
    1834 1852
     
    

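The combined effect of the _skip_push() and _push() hunks above amounts to a small decision flow. A minimal sketch of that flow, where 'element' and 'artifacts' are illustrative stand-ins rather than the real BuildStream objects (only the method names and the control flow mirror the diff):

    # Sketch of the push decision introduced above; not the actual implementation.
    def decide_push(element, artifacts):
        if not element._cached():
            return None                      # nothing cached, skip the push job
        if not element._cached_buildtree():
            # A missing buildtree is only pushable when some remote
            # was configured with 'allow-partial-push'
            if not artifacts.has_partial_push_remotes(element=element):
                return None
            return {'partial': True, 'subdir': 'buildtree'}
        # Full artifact available: push everything, buildtree included
        return {'partial': False, 'subdir': 'buildtree'}
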
  • buildstream/plugins/elements/filter.py
    ... ... @@ -20,25 +20,127 @@
    20 20
     """
    
    21 21
     filter - Extract a subset of files from another element
    
    22 22
     =======================================================
    
    23
    -This filters another element by producing an output that is a subset of
    
    24
    -the filtered element.
    
    23
    +Filter another element by producing an output that is a subset of
    
    24
    +the parent element's output. Subsets are defined by the parent element's
    
    25
    +:ref:`split rules <public_split_rules>`.
    
    25 26
     
    
    26
    -To specify the element to filter, specify it as the one and only build
    
    27
    -dependency to filter. See :ref:`Dependencies <format_dependencies>`
    
    28
    -for what dependencies are and how to specify them.
    
    27
    +Overview
    
    28
    +--------
    
    29
+A filter element must have exactly one *build* dependency, which is the
    
    30
+'parent' element whose output we would like to filter.
    
    31
+Runtime dependencies may also be specified, which can be useful to propagate them
    
    32
    +forward from this filter element onto its reverse dependencies.
    
    33
+See :ref:`Dependencies <format_dependencies>` for how dependencies are specified.
    
    29 34
     
    
    30
    -Dependencies aside from the filtered element may be specified, but
    
    31
    -they must be runtime dependencies only. This can be useful to propagate
    
    32
    -runtime dependencies forward from this filter element onto its reverse
    
    33
    -dependencies.
    
    35
    +When workspaces are opened, closed or reset on a filter element, or this
    
    36
    +element is tracked, the filter element will transparently pass on the command
    
    37
    +to its parent element (the sole build-dependency).
    
    34 38
     
    
    35
    -When workspaces are opened, closed or reset on this element, or this
    
    36
    -element is tracked, instead of erroring due to a lack of sources, this
    
    37
    -element will transparently pass on the command to its sole build-dependency.
    
    39
    +Example
    
    40
    +-------
    
    41
+Consider a simple import element, ``import.bst``, which imports the local files
    
    42
    +'foo', 'bar' and 'baz' (each stored in ``files/``, relative to the project's root):
    
    38 43
     
    
    39
    -The default configuration and possible options are as such:
    
    40
    -  .. literalinclude:: ../../../buildstream/plugins/elements/filter.yaml
    
    41
    -     :language: yaml
    
    44
    +.. code:: yaml
    
    45
    +
    
    46
    +   kind: import
    
    47
    +
    
    48
    +   # Specify sources to import
    
    49
    +   sources:
    
    50
    +   - kind: local
    
    51
    +     path: files
    
    52
    +
    
    53
    +   # Specify public domain data, visible to other elements
    
    54
    +   public:
    
    55
    +     bst:
    
    56
    +       split-rules:
    
    57
    +         foo:
    
    58
    +         - /foo
    
    59
    +         bar:
    
    60
    +         - /bar
    
    61
    +
    
    62
    +.. note::
    
    63
    +
    
    64
    +   We can make an element's metadata visible to all reverse dependencies by making use
    
    65
    +   of the ``public:`` field. See the :ref:`public data documentation <format_public>`
    
    66
    +   for more information.
    
    67
    +
    
    68
    +In this example, ``import.bst`` will serve as the 'parent' of the filter element, thus
    
    69
    +its output will be filtered. It is important to understand that the artifact of the
    
    70
+above element will contain the files 'foo', 'bar' and 'baz'.
    
    71
    +
    
    72
+Now, to produce an element whose artifact contains the file 'foo', and exclusively 'foo',
    
    73
    +we can define the following filter, ``filter-foo.bst``:
    
    74
    +
    
    75
    +.. code:: yaml
    
    76
    +
    
    77
    +   kind: filter
    
    78
    +
    
    79
    +   # Declare the sole build-dependency of the filter element
    
    80
    +   depends:
    
    81
    +   - filename: import.bst
    
    82
    +     type: build
    
    83
    +
    
    84
    +   # Declare a list of domains to include in the filter's artifact
    
    85
    +   config:
    
    86
    +     include:
    
    87
    +     - foo
    
    88
    +
    
    89
    +.. note::
    
    90
    +
    
    91
    +   We can also specify build-dependencies with a 'build-depends' field which has been
    
    92
    +   available since :ref:`format version 14 <project_format_version>`. See the
    
    93
    +   :ref:`Build-Depends documentation <format_build_depends>` for more detail.
    
    94
    +
    
    95
+It should be noted that an empty ``include:`` list (the default) will include all
    
    96
+split-rule domains specified in the parent element, which, in this example, would be the
    
    97
+files 'foo' and 'bar' (the file 'baz' is not covered by any split rule).
    
    98
    +
    
    99
    +Equally, we can use the ``exclude:`` statement to create the same artifact (which
    
    100
    +only contains the file 'foo') by declaring the following element, ``exclude-bar.bst``:
    
    101
    +
    
    102
    +.. code:: yaml
    
    103
    +
    
    104
    +   kind: filter
    
    105
    +
    
    106
    +   # Declare the sole build-dependency of the filter element
    
    107
    +   depends:
    
    108
    +   - filename: import.bst
    
    109
    +     type: build
    
    110
    +
    
    111
+   # Declare a list of domains to exclude from the filter's artifact
    
    112
    +   config:
    
    113
    +     exclude:
    
    114
    +     - bar
    
    115
    +
    
    116
    +In addition to the ``include:`` and ``exclude:`` fields, there exists an ``include-orphans:``
    
    117
    +(Boolean) field, which defaults to ``False``. This will determine whether to include files
    
    118
+which are not present in the 'split-rules'. For example, if we wanted to keep only those files
    
    119
+which are not covered by any split rule, we can define the following element, ``filter-misc.bst``:
    
    120
    +
    
    121
    +.. code:: yaml
    
    122
    +
    
    123
    +   kind: filter
    
    124
    +
    
    125
    +   # Declare the sole build-dependency of the filter element
    
    126
    +   depends:
    
    127
    +   - filename: import.bst
    
    128
    +     type: build
    
    129
    +
    
    130
+   # Filter out all files which are declared as split rules, keeping only orphans
    
    131
    +   config:
    
    132
    +     exclude:
    
    133
    +     - foo
    
    134
    +     - bar
    
    135
    +     include-orphans: True
    
    136
    +
    
    137
    +The artifact of ``filter-misc.bst`` will only contain the file 'baz'.
    
    138
    +
    
    139
+Below is more information regarding the default configuration and possible options
    
    140
    +of the filter element:
    
    141
    +
    
    142
    +.. literalinclude:: ../../../buildstream/plugins/elements/filter.yaml
    
    143
    +   :language: yaml
    
    42 144
     """
    
    43 145
     
    
    44 146
     from buildstream import Element, ElementError, Scope
    
    ... ... @@ -47,6 +149,8 @@ from buildstream import Element, ElementError, Scope
    47 149
     class FilterElement(Element):
    
    48 150
         # pylint: disable=attribute-defined-outside-init
    
    49 151
     
    
    152
    +    BST_ARTIFACT_VERSION = 1
    
    153
    +
    
    50 154
         # The filter element's output is its dependencies, so
    
    51 155
         # we must rebuild if the dependencies change even when
    
    52 156
         # not in strict build plans.
    
    ... ... @@ -64,6 +168,8 @@ class FilterElement(Element):
    64 168
             self.include = self.node_get_member(node, list, 'include')
    
    65 169
             self.exclude = self.node_get_member(node, list, 'exclude')
    
    66 170
             self.include_orphans = self.node_get_member(node, bool, 'include-orphans')
    
    171
    +        self.include_provenance = self.node_provenance(node, member_name='include')
    
    172
    +        self.exclude_provenance = self.node_provenance(node, member_name='exclude')
    
    67 173
     
    
    68 174
         def preflight(self):
    
    69 175
             # Exactly one build-depend is permitted
    
    ... ... @@ -102,7 +208,32 @@ class FilterElement(Element):
    102 208
     
    
    103 209
         def assemble(self, sandbox):
    
    104 210
             with self.timed_activity("Staging artifact", silent_nested=True):
    
    105
    -            for dep in self.dependencies(Scope.BUILD):
    
    211
    +            for dep in self.dependencies(Scope.BUILD, recurse=False):
    
    212
    +                # Check that all the included/excluded domains exist
    
    213
    +                pub_data = dep.get_public_data('bst')
    
    214
    +                split_rules = pub_data.get('split-rules', {})
    
    215
    +                unfound_includes = []
    
    216
    +                for domain in self.include:
    
    217
    +                    if domain not in split_rules:
    
    218
    +                        unfound_includes.append(domain)
    
    219
    +                unfound_excludes = []
    
    220
    +                for domain in self.exclude:
    
    221
    +                    if domain not in split_rules:
    
    222
    +                        unfound_excludes.append(domain)
    
    223
    +
    
    224
    +                detail = []
    
    225
    +                if unfound_includes:
    
    226
    +                    detail.append("Unknown domains were used in {}".format(self.include_provenance))
    
    227
    +                    detail.extend([' - {}'.format(domain) for domain in unfound_includes])
    
    228
    +
    
    229
    +                if unfound_excludes:
    
    230
    +                    detail.append("Unknown domains were used in {}".format(self.exclude_provenance))
    
    231
    +                    detail.extend([' - {}'.format(domain) for domain in unfound_excludes])
    
    232
    +
    
    233
    +                if detail:
    
    234
    +                    detail = '\n'.join(detail)
    
    235
    +                    raise ElementError("Unknown domains declared.", detail=detail)
    
    236
    +
    
    106 237
                     dep.stage_artifact(sandbox, include=self.include,
    
    107 238
                                        exclude=self.exclude, orphans=self.include_orphans)
    
    108 239
             return ""
    

  • buildstream/plugins/elements/filter.yaml
    ... ... @@ -2,20 +2,20 @@
    2 2
     # Filter element configuration
    
    3 3
     config:
    
    4 4
     
    
    5
    -  # A list of domains to include from each artifact, as
    
    6
    -  # they were defined in the element's 'split-rules'.
    
    5
    +  # A list of domains to include in each artifact, as
    
    6
    +  # they were defined as public data in the parent
    
    7
    +  # element's 'split-rules'.
    
    7 8
       #
    
    8
    -  # Since domains can be added, it is not an error to
    
    9
    -  # specify domains which may not exist for all of the
    
    10
    -  # elements in this composition.
    
    9
    +  # If a domain is specified that does not exist, the
    
    10
    +  # filter element will fail to build.
    
    11 11
       #
    
    12 12
       # The default empty list indicates that all domains
    
    13
    -  # from each dependency should be included.
    
    13
    +  # of the parent's artifact should be included.
    
    14 14
       #
    
    15 15
       include: []
    
    16 16
     
    
    17 17
       # A list of domains to exclude from each artifact, as
    
    18
    -  # they were defined in the element's 'split-rules'.
    
    18
    +  # they were defined in the parent element's 'split-rules'.
    
    19 19
       #
    
    20 20
       # In the case that a file is spoken for by a domain
    
    21 21
       # in the 'include' list and another in the 'exclude'
    
    ... ... @@ -23,7 +23,7 @@ config:
    23 23
       exclude: []
    
    24 24
     
    
    25 25
       # Whether to include orphan files which are not
    
    26
    -  # included by any of the 'split-rules' present on
    
    27
    -  # a given element.
    
    26
    +  # included by any of the 'split-rules' present in
    
    27
    +  # the parent element.
    
    28 28
       #
    
    29 29
       include-orphans: False

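The include/exclude/orphan semantics described above can be modelled in a few lines. A rough sketch under the assumption that each file maps to the split-rule domains that claim it; this is not BuildStream's actual staging code, and it reuses the 'foo'/'bar'/'baz' example from the filter documentation:

    import_files = {'foo': ['foo'], 'bar': ['bar'], 'baz': []}  # file -> owning domains

    def select_files(files_to_domains, include, exclude, include_orphans):
        # An empty 'include' list means every domain is included
        selected = set()
        for fname, domains in files_to_domains.items():
            if not domains:  # orphan: not claimed by any split-rule domain
                if include_orphans:
                    selected.add(fname)
                continue
            included = not include or any(d in include for d in domains)
            excluded = any(d in exclude for d in domains)
            if included and not excluded:  # 'exclude' wins on conflicts
                selected.add(fname)
        return selected

    assert select_files(import_files, ['foo'], [], False) == {'foo'}        # filter-foo.bst
    assert select_files(import_files, [], ['bar'], False) == {'foo'}        # exclude-bar.bst
    assert select_files(import_files, [], ['foo', 'bar'], True) == {'baz'}  # filter-misc.bst
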
  • doc/source/using_config.rst
    ... ... @@ -59,6 +59,15 @@ configuration:
    59 59
          # Add another cache to pull from
    
    60 60
          - url: https://anothercache.com/artifacts:8080
    
    61 61
            server-cert: another_server.crt
    
    62
    +     # Add a cache to push/pull to/from, specifying
    
    63
+     # that you wish to push artifacts in a 'partial'
    
    64
+     # state (that is, without the respective buildtree).
    
    65
+     # Note that allow-partial-push requires push to also
    
    66
+     # be set.
    
    67
    +     - url: https://anothercache.com/artifacts:11003
    
    68
    +       push: true
    
    69
    +       allow-partial-push: true
    
    70
    +
    
    62 71
     
    
    63 72
     .. note::
    
    64 73
     
    
    ... ... @@ -86,6 +95,14 @@ configuration:
    86 95
              # Add another cache to pull from
    
    87 96
              - url: https://ourprojectcache.com/artifacts:8080
    
    88 97
                server-cert: project_server.crt
    
    98
    +         # Add a cache to push/pull to/from, specifying
    
    99
+         # that you wish to push artifacts in a 'partial'
    
    100
+         # state (that is, without the respective buildtree).
    
    101
+         # Note that allow-partial-push requires push to also
    
    102
+         # be set.
    
    103
    +         - url: https://anothercache.com/artifacts:11003
    
    104
    +           push: true
    
    105
    +           allow-partial-push: true
    
    89 106
     
    
    90 107
     
    
    91 108
     .. note::
    

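User configuration accepts either a single remote mapping or a list of them, and the integration test further below drives both shapes through cli.configure(). For reference, a minimal sketch of the two equivalent Python forms as plain data (the URLs are placeholders):

    # Single remote, full pushes only
    single = {'artifacts': {'url': 'https://cache.example.com/artifacts:11001',
                            'push': True}}

    # Multiple remotes; the second also accepts partial (buildtree-less) pushes
    multiple = {'artifacts': [
        {'url': 'https://cache.example.com/artifacts:11001', 'push': True},
        {'url': 'https://other.example.com/artifacts:11003',
         'push': True, 'allow-partial-push': True},
    ]}
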
  • tests/artifactcache/config.py
    ... ... @@ -139,3 +139,28 @@ def test_missing_certs(cli, datafiles, config_key, config_value):
    139 139
         # This does not happen for a simple `bst show`.
    
    140 140
         result = cli.run(project=project, args=['artifact', 'pull', 'element.bst'])
    
    141 141
         result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    
    142
    +
    
    143
    +
    
    144
    +# Assert that if allow-partial-push is specified as true without push also being
    
    145
    +# set likewise, we get a comprehensive LoadError instead of an unhandled exception.
    
    146
    +@pytest.mark.datafiles(DATA_DIR)
    
    147
    +def test_partial_push_error(cli, datafiles):
    
    148
    +    project = os.path.join(datafiles.dirname, datafiles.basename, 'project', 'elements')
    
    149
    +
    
    150
    +    project_conf = {
    
    151
    +        'name': 'test',
    
    152
    +
    
    153
    +        'artifacts': {
    
    154
    +            'url': 'https://cache.example.com:12345',
    
    155
    +            'allow-partial-push': 'True'
    
    156
    +        }
    
    157
    +    }
    
    158
    +    project_conf_file = os.path.join(project, 'project.conf')
    
    159
    +    _yaml.dump(project_conf, project_conf_file)
    
    160
    +
    
    161
    +    # Use `pull` here to ensure we try to initialize the remotes, triggering the error
    
    162
    +    #
    
    163
    +    # This does not happen for a simple `bst show`.
    
    164
    +    result = cli.run(project=project, args=['artifact', 'pull', 'target.bst'])
    
    165
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    
    166
    +    assert "allow-partial-push also requires push to be set" in result.stderr

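The spec-level validation this test exercises is not part of this excerpt; a plausible sketch of the guard, assuming it raises the same LoadError the test asserts (the helper name and arguments here are hypothetical):

    from buildstream._exceptions import LoadError, LoadErrorReason

    # Hypothetical guard over a parsed remote spec: partial pushing is
    # meaningless without generic push permission, so fail the load early.
    def check_remote_spec(url, push=False, allow_partial_push=False):
        if allow_partial_push and not push:
            raise LoadError(LoadErrorReason.INVALID_DATA,
                            "Artifact cache '{}': allow-partial-push also requires "
                            "push to be set".format(url))
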
  • tests/cachekey/cachekey.py
    ... ... @@ -214,3 +214,41 @@ def test_cache_key_fatal_warnings(cli, tmpdir, first_warnings, second_warnings,
    214 214
         second_keys = run_get_cache_key("second", second_warnings)
    
    215 215
     
    
    216 216
         assert compare_cache_keys(first_keys, second_keys) == identical_keys
    
    217
    +
    
    218
    +
    
    219
    +@pytest.mark.datafiles(DATA_DIR)
    
    220
    +def test_keys_stable_over_targets(cli, datafiles):
    
    221
    +    root_element = 'elements/key-stability/top-level.bst'
    
    222
    +    target1 = 'elements/key-stability/t1.bst'
    
    223
    +    target2 = 'elements/key-stability/t2.bst'
    
    224
    +
    
    225
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    226
    +    full_graph_result = cli.run(project=project, args=[
    
    227
    +        'show',
    
    228
    +        '--format', '%{name}::%{full-key}',
    
    229
    +        root_element
    
    230
    +    ])
    
    231
    +    full_graph_result.assert_success()
    
    232
    +    all_cache_keys = parse_output_keys(full_graph_result.output)
    
    233
    +
    
    234
    +    ordering1_result = cli.run(project=project, args=[
    
    235
    +        'show',
    
    236
    +        '--format', '%{name}::%{full-key}',
    
    237
    +        target1,
    
    238
    +        target2
    
    239
    +    ])
    
    240
    +    ordering1_result.assert_success()
    
    241
    +    ordering1_cache_keys = parse_output_keys(ordering1_result.output)
    
    242
    +
    
    243
    +    ordering2_result = cli.run(project=project, args=[
    
    244
    +        'show',
    
    245
    +        '--format', '%{name}::%{full-key}',
    
    246
    +        target2,
    
    247
    +        target1
    
    248
    +    ])
    
    249
    +    ordering2_result.assert_success()
    
    250
    +    ordering2_cache_keys = parse_output_keys(ordering2_result.output)
    
    251
    +
    
    252
    +    for element in ordering1_cache_keys:
    
    253
    +        assert ordering1_cache_keys[element] == ordering2_cache_keys[element]
    
    254
    +        assert ordering1_cache_keys[element] == all_cache_keys[element]

  • tests/cachekey/project/elements/key-stability/aaa.bst
    1
    +kind: import
    
    2
    +sources:
    
    3
    +- kind: local
    
    4
    +  path: elements/key-stability/aaa.bst

  • tests/cachekey/project/elements/key-stability/t1.bst
    1
    +kind: import
    
    2
    +sources:
    
    3
    +- kind: local
    
    4
    +  path: elements/key-stability/t1.bst
    
    5
    +depends:
    
    6
    +- elements/key-stability/zzz.bst

  • tests/cachekey/project/elements/key-stability/t2.bst
    1
    +kind: import
    
    2
    +sources:
    
    3
    +- kind: local
    
    4
    +  path: elements/key-stability/t2.bst
    
    5
    +depends:
    
    6
    +- elements/key-stability/aaa.bst
    
    7
    +- elements/key-stability/zzz.bst

  • tests/cachekey/project/elements/key-stability/top-level.bst
    1
    +kind: import
    
    2
    +sources:
    
    3
    +- kind: local
    
    4
    +  path: elements/key-stability/top-level.bst
    
    5
    +depends:
    
    6
    +- elements/key-stability/t1.bst
    
    7
    +- elements/key-stability/t2.bst

  • tests/cachekey/project/elements/key-stability/zzz.bst
    1
    +kind: import
    
    2
    +sources:
    
    3
    +- kind: local
    
    4
    +  path: elements/key-stability/zzz.bst

  • tests/elements/filter.py
    ... ... @@ -464,3 +464,34 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
    464 464
         assert "ref" not in new_input["sources"][0]
    
    465 465
         new_input2 = _yaml.load(input2_file)
    
    466 466
         assert new_input2["sources"][0]["ref"] == ref
    
    467
    +
    
    468
    +
    
    469
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
    
    470
    +def test_filter_include_with_indirect_deps(datafiles, cli, tmpdir):
    
    471
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    472
    +    result = cli.run(project=project, args=[
    
    473
    +        'build', 'output-include-with-indirect-deps.bst'])
    
    474
    +    result.assert_success()
    
    475
    +
    
    476
    +    checkout = os.path.join(tmpdir.dirname, tmpdir.basename, 'checkout')
    
    477
    +    result = cli.run(project=project, args=[
    
    478
    +        'artifact', 'checkout', 'output-include-with-indirect-deps.bst', '--directory', checkout])
    
    479
    +    result.assert_success()
    
    480
    +
    
    481
    +    # direct dependencies should be staged and filtered
    
    482
    +    assert os.path.exists(os.path.join(checkout, "baz"))
    
    483
    +
    
    484
+    # indirect dependencies shouldn't be staged or filtered
    
    485
    +    assert not os.path.exists(os.path.join(checkout, "foo"))
    
    486
    +    assert not os.path.exists(os.path.join(checkout, "bar"))
    
    487
    +
    
    488
    +
    
    489
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
    
    490
    +def test_filter_fails_for_nonexisting_domain(datafiles, cli, tmpdir):
    
    491
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    492
    +    result = cli.run(project=project, args=['build', 'output-include-nonexistent-domain.bst'])
    
    493
    +    result.assert_main_error(ErrorDomain.STREAM, None)
    
    494
    +
    
    495
    +    error = "Unknown domains were used in output-include-nonexistent-domain.bst [line 7 column 2]"
    
    496
    +    assert error in result.stderr
    
    497
    +    assert '- unknown_file' in result.stderr

  • tests/elements/filter/basic/elements/deps-permitted.bst
    1 1
     kind: filter
    
    2 2
     depends:
    
    3
    -- filename: output-include.bst
    
    3
    +- filename: input.bst
    
    4 4
       type: build
    
    5 5
     - filename: output-exclude.bst
    
    6 6
       type: runtime
    

  • tests/elements/filter/basic/elements/input-with-deps.bst
    1
    +kind: import
    
    2
    +
    
    3
    +depends:
    
    4
    +- filename: input.bst
    
    5
    +
    
    6
    +sources:
    
    7
    +- kind: local
    
    8
    +  path: files
    
    9
    +
    
    10
    +public:
    
    11
    +  bst:
    
    12
    +    split-rules:
    
    13
    +      baz:
    
    14
    +      - /baz

  • tests/elements/filter/basic/elements/output-include-nonexistent-domain.bst
    1
    +kind: filter
    
    2
    +depends:
    
    3
    +- filename: input.bst
    
    4
    +  type: build
    
    5
    +config:
    
    6
    +  include:
    
    7
    +  - unknown_file
    
    8
    +

  • tests/elements/filter/basic/elements/output-include-with-indirect-deps.bst
    1
    +kind: filter
    
    2
    +
    
    3
    +depends:
    
    4
    +- filename: input-with-deps.bst
    
    5
    +  type: build

  • tests/integration/pushbuildtrees.py
    1
    +import os
    
    2
    +import shutil
    
    3
    +import pytest
    
    4
    +import subprocess
    
    5
    +
    
    6
    +from buildstream import _yaml
    
    7
    +from tests.testutils import cli_integration as cli, create_artifact_share
    
    8
    +from tests.testutils.integration import assert_contains
    
    9
    +from tests.testutils.site import HAVE_BWRAP, IS_LINUX
    
    10
    +from buildstream._exceptions import ErrorDomain, LoadErrorReason
    
    11
    +
    
    12
    +
    
    13
    +DATA_DIR = os.path.join(
    
    14
    +    os.path.dirname(os.path.realpath(__file__)),
    
    15
    +    "project"
    
    16
    +)
    
    17
    +
    
    18
    +
    
    19
    +# Remove artifact cache & set cli.config value of pull-buildtrees
    
    20
    +# to false, which is the default user context. The cache has to be
    
    21
    +# cleared as just forcefully removing the refpath leaves dangling objects.
    
    22
    +def default_state(cli, tmpdir, share):
    
    23
    +    shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    
    24
    +    cli.configure({
    
    25
    +        'artifacts': {'url': share.repo, 'push': False},
    
    26
    +        'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
    
    27
    +        'cache': {'pull-buildtrees': False},
    
    28
    +    })
    
    29
    +
    
    30
    +
    
    31
+# Tests to capture the integration of the optional push of buildtrees.
    
    32
    +# The behaviour should encompass pushing artifacts that are already cached
    
    33
    +# without a buildtree as well as artifacts that are cached with their buildtree.
    
    34
    +# This option is handled via 'allow-partial-push' on a per artifact remote config
    
    35
    +# node basis. Multiple remote config nodes can point to the same url and as such can
    
    36
+# have different 'allow-partial-push' options; tests need to cover this using project
    
    37
    +# confs.
    
    38
    +@pytest.mark.integration
    
    39
    +@pytest.mark.datafiles(DATA_DIR)
    
    40
    +@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    
    41
    +def test_pushbuildtrees(cli, tmpdir, datafiles, integration_cache):
    
    42
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    43
    +    element_name = 'autotools/amhello.bst'
    
    44
    +
    
    45
    +    # Create artifact shares for pull & push testing
    
    46
    +    with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
    
    47
    +        create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
    
    48
    +        create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3,\
    
    49
    +        create_artifact_share(os.path.join(str(tmpdir), 'share4')) as share4:
    
    50
    +
    
    51
    +        cli.configure({
    
    52
    +            'artifacts': {'url': share1.repo, 'push': True},
    
    53
    +            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
    
    54
    +        })
    
    55
    +
    
    56
    +        cli.configure({'artifacts': [{'url': share1.repo, 'push': True},
    
    57
    +                                     {'url': share2.repo, 'push': True, 'allow-partial-push': True}]})
    
    58
    +
    
    59
+        # Build autotools element, check pushed, delete local.
    
    60
+        # As share 2 has push & allow-partial-push set as true, it
    
    61
    +        # should have pushed the artifacts, without the cached buildtrees,
    
    62
    +        # to it.
    
    63
    +        result = cli.run(project=project, args=['build', element_name])
    
    64
    +        assert result.exit_code == 0
    
    65
    +        assert cli.get_element_state(project, element_name) == 'cached'
    
    66
    +        elementdigest = share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    67
    +        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
    
    68
    +                                    elementdigest.hash, 'buildtree')
    
    69
    +        assert os.path.isdir(buildtreedir)
    
    70
    +        assert element_name in result.get_partial_pushed_elements()
    
    71
    +        assert element_name in result.get_pushed_elements()
    
    72
    +        assert share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    73
    +        assert share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    74
    +        default_state(cli, tmpdir, share1)
    
    75
    +
    
    76
+        # Check that after explicitly pulling an artifact without its buildtree,
    
    77
    +        # we can push it to another remote that is configured to accept the partial
    
    78
    +        # artifact
    
    79
    +        result = cli.run(project=project, args=['pull', element_name])
    
    80
    +        assert element_name in result.get_pulled_elements()
    
    81
    +        cli.configure({'artifacts': {'url': share3.repo, 'push': True, 'allow-partial-push': True}})
    
    82
    +        assert cli.get_element_state(project, element_name) == 'cached'
    
    83
    +        assert not os.path.isdir(buildtreedir)
    
    84
    +        result = cli.run(project=project, args=['push', element_name])
    
    85
    +        assert result.exit_code == 0
    
    86
    +        assert element_name in result.get_partial_pushed_elements()
    
    87
    +        assert element_name not in result.get_pushed_elements()
    
    88
    +        assert share3.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    89
    +        default_state(cli, tmpdir, share3)
    
    90
    +
    
    91
+        # Delete the local cache and pull the partial artifact from share 3;
    
    92
    +        # this should not include the buildtree when extracted locally, even when
    
    93
    +        # pull-buildtrees is given as a cli parameter as no available remotes will
    
    94
    +        # contain the buildtree
    
    95
    +        assert not os.path.isdir(buildtreedir)
    
    96
    +        assert cli.get_element_state(project, element_name) != 'cached'
    
    97
    +        result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
    
    98
    +        assert element_name in result.get_partial_pulled_elements()
    
    99
    +        assert not os.path.isdir(buildtreedir)
    
    100
    +        default_state(cli, tmpdir, share3)
    
    101
    +
    
    102
    +        # Delete the local cache and attempt to pull a 'full' artifact, including its
    
    103
+        # buildtree. As before, share3, being the first listed remote, will not have
    
    104
    +        # the buildtree available and should spawn a partial pull. Having share1 as the
    
    105
    +        # second available remote should allow the buildtree to be pulled thus 'completing'
    
    106
    +        # the artifact
    
    107
    +        cli.configure({'artifacts': [{'url': share3.repo, 'push': True, 'allow-partial-push': True},
    
    108
    +                                     {'url': share1.repo, 'push': True}]})
    
    109
    +        assert cli.get_element_state(project, element_name) != 'cached'
    
    110
    +        result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
    
    111
    +        assert element_name in result.get_partial_pulled_elements()
    
    112
    +        assert element_name in result.get_pulled_elements()
    
    113
    +        assert "Attempting to retrieve buildtree from remotes" in result.stderr
    
    114
    +        assert os.path.isdir(buildtreedir)
    
    115
    +        assert cli.get_element_state(project, element_name) == 'cached'
    
    116
    +
    
    117
    +        # Test that we are able to 'complete' an artifact on a server which is cached partially,
    
    118
    +        # but has now been configured for full artifact pushing. This should require only pushing
    
    119
    +        # the missing blobs, which should be those of just the buildtree. In this case changing
    
    120
    +        # share3 to full pushes should exercise this
    
    121
    +        cli.configure({'artifacts': {'url': share3.repo, 'push': True}})
    
    122
    +        result = cli.run(project=project, args=['push', element_name])
    
    123
    +        assert element_name in result.get_pushed_elements()
    
    124
    +
    
    125
    +        # Ensure that the same remote url can be defined multiple times with differing push
    
    126
+        # config. BuildStream supports the same remote having different configurations, so
    
    127
+        # partial pushing can differ between elements defined in a top-level project.conf and
    
    128
    +        # those from a junctioned project. Assert that elements are pushed to the same remote in
    
    129
    +        # a state defined via their respective project.confs
    
    130
    +        default_state(cli, tmpdir, share1)
    
    131
    +        cli.configure({'artifactdir': os.path.join(str(tmpdir), 'artifacts')}, reset=True)
    
    132
    +        junction = os.path.join(project, 'elements', 'junction')
    
    133
    +        os.mkdir(junction)
    
    134
    +        shutil.copy2(os.path.join(project, 'elements', element_name), junction)
    
    135
    +
    
    136
    +        junction_conf = {}
    
    137
    +        project_conf = {}
    
    138
    +        junction_conf['name'] = 'amhello'
    
    139
    +        junction_conf['artifacts'] = {'url': share4.repo, 'push': True, 'allow-partial-push': True}
    
    140
    +        _yaml.dump(junction_conf, os.path.join(junction, 'project.conf'))
    
    141
    +        project_conf['artifacts'] = {'url': share4.repo, 'push': True}
    
    142
    +
    
    143
    +        # Read project.conf, the junction project.conf and buildstream.conf
    
    144
    +        # before running bst
    
    145
    +        with open(os.path.join(project, 'project.conf'), 'r') as f:
    
    146
    +            print(f.read())
    
    147
    +        with open(os.path.join(junction, 'project.conf'), 'r') as f:
    
    148
    +            print(f.read())
    
    149
    +        with open(os.path.join(project, 'cache', 'buildstream.conf'), 'r') as f:
    
    150
    +            print(f.read())
    
    151
    +
    
    152
    +        result = cli.run(project=project, args=['build', 'junction/amhello.bst'], project_config=project_conf)
    
    153
    +
    
    154
    +        # Read project.conf, the junction project.conf and buildstream.conf
    
    155
    +        # after running bst
    
    156
    +        with open(os.path.join(project, 'project.conf'), 'r') as f:
    
    157
    +            print(f.read())
    
    158
    +        with open(os.path.join(junction, 'project.conf'), 'r') as f:
    
    159
    +            print(f.read())
    
    160
    +        with open(os.path.join(project, 'cache', 'buildstream.conf'), 'r') as f:
    
    161
    +            print(f.read())
    
    162
    +
    
    163
    +        assert 'junction/amhello.bst' in result.get_partial_pushed_elements()
    
    164
    +        assert 'base/base-alpine.bst' in result.get_pushed_elements()

  • tests/testutils/runcli.py
    ... ... @@ -208,6 +208,13 @@ class Result():
    208 208
     
    
    209 209
             return list(pushed)
    
    210 210
     
    
    211
    +    def get_partial_pushed_elements(self):
    
    212
    +        pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', self.stderr)
    
    213
    +        if pushed is None:
    
    214
    +            return []
    
    215
    +
    
    216
    +        return list(pushed)
    
    217
    +
    
    211 218
         def get_pulled_elements(self):
    
    212 219
             pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
    
    213 220
             if pulled is None:
    
    ... ... @@ -215,6 +222,13 @@ class Result():
    215 222
     
    
    216 223
             return list(pulled)
    
    217 224
     
    
    225
    +    def get_partial_pulled_elements(self):
    
    226
    +        pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled partial artifact', self.stderr)
    
    227
    +        if pulled is None:
    
    228
    +            return []
    
    229
    +
    
    230
    +        return list(pulled)
    
    231
    +
    
    218 232
     
    
    219 233
     class Cli():
    
    220 234
     
    
    ... ... @@ -235,11 +249,15 @@ class Cli():
    235 249
         #
    
    236 250
         # Args:
    
    237 251
         #    config (dict): The user configuration to use
    
    252
    +    #    reset (bool): Optional reset of stored config
    
    238 253
         #
    
    239
    -    def configure(self, config):
    
    254
    +    def configure(self, config, reset=False):
    
    240 255
             if self.config is None:
    
    241 256
                 self.config = {}
    
    242 257
     
    
    258
    +        if reset:
    
    259
    +            self.config.clear()
    
    260
    +
    
    243 261
             for key, val in config.items():
    
    244 262
                 self.config[key] = val
    
    245 263
     
    

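The new Result helpers scrape the CLI log with fixed regexes; a standalone sketch against a representative log line (the line itself is fabricated to match the pattern used in the diff):

    import re

    stderr = "[ push:autotools/amhello.bst ] INFO    Pushed partial artifact 0abc123\n"
    pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', stderr)
    print(pushed)  # ['autotools/amhello.bst']
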
  • tests/testutils/site.py
    ... ... @@ -18,7 +18,7 @@ try:
    18 18
         utils.get_host_tool('git')
    
    19 19
         HAVE_GIT = True
    
    20 20
         out = str(subprocess.check_output(['git', '--version']), "utf-8")
    
    21
    -    version = tuple(int(x) for x in out.split(' ', 2)[2].split('.'))
    
    21
    +    version = tuple(int(x) for x in out.split(' ')[2].split('.'))
    
    22 22
         HAVE_OLD_GIT = version < (1, 8, 5)
    
    23 23
     except ProgramNotFoundError:
    
    24 24
         HAVE_GIT = False
    

  • tox.ini
    ... ... @@ -88,5 +88,5 @@ whitelist_externals =
    88 88
     commands =
    
    89 89
         python3 setup.py --command-packages=click_man.commands man_pages
    
    90 90
     deps =
    
    91
    -    click-man
    
    91
    +    click-man >= 0.3.0
    
    92 92
         -rrequirements/requirements.txt


