[Notes] [Git][BuildStream/buildstream][tpollard/566] 26 commits: dev-requirements.txt: Require at least pytest 3.9

Tom Pollard pushed to branch tpollard/566 at BuildStream / buildstream

Commits:

25 changed files:

Changes:

  • buildstream/_artifactcache/artifactcache.py

    @@ -74,6 +74,7 @@ class ArtifactCache():
     
             self._has_fetch_remotes = False
             self._has_push_remotes = False
    +        self._has_partial_push_remotes = False
     
             os.makedirs(self.extractdir, exist_ok=True)
     
    @@ -398,6 +399,8 @@ class ArtifactCache():
                     self._has_fetch_remotes = True
                     if remote_spec.push:
                         self._has_push_remotes = True
    +                    if remote_spec.partial_push:
    +                        self._has_partial_push_remotes = True
     
                     remotes[remote_spec.url] = CASRemote(remote_spec)
     
    @@ -596,6 +599,31 @@ class ArtifactCache():
                 remotes_for_project = self._remotes[element._get_project()]
                 return any(remote.spec.push for remote in remotes_for_project)
     
    +    # has_partial_push_remotes():
    +    #
    +    # Check whether any remote repositories are available for pushing
    +    # non-complete artifacts
    +    #
    +    # Args:
    +    #     element (Element): The Element to check
    +    #
    +    # Returns:
    +    #   (bool): True if any remote repository is configured for optional
    +    #           partial pushes, False otherwise
    +    #
    +    def has_partial_push_remotes(self, *, element=None):
    +        # If there are no partial push remotes available, we can't partial push at all
    +        if not self._has_partial_push_remotes:
    +            return False
    +        elif element is None:
    +            # At least one remote is set to allow partial pushes
    +            return True
    +        else:
    +            # Check whether the specified element's project has push remotes configured
    +            # to accept partial artifact pushes
    +            remotes_for_project = self._remotes[element._get_project()]
    +            return any(remote.spec.partial_push for remote in remotes_for_project)
    +
         # push():
         #
         # Push committed artifact to remote repository.
    @@ -603,6 +631,8 @@ class ArtifactCache():
         # Args:
         #     element (Element): The Element whose artifact is to be pushed
         #     keys (list): The cache keys to use
    +    #     partial (bool): If the artifact is cached in a partial state
    +    #     subdir (string): Optional subdir to not push
         #
         # Returns:
         #   (bool): True if any remote was updated, False if no pushes were required
    @@ -610,12 +640,25 @@ class ArtifactCache():
         # Raises:
         #   (ArtifactError): if there was an error
         #
    -    def push(self, element, keys):
    +    def push(self, element, keys, partial=False, subdir=None):
             refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
     
             project = element._get_project()
     
    -        push_remotes = [r for r in self._remotes[project] if r.spec.push]
    +        push_remotes = []
    +        partial_remotes = []
    +
    +        # Create list of remotes to push to, given current element and partial push config
    +        if not partial:
    +            push_remotes = [r for r in self._remotes[project] if (r.spec.push and not r.spec.partial_push)]
    +
    +        if self._has_partial_push_remotes:
    +            # Create a specific list of the remotes expecting the artifact to be pushed in
    +            # a partial state, without the optional subdir if it exists locally. No need
    +            # to attempt pushing a partial artifact to a remote that is queued to also
    +            # receive a full artifact
    +            partial_remotes = [r for r in self._remotes[project] if (r.spec.partial_push and r.spec.push) and
    +                               r not in push_remotes]
     
             pushed = False
     
    @@ -632,6 +675,19 @@ class ArtifactCache():
                         remote.spec.url, element._get_brief_display_key()
                     ))
     
    +        for remote in partial_remotes:
    +            remote.init()
    +            display_key = element._get_brief_display_key()
    +            element.status("Pushing partial artifact {} -> {}".format(display_key, remote.spec.url))
    +
    +            if self.cas.push(refs, remote, subdir=subdir):
    +                element.info("Pushed partial artifact {} -> {}".format(display_key, remote.spec.url))
    +                pushed = True
    +            else:
    +                element.info("Remote ({}) already has {} partial cached".format(
    +                    remote.spec.url, element._get_brief_display_key()
    +                ))
    +
             return pushed
     
         # pull():
    @@ -659,14 +715,23 @@ class ArtifactCache():
                     element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
     
                     if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
    -                    element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
                         if subdir:
    -                        # Attempt to extract subdir into artifact extract dir if it already exists
    -                        # without containing the subdir. If the respective artifact extract dir does not
    -                        # exist a complete extraction will complete.
    -                        self.extract(element, key, subdir)
    -                    # no need to pull from additional remotes
    -                    return True
    +                        if not self.contains_subdir_artifact(element, key, subdir):
    +                            # The pull was expecting the specific subdir to be present, attempt
    +                            # to find it in other available remotes
    +                            element.info("Pulled partial artifact {} <- {}. Attempting to retrieve {} from remotes"
    +                                         .format(display_key, remote.spec.url, subdir))
    +                        else:
    +                            element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    +                            # Attempt to extract subdir into artifact extract dir if it already exists
    +                            # without containing the subdir. If the respective artifact extract dir does not
    +                            # exist a complete extraction will complete.
    +                            self.extract(element, key, subdir)
    +                            # no need to pull from additional remotes
    +                            return True
    +                    else:
    +                        element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    +                        return True
                     else:
                         element.info("Remote ({}) does not have {} cached".format(
                             remote.spec.url, element._get_brief_display_key()

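    The new push() above splits a project's remotes into two disjoint lists. A
    minimal runnable sketch of that partitioning rule, using a simplified
    RemoteSpec stand-in for CASRemoteSpec and placeholder URLs (illustrative
    only, not the real class):

        from collections import namedtuple

        RemoteSpec = namedtuple('RemoteSpec', 'url push partial_push')

        def partition_remotes(remotes, partial):
            # Full pushes only go to push remotes that did not opt in to
            # partial pushes, and only when the local artifact is complete
            push_remotes = []
            if not partial:
                push_remotes = [r for r in remotes if r.push and not r.partial_push]

            # Partial-capable remotes receive the artifact without the optional
            # subdir, unless they are already queued for a full push
            partial_remotes = [r for r in remotes
                               if r.push and r.partial_push and r not in push_remotes]
            return push_remotes, partial_remotes

        remotes = [RemoteSpec('https://cache-a.example.com', True, False),
                   RemoteSpec('https://cache-b.example.com', True, True)]
        full, partial = partition_remotes(remotes, partial=False)
        assert [r.url for r in full] == ['https://cache-a.example.com']
        assert [r.url for r in partial] == ['https://cache-b.example.com']

    A remote that opted in via 'allow-partial-push' only receives the partial
    push when it is not already due the full artifact.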
  • buildstream/_artifactcache/cascache.py

    @@ -45,7 +45,8 @@ from .. import _yaml
     _MAX_PAYLOAD_BYTES = 1024 * 1024
     
     
    -class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
    +class CASRemoteSpec(namedtuple('CASRemoteSpec',
    +                               'url push partial_push server_cert client_key client_cert instance_name')):
     
         # _new_from_config_node
         #
    @@ -53,9 +54,13 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
         #
         @staticmethod
         def _new_from_config_node(spec_node, basedir=None):
    -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
    +        _yaml.node_validate(spec_node,
    +                            ['url', 'push', 'allow-partial-push', 'server-cert', 'client-key',
    +                             'client-cert', 'instance_name'])
             url = _yaml.node_get(spec_node, str, 'url')
             push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
    +        partial_push = _yaml.node_get(spec_node, bool, 'allow-partial-push', default_value=False)
    +
             if not url:
                 provenance = _yaml.node_get_provenance(spec_node, 'url')
                 raise LoadError(LoadErrorReason.INVALID_DATA,
    @@ -85,10 +90,10 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
                 raise LoadError(LoadErrorReason.INVALID_DATA,
                                 "{}: 'client-cert' was specified without 'client-key'".format(provenance))
     
    -        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
    +        return CASRemoteSpec(url, push, partial_push, server_cert, client_key, client_cert, instance_name)
     
     
    -CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
    +CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)
     
     
     class BlobNotFound(CASError):
    @@ -283,34 +288,40 @@ class CASCache():
         #   (bool): True if pull was successful, False if ref was not available
         #
         def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
    -        try:
    -            remote.init()
     
    -            request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
    -            request.key = ref
    -            response = remote.ref_storage.GetReference(request)
    +        while True:
    +            try:
    +                remote.init()
     
    -            tree = remote_execution_pb2.Digest()
    -            tree.hash = response.digest.hash
    -            tree.size_bytes = response.digest.size_bytes
    +                request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
    +                request.key = ref
    +                response = remote.ref_storage.GetReference(request)
     
    -            # Check if the element artifact is present, if so just fetch the subdir.
    -            if subdir and os.path.exists(self.objpath(tree)):
    -                self._fetch_subdir(remote, tree, subdir)
    -            else:
    -                # Fetch artifact, excluded_subdirs determined in pullqueue
    -                self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
    +                tree = remote_execution_pb2.Digest()
    +                tree.hash = response.digest.hash
    +                tree.size_bytes = response.digest.size_bytes
     
    -            self.set_ref(ref, tree)
    +                # Check if the element artifact is present, if so just fetch the subdir.
    +                if subdir and os.path.exists(self.objpath(tree)):
    +                    self._fetch_subdir(remote, tree, subdir)
    +                else:
    +                    # Fetch artifact, excluded_subdirs determined in pullqueue
    +                    self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
     
    -            return True
    -        except grpc.RpcError as e:
    -            if e.code() != grpc.StatusCode.NOT_FOUND:
    -                raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
    -            else:
    -                return False
    -        except BlobNotFound as e:
    -            return False
    +                self.set_ref(ref, tree)
    +
    +                return True
    +            except grpc.RpcError as e:
    +                if e.code() != grpc.StatusCode.NOT_FOUND:
    +                    raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
    +                else:
    +                    return False
    +            except BlobNotFound as e:
    +                if not excluded_subdirs and subdir:
    +                    # Could not complete a full pull, attempt partial
    +                    excluded_subdirs, subdir = subdir, excluded_subdirs
    +                else:
    +                    return False
     
         # pull_tree():
         #
    @@ -355,6 +366,7 @@ class CASCache():
         # Args:
         #     refs (list): The refs to push
         #     remote (CASRemote): The remote to push to
    +    #     subdir (string): Optional specific subdir to exempt from the push
         #
         # Returns:
         #   (bool): True if any remote was updated, False if no pushes were required
    @@ -362,7 +374,7 @@ class CASCache():
         # Raises:
         #   (CASError): if there was an error
         #
    -    def push(self, refs, remote):
    +    def push(self, refs, remote, subdir=None):
             skipped_remote = True
             try:
                 for ref in refs:
    @@ -384,7 +396,7 @@ class CASCache():
                             # Intentionally re-raise RpcError for outer except block.
                             raise
     
    -                self._send_directory(remote, tree)
    +                self._send_directory(remote, tree, excluded_dir=subdir)
     
                     request = buildstream_pb2.UpdateReferenceRequest(instance_name=remote.spec.instance_name)
                     request.keys.append(ref)
    @@ -866,10 +878,17 @@ class CASCache():
                     a += 1
                     b += 1
     
    -    def _reachable_refs_dir(self, reachable, tree, update_mtime=False):
    +    def _reachable_refs_dir(self, reachable, tree, update_mtime=False, subdir=False):
             if tree.hash in reachable:
                 return
     
    +        # If looping through subdir digests, skip processing if
    +        # ref path does not exist, allowing for partial objects
    +        if subdir and not os.path.exists(self.objpath(tree)):
    +            return
    +
    +        # Raises a FileNotFoundError if the path does not exist,
    +        # which should only be entered on the top level digest
             if update_mtime:
                 os.utime(self.objpath(tree))
     
    @@ -886,9 +905,9 @@ class CASCache():
                 reachable.add(filenode.digest.hash)
     
             for dirnode in directory.directories:
    -            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime)
    +            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, subdir=True)
     
    -    def _required_blobs(self, directory_digest):
    +    def _required_blobs(self, directory_digest, excluded_dir=None):
             # parse directory, and recursively add blobs
             d = remote_execution_pb2.Digest()
             d.hash = directory_digest.hash
    @@ -907,7 +926,8 @@ class CASCache():
                 yield d
     
             for dirnode in directory.directories:
    -            yield from self._required_blobs(dirnode.digest)
    +            if dirnode.name != excluded_dir:
    +                yield from self._required_blobs(dirnode.digest)
     
         def _fetch_blob(self, remote, digest, stream):
             resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
    @@ -1029,6 +1049,7 @@ class CASCache():
                 objpath = self._ensure_blob(remote, dir_digest)
     
                 directory = remote_execution_pb2.Directory()
    +
                 with open(objpath, 'rb') as f:
                     directory.ParseFromString(f.read())
     
    @@ -1104,9 +1125,8 @@ class CASCache():
     
             assert response.committed_size == digest.size_bytes
     
    -    def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
    -        required_blobs = self._required_blobs(digest)
    -
    +    def _send_directory(self, remote, digest, u_uid=uuid.uuid4(), excluded_dir=None):
    +        required_blobs = self._required_blobs(digest, excluded_dir=excluded_dir)
             missing_blobs = dict()
             # Limit size of FindMissingBlobs request
             for required_blobs_group in _grouper(required_blobs, 512):

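    The reworked CASCache.pull() wraps the fetch in a retry loop; the key step
    is the swap of subdir and excluded_subdirs when a blob turns out to be
    missing. A condensed, runnable sketch of just that control flow, where
    fetch is a hypothetical stand-in for the GetReference/_fetch_directory
    calls:

        class BlobNotFound(Exception):
            pass

        def pull_with_fallback(fetch, subdir=None, excluded_subdirs=None):
            # Mirrors the new `while True` loop in CASCache.pull()
            while True:
                try:
                    fetch(subdir=subdir, excluded_subdirs=excluded_subdirs)
                    return True
                except BlobNotFound:
                    if not excluded_subdirs and subdir:
                        # Could not complete a full pull, retry as a partial
                        # pull that excludes the subdir
                        excluded_subdirs, subdir = subdir, excluded_subdirs
                    else:
                        return False

        attempts = []

        def fetch(subdir, excluded_subdirs):
            # Hypothetical remote that only has the artifact without its subdir
            attempts.append((subdir, excluded_subdirs))
            if not excluded_subdirs:
                raise BlobNotFound()

        assert pull_with_fallback(fetch, subdir='buildtree')
        assert attempts == [('buildtree', None), (None, 'buildtree')]

    After one swap, excluded_subdirs is non-empty, so a second BlobNotFound
    terminates the loop with False rather than retrying forever.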
  • buildstream/_artifactcache/casserver.py

    @@ -27,8 +27,8 @@ import uuid
     import errno
     import threading
     
    -import click
     import grpc
    +import click
     
     from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
     from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc

  • buildstream/_frontend/cli.py

    @@ -673,7 +673,6 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
     @cli.group(short_help="Manipulate sources for an element")
     def source():
         """Manipulate sources for an element"""
    -    pass
     
     
     ##################################################################
    @@ -822,7 +821,6 @@ def source_checkout(app, element, location, force, deps, fetch_, except_,
     @cli.group(short_help="Manipulate developer workspaces")
     def workspace():
         """Manipulate developer workspaces"""
    -    pass
     
     
     ##################################################################
    @@ -1000,7 +998,6 @@ def _classify_artifacts(names, cas, project_directory):
     @cli.group(short_help="Manipulate cached artifacts")
     def artifact():
         """Manipulate cached artifacts"""
    -    pass
     
     
     ################################################################

  • buildstream/_frontend/complete.py

    @@ -203,7 +203,7 @@ def is_incomplete_option(all_args, cmd_param):
             if start_of_option(arg_str):
                 last_option = arg_str
     
    -    return True if last_option and last_option in cmd_param.opts else False
    +    return bool(last_option and last_option in cmd_param.opts)
     
     
     def is_incomplete_argument(current_params, cmd_param):

  • buildstream/_frontend/widget.py

    @@ -23,8 +23,8 @@ from contextlib import ExitStack
     from mmap import mmap
     import re
     import textwrap
    -import click
     from ruamel import yaml
    +import click
     
     from . import Profile
     from .. import Element, Consistency

  • buildstream/_loader/loader.py

    @@ -99,7 +99,6 @@ class Loader():
         # Returns: The toplevel LoadElement
         def load(self, targets, rewritable=False, ticker=None, fetch_subprojects=False):
     
    -        invalid_elements = []
             for filename in targets:
                 if os.path.isabs(filename):
                     # XXX Should this just be an assertion ?
    @@ -109,14 +108,8 @@ class Loader():
                                     "path to the base project directory: {}"
                                     .format(filename, self._basedir))
     
    -            if not filename.endswith(".bst"):
    -                invalid_elements.append(filename)
    +        self._warn_invalid_elements(targets)
     
    -        if invalid_elements:
    -            self._warn("Target elements '{}' do not have expected file extension `.bst` "
    -                       "Improperly named elements will not be discoverable by commands"
    -                       .format(invalid_elements),
    -                       warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX)
             # First pass, recursively load files and populate our table of LoadElements
             #
             deps = []
    @@ -280,12 +273,7 @@ class Loader():
             self._elements[filename] = element
     
             # Load all dependency files for the new LoadElement
    -        invalid_elements = []
             for dep in element.deps:
    -            if not dep.name.endswith(".bst"):
    -                invalid_elements.append(dep.name)
    -                continue
    -
                 if dep.junction:
                     self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
                     loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
    @@ -300,11 +288,9 @@ class Loader():
                                     "{}: Cannot depend on junction"
                                     .format(dep.provenance))
     
    -        if invalid_elements:
    -            self._warn("The following dependencies do not have expected file extension `.bst`: {} "
    -                       "Improperly named elements will not be discoverable by commands"
    -                       .format(invalid_elements),
    -                       warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX)
    +        deps_names = [dep.name for dep in element.deps]
    +        self._warn_invalid_elements(deps_names)
    +
             return element
     
         # _check_circular_deps():
    @@ -679,3 +665,69 @@ class Loader():
     
             message = Message(None, MessageType.WARN, brief)
             self._context.message(message)
    +
    +    # Print warning messages if any of the specified elements have invalid names.
    +    #
    +    # Valid filenames should end with the ".bst" extension.
    +    #
    +    # Args:
    +    #    elements (list): List of element names
    +    #
    +    # Raises:
    +    #     (:class:`.LoadError`): When warning_token is considered fatal by the project configuration
    +    #
    +    def _warn_invalid_elements(self, elements):
    +
    +        # invalid_elements
    +        #
    +        # A dict that maps warning types to the matching elements.
    +        invalid_elements = {
    +            CoreWarnings.BAD_ELEMENT_SUFFIX: [],
    +            CoreWarnings.BAD_CHARACTERS_IN_NAME: [],
    +        }
    +
    +        for filename in elements:
    +            if not filename.endswith(".bst"):
    +                invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX].append(filename)
    +            if not self._valid_chars_name(filename):
    +                invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME].append(filename)
    +
    +        if invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]:
    +            self._warn("Target elements '{}' do not have the expected file extension `.bst` "
    +                       "Improperly named elements will not be discoverable by commands"
    +                       .format(invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]),
    +                       warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX)
    +        if invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]:
    +            self._warn("Target elements '{}' have invalid characters in their name."
    +                       .format(invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]),
    +                       warning_token=CoreWarnings.BAD_CHARACTERS_IN_NAME)
    +
    +    # Check if the given filename contains valid characters.
    +    #
    +    # Args:
    +    #    name (str): Name of the file
    +    #
    +    # Returns:
    +    #    (bool): True if all characters are valid, False otherwise.
    +    #
    +    def _valid_chars_name(self, name):
    +        for char in name:
    +            char_val = ord(char)
    +
    +            # 0-31 are control chars, 127 is DEL, and >127 means non-ASCII
    +            if char_val <= 31 or char_val >= 127:
    +                return False
    +
    +            # Disallow characters that are invalid on Windows. The list can be
    +            # found at https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file
    +            #
    +            # Note that although : (colon) is not allowed, we do not raise
    +            # warnings because of that, since we use it as a separator for
    +            # junctioned elements.
    +            #
    +            # We also do not raise warnings on slashes since they are used as
    +            # path separators.
    +            if char in r'<>"|?*':
    +                return False
    +
    +        return True

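    For illustration, the character rules enforced by the new
    _valid_chars_name() can be exercised standalone; this sketch repeats the
    same logic outside the Loader class, with the element names taken from the
    tests below:

        def valid_chars_name(name):
            for char in name:
                char_val = ord(char)
                if char_val <= 31 or char_val >= 127:   # control chars, DEL, non-ASCII
                    return False
                if char in r'<>"|?*':                   # reserved on Windows
                    return False
            return True

        assert valid_chars_name('base/alpine.bst')           # slashes are allowed
        assert valid_chars_name('junction.bst:element.bst')  # colon separates junctions
        assert not valid_chars_name('invalid-chars|<>-in-name.bst')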
  • buildstream/_signals.py

    @@ -77,7 +77,7 @@ def terminator(terminate_func):
             yield
             return
     
    -    outermost = False if terminator_stack else True
    +    outermost = bool(not terminator_stack)
     
         terminator_stack.append(terminate_func)
         if outermost:
    @@ -137,7 +137,7 @@ def suspend_handler(sig, frame):
     def suspendable(suspend_callback, resume_callback):
         global suspendable_stack                  # pylint: disable=global-statement
     
    -    outermost = False if suspendable_stack else True
    +    outermost = bool(not suspendable_stack)
         suspender = Suspender(suspend_callback, resume_callback)
         suspendable_stack.append(suspender)
     

  • buildstream/element.py

    @@ -326,7 +326,6 @@ class Element(Plugin):
     
             *Since: 1.2*
             """
    -        pass
     
         def assemble(self, sandbox):
             """Assemble the output artifact
    @@ -1340,7 +1339,7 @@ class Element(Plugin):
         @contextmanager
         def _prepare_sandbox(self, scope, directory, shell=False, integrate=True, usebuildtree=False):
             # bst shell and bst checkout require a local sandbox.
    -        bare_directory = True if directory else False
    +        bare_directory = bool(directory)
             with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
                                 bare_directory=bare_directory) as sandbox:
                 sandbox._usebuildtree = usebuildtree
    @@ -1695,7 +1694,7 @@ class Element(Plugin):
     
             # Store workspaced.yaml
             _yaml.dump(_yaml.node_sanitize({
    -            'workspaced': True if self._get_workspace() else False
    +            'workspaced': bool(self._get_workspace())
             }), os.path.join(metadir, 'workspaced.yaml'))
     
             # Store workspaced-dependencies.yaml
    @@ -1801,13 +1800,19 @@ class Element(Plugin):
         #   (bool): True if this element does not need a push job to be created
         #
         def _skip_push(self):
    +
             if not self.__artifacts.has_push_remotes(element=self):
                 # No push remotes for this element's project
                 return True
     
             # Do not push elements that aren't cached, or that are cached with a dangling buildtree
    -        # artifact unless element type is expected to have an an empty buildtree directory
    -        if not self._cached_buildtree():
    +        # artifact unless element type is expected to have an empty buildtree directory. Check
    +        # that this default behaviour is not overridden via a remote configured to allow pushing
    +        # artifacts without their corresponding buildtree.
    +        if not self._cached():
    +            return True
    +
    +        if not self._cached_buildtree() and not self.__artifacts.has_partial_push_remotes(element=self):
                 return True
     
             # Do not push tainted artifact
    @@ -1818,7 +1823,8 @@ class Element(Plugin):
     
         # _push():
         #
    -    # Push locally cached artifact to remote artifact repository.
    +    # Push locally cached artifact to remote artifact repository. An attempt
    +    # will be made to push partial artifacts given the current config
         #
         # Returns:
         #   (bool): True if the remote was updated, False if it already existed
    @@ -1831,8 +1837,19 @@ class Element(Plugin):
                 self.warn("Not pushing tainted artifact.")
                 return False
     
    -        # Push all keys used for local commit
    -        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit())
    +        # Push all keys used for local commit; this could be full or partial,
    +        # given the previous _skip_push() logic. If the buildtree isn't cached,
    +        # then set partial push
    +
    +        partial = False
    +        subdir = 'buildtree'
    +        if not self._cached_buildtree():
    +            partial = True
    +
    +        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit(), partial=partial, subdir=subdir)
    +
    +        # Artifact might be cached in the server partially with the top level ref existing.
    +        # Check if we need to attempt a push of a locally cached buildtree given current config
             if not pushed:
                 return False
     

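    Taken together, _skip_push() and _push() above implement a small decision
    table. A condensed sketch, where the three booleans stand in for the real
    self._cached(), self._cached_buildtree() and
    has_partial_push_remotes(element=...) queries:

        def plan_push(cached, cached_buildtree, has_partial_push_remotes):
            if not cached:
                return None      # nothing to push
            if not cached_buildtree and not has_partial_push_remotes:
                return None      # dangling buildtree and no remote accepts partials
            # Push everything, or everything except the 'buildtree' subdir
            return {'partial': not cached_buildtree, 'subdir': 'buildtree'}

        assert plan_push(True, True, False) == {'partial': False, 'subdir': 'buildtree'}
        assert plan_push(True, False, True) == {'partial': True, 'subdir': 'buildtree'}
        assert plan_push(True, False, False) is None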
  • buildstream/plugins/elements/import.py

    @@ -112,7 +112,7 @@ class ImportElement(BuildElement):
     
             # Ensure target directory parent exists but target directory doesn't
             commands.append("mkdir -p {}".format(os.path.dirname(outputdir)))
    -        commands.append("[ ! -e {} ] || rmdir {}".format(outputdir, outputdir))
    +        commands.append("[ ! -e {outputdir} ] || rmdir {outputdir}".format(outputdir=outputdir))
     
             # Move it over
             commands.append("mv {} {}".format(inputdir, outputdir))

  • buildstream/plugins/sources/_downloadablefilesource.py

    @@ -231,7 +231,13 @@ class DownloadableFileSource(Source):
             if not DownloadableFileSource.__urlopener:
                 try:
                     netrc_config = netrc.netrc()
    -            except FileNotFoundError:
    +            except OSError:
    +                # If the .netrc file was not found, FileNotFoundError will be
    +                # raised, but OSError will be raised directly by the netrc package
    +                # in the case that $HOME is not set.
    +                #
    +                # This will catch both cases.
    +                #
                     DownloadableFileSource.__urlopener = urllib.request.build_opener()
                 except netrc.NetrcParseError as e:
                     self.warn('{}: While reading .netrc: {}'.format(self, e))

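    Since FileNotFoundError is a subclass of OSError, the single OSError
    handler above covers both failure modes; a quick demonstration:

        # FileNotFoundError inherits from OSError, so one handler catches both
        assert issubclass(FileNotFoundError, OSError)

        try:
            raise FileNotFoundError("~/.netrc not found")
        except OSError as e:
            print("caught via OSError handler:", e)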
  • buildstream/source.py

    @@ -499,7 +499,6 @@ class Source(Plugin):
     
             *Since: 1.4*
             """
    -        pass
     
         #############################################################
         #                       Public Methods                      #

  • buildstream/storage/_casbaseddirectory.py

    @@ -53,12 +53,10 @@ class IndexEntry():
     class ResolutionException(VirtualDirectoryError):
         """ Superclass of all exceptions that can be raised by
         CasBasedDirectory._resolve. Should not be used outside this module. """
    -    pass
     
     
     class InfiniteSymlinkException(ResolutionException):
         """ Raised when an infinite symlink loop is found. """
    -    pass
     
     
     class AbsoluteSymlinkException(ResolutionException):
    @@ -66,7 +64,6 @@ class AbsoluteSymlinkException(ResolutionException):
         target starts with the path separator) and we have disallowed
         following such symlinks.
         """
    -    pass
     
     
     class UnexpectedFileException(ResolutionException):
    @@ -664,13 +661,11 @@ class CasBasedDirectory(Directory):
             """ Sets a static modification time for all regular files in this directory.
             Since we don't store any modification time, we don't need to do anything.
             """
    -        pass
     
         def set_deterministic_user(self):
             """ Sets all files in this directory to the current user's euid/egid.
             We also don't store user data, so this can be ignored.
             """
    -        pass
     
         def export_files(self, to_directory, *, can_link=False, can_destroy=False):
             """Copies everything from this into to_directory, which must be the name

  • buildstream/types.py

    @@ -105,6 +105,12 @@ class CoreWarnings():
         is referenced either on the command line or by another element
         """
     
    +    BAD_CHARACTERS_IN_NAME = "bad-characters-in-name"
    +    """
    +    This warning will be produced when the filename of a target contains
    +    invalid characters.
    +    """
    +
     
     # _KeyStrength():
     #

  • dev-requirements.txt

     coverage == 4.4.0
     pep8
    -pylint == 2.1.1
    -pytest >= 3.8
    +pylint
    +pytest >= 3.9
     pytest-cov >= 2.5.0
    -pytest-datafiles
    +pytest-datafiles >= 2.0
     pytest-env
     pytest-pep8
     pytest-pylint

  • doc/source/format_declaring.rst

    @@ -526,3 +526,27 @@ read-only variables are also dynamically declared by BuildStream:
       build, support for this is conditional on the element type
       and the build system used (any element using 'make' can
       implement this).
    +
    +
    +Naming elements
    +---------------
    +When naming the element files, use the following rules:
    +
    +* The name of the file must have the ``.bst`` extension.
    +
    +* All characters in the name must be printable 7-bit ASCII characters.
    +
    +* The following characters are reserved and must not be part of the name:
    +
    +  - ``<`` (less than)
    +  - ``>`` (greater than)
    +  - ``:`` (colon)
    +  - ``"`` (double quote)
    +  - ``/`` (forward slash)
    +  - ``\`` (backslash)
    +  - ``|`` (vertical bar)
    +  - ``?`` (question mark)
    +  - ``*`` (asterisk)
    +
    +BuildStream will attempt to raise warnings when any of these rules are violated
    +but that may not always be possible.

  • tests/frontend/buildcheckout.py

    @@ -85,6 +85,20 @@ def test_build_invalid_suffix_dep(datafiles, cli, strict, hardlinks):
         result.assert_main_error(ErrorDomain.LOAD, "bad-element-suffix")
     
     
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_build_invalid_filename_chars(datafiles, cli):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    result = cli.run(project=project, args=strict_args(['build', 'invalid-chars|<>-in-name.bst'], 'non-strict'))
    +    result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_build_invalid_filename_chars_dep(datafiles, cli):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    result = cli.run(project=project, args=strict_args(['build', 'invalid-chars-in-dep.bst'], 'non-strict'))
    +    result.assert_main_error(ErrorDomain.LOAD, "bad-characters-in-name")
    +
    +
     @pytest.mark.datafiles(DATA_DIR)
     @pytest.mark.parametrize("deps", [("run"), ("none"), ("build")])
     def test_build_checkout_deps(datafiles, cli, deps):
    

  • tests/frontend/project/elements/invalid-chars-in-dep.bst

    +kind: stack
    +description: |
    +
    +  This element itself has a valid name, but depends on elements that have
    +  invalid names. This should also result in a warning.
    +
    +depends:
    +- invalid-chars|<>-in-name.bst

  • tests/frontend/project/elements/invalid-chars|<>-in-name.bst

    +kind: stack
    +description: |
    +  The name of this file contains characters that are not allowed by
    +  BuildStream; using it should raise a warning.

  • tests/frontend/project/project.conf

    @@ -5,3 +5,4 @@ element-path: elements
     
     fatal-warnings:
     - bad-element-suffix
    +- bad-characters-in-name

  • tests/integration/pushbuildtrees.py

    +import os
    +import shutil
    +import pytest
    +
    +from tests.testutils import cli_integration as cli, create_artifact_share
    +from tests.testutils.integration import assert_contains
    +from tests.testutils.site import HAVE_BWRAP, IS_LINUX
    +from buildstream._exceptions import ErrorDomain, LoadErrorReason
    +
    +
    +DATA_DIR = os.path.join(
    +    os.path.dirname(os.path.realpath(__file__)),
    +    "project"
    +)
    +
    +
    +# Remove artifact cache & set cli.config value of pull-buildtrees
    +# to false, which is the default user context. The cache has to be
    +# cleared as just forcefully removing the refpath leaves dangling objects.
    +def default_state(cli, tmpdir, share):
    +    shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    +    cli.configure({
    +        'artifacts': {'url': share.repo, 'push': False},
    +        'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
    +        'cache': {'pull-buildtrees': False},
    +    })
    +
    +
    +# Tests to capture the integration of the optional push of buildtrees.
    +# The behaviour should encompass pushing artifacts that are already cached
    +# without a buildtree as well as artifacts that are cached with their buildtree.
    +# This option is handled via 'allow-partial-push' on a per artifact remote config
    +# node basis. Multiple remote config nodes can point to the same url and as such can
    +# have different 'allow-partial-push' options, so tests need to cover this using
    +# project confs.
    +@pytest.mark.integration
    +@pytest.mark.datafiles(DATA_DIR)
    +@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    +def test_pushbuildtrees(cli, tmpdir, datafiles, integration_cache):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    element_name = 'autotools/amhello.bst'
    +
    +    # Create artifact shares for pull & push testing
    +    with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
    +        create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
    +        create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3,\
    +        create_artifact_share(os.path.join(str(tmpdir), 'share4')) as share4:
    +
    +        cli.configure({
    +            'artifacts': {'url': share1.repo, 'push': True},
    +            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
    +        })
    +
    +        cli.configure({'artifacts': [{'url': share1.repo, 'push': True},
    +                                     {'url': share2.repo, 'push': True, 'allow-partial-push': True}]})
    +
    +        # Build the autotools element, check it pushed, delete local.
    +        # As share2 has push & allow-partial-push set to true, it
    +        # should have pushed the artifacts, without the cached buildtrees,
    +        # to it.
    +        result = cli.run(project=project, args=['build', element_name])
    +        assert result.exit_code == 0
    +        assert cli.get_element_state(project, element_name) == 'cached'
    +        elementdigest = share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    +        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
    +                                    elementdigest.hash, 'buildtree')
    +        assert os.path.isdir(buildtreedir)
    +        assert element_name in result.get_partial_pushed_elements()
    +        assert element_name in result.get_pushed_elements()
    +        assert share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    +        assert share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    +        default_state(cli, tmpdir, share1)
    +
    +        # Check that after explicitly pulling an artifact without its buildtree,
    +        # we can push it to another remote that is configured to accept the partial
    +        # artifact
    +        result = cli.run(project=project, args=['pull', element_name])
    +        assert element_name in result.get_pulled_elements()
    +        cli.configure({'artifacts': {'url': share3.repo, 'push': True, 'allow-partial-push': True}})
    +        assert cli.get_element_state(project, element_name) == 'cached'
    +        assert not os.path.isdir(buildtreedir)
    +        result = cli.run(project=project, args=['push', element_name])
    +        assert result.exit_code == 0
    +        assert element_name in result.get_partial_pushed_elements()
    +        assert element_name not in result.get_pushed_elements()
    +        assert share3.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    +        default_state(cli, tmpdir, share3)
    +
    +        # Delete the local cache and pull the partial artifact from share3;
    +        # this should not include the buildtree when extracted locally, even when
    +        # pull-buildtrees is given as a cli parameter, as no available remotes will
    +        # contain the buildtree
    +        assert not os.path.isdir(buildtreedir)
    +        assert cli.get_element_state(project, element_name) != 'cached'
    +        result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
    +        assert element_name in result.get_partial_pulled_elements()
    +        assert not os.path.isdir(buildtreedir)
    +        default_state(cli, tmpdir, share3)
    +
    +        # Delete the local cache and attempt to pull a 'full' artifact, including its
    +        # buildtree. As before, share3 being the first listed remote will not have
    +        # the buildtree available and should spawn a partial pull. Having share1 as the
    +        # second available remote should allow the buildtree to be pulled, thus 'completing'
    +        # the artifact
    +        cli.configure({'artifacts': [{'url': share3.repo, 'push': True, 'allow-partial-push': True},
    +                                     {'url': share1.repo, 'push': True}]})
    +        assert cli.get_element_state(project, element_name) != 'cached'
    +        result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
    +        assert element_name in result.get_partial_pulled_elements()
    +        assert element_name in result.get_pulled_elements()
    +        assert "Attempting to retrieve buildtree from remotes" in result.stderr
    +        assert os.path.isdir(buildtreedir)
    +        assert cli.get_element_state(project, element_name) == 'cached'

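    For reference, the user configuration shape these tests exercise is a list
    of remote config nodes, each of which may independently opt in to partial
    pushes. A sketch of such a config as a Python dict, matching what the test
    passes to cli.configure() (URLs are placeholders):

        artifacts_config = {
            'artifacts': [
                # This remote always receives complete artifacts
                {'url': 'https://cache-full.example.com', 'push': True},
                # This remote also accepts artifacts without their buildtree
                {'url': 'https://cache-partial.example.com',
                 'push': True,
                 'allow-partial-push': True},
            ]
        }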
  • tests/loader/junctions.py

    @@ -3,7 +3,7 @@ import pytest
     import shutil
     
     from buildstream import _yaml, ElementError
    -from buildstream._exceptions import LoadError, LoadErrorReason
    +from buildstream._exceptions import ErrorDomain, LoadErrorReason
     from tests.testutils import cli, create_repo
     from tests.testutils.site import HAVE_GIT
     
    @@ -38,9 +38,9 @@ def test_simple_build(cli, tmpdir, datafiles):
     
         # Build, checkout
         result = cli.run(project=project, args=['build', 'target.bst'])
    -    assert result.exit_code == 0
    +    result.assert_success()
         result = cli.run(project=project, args=['checkout', 'target.bst', checkoutdir])
    -    assert result.exit_code == 0
    +    result.assert_success()
     
         # Check that the checkout contains the expected files from both projects
         assert(os.path.exists(os.path.join(checkoutdir, 'base.txt')))
    @@ -54,7 +54,7 @@ def test_build_of_same_junction_used_twice(cli, tmpdir, datafiles):
         # Check we can build a project that contains the same junction
         # that is used twice, but named differently
         result = cli.run(project=project, args=['build', 'target.bst'])
    -    assert result.exit_code == 0
    +    result.assert_success()
     
     
     @pytest.mark.datafiles(DATA_DIR)
    @@ -69,9 +69,9 @@ def test_nested_simple(cli, tmpdir, datafiles):
     
         # Build, checkout
         result = cli.run(project=project, args=['build', 'target.bst'])
    -    assert result.exit_code == 0
    +    result.assert_success()
         result = cli.run(project=project, args=['checkout', 'target.bst', checkoutdir])
    -    assert result.exit_code == 0
    +    result.assert_success()
     
         # Check that the checkout contains the expected files from all subprojects
         assert(os.path.exists(os.path.join(checkoutdir, 'base.txt')))
    @@ -93,9 +93,9 @@ def test_nested_double(cli, tmpdir, datafiles):
     
         # Build, checkout
         result = cli.run(project=project, args=['build', 'target.bst'])
    -    assert result.exit_code == 0
    +    result.assert_success()
         result = cli.run(project=project, args=['checkout', 'target.bst', checkoutdir])
    -    assert result.exit_code == 0
    +    result.assert_success()
     
         # Check that the checkout contains the expected files from all subprojects
         assert(os.path.exists(os.path.join(checkoutdir, 'base.txt')))
    @@ -115,45 +115,46 @@ def test_nested_conflict(cli, datafiles):
         copy_subprojects(project, datafiles, ['foo', 'bar'])
     
         result = cli.run(project=project, args=['build', 'target.bst'])
    -    assert result.exit_code != 0
    -    assert result.exception
    -    assert isinstance(result.exception, LoadError)
    -    assert result.exception.reason == LoadErrorReason.CONFLICTING_JUNCTION
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.CONFLICTING_JUNCTION)
     
     
    +# Test that we error correctly when the junction element itself is missing
     @pytest.mark.datafiles(DATA_DIR)
    -def test_invalid_missing(cli, datafiles):
    +def test_missing_junction(cli, datafiles):
         project = os.path.join(str(datafiles), 'invalid')
     
         result = cli.run(project=project, args=['build', 'missing.bst'])
    -    assert result.exit_code != 0
    -    assert result.exception
    -    assert isinstance(result.exception, LoadError)
    -    assert result.exception.reason == LoadErrorReason.MISSING_FILE
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
     
     
    +# Test that we error correctly when an element is not found in the subproject
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_missing_subproject_element(cli, datafiles):
    +    project = os.path.join(str(datafiles), 'invalid')
    +    copy_subprojects(project, datafiles, ['base'])
    +
    +    result = cli.run(project=project, args=['build', 'missing-element.bst'])
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
    +
    +
    +# Test that we error correctly when a junction itself has dependencies
     @pytest.mark.datafiles(DATA_DIR)
     def test_invalid_with_deps(cli, datafiles):
         project = os.path.join(str(datafiles), 'invalid')
         copy_subprojects(project, datafiles, ['base'])
     
         result = cli.run(project=project, args=['build', 'junction-with-deps.bst'])
    -    assert result.exit_code != 0
    -    assert result.exception
    -    assert isinstance(result.exception, ElementError)
    -    assert result.exception.reason == 'element-forbidden-depends'
    
    147
    +    result.assert_main_error(ErrorDomain.ELEMENT, 'element-forbidden-depends')
    
    145 148
     
    
    146 149
     
    
    150
    +# Test that we error correctly when a junction is directly depended on
    
    147 151
     @pytest.mark.datafiles(DATA_DIR)
    
    148 152
     def test_invalid_junction_dep(cli, datafiles):
    
    149 153
         project = os.path.join(str(datafiles), 'invalid')
    
    150 154
         copy_subprojects(project, datafiles, ['base'])
    
    151 155
     
    
    152 156
         result = cli.run(project=project, args=['build', 'junction-dep.bst'])
    
    153
    -    assert result.exit_code != 0
    
    154
    -    assert result.exception
    
    155
    -    assert isinstance(result.exception, LoadError)
    
    156
    -    assert result.exception.reason == LoadErrorReason.INVALID_DATA
    
    157
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    
    157 158
     
    
    158 159
     
    
    159 160
     @pytest.mark.datafiles(DATA_DIR)
    
    ... ... @@ -165,9 +166,9 @@ def test_options_default(cli, tmpdir, datafiles):
    165 166
     
    
    166 167
         # Build, checkout
    
    167 168
         result = cli.run(project=project, args=['build', 'target.bst'])
    
    168
    -    assert result.exit_code == 0
    
    169
    +    result.assert_success()
    
    169 170
         result = cli.run(project=project, args=['checkout', 'target.bst', checkoutdir])
    
    170
    -    assert result.exit_code == 0
    
    171
    +    result.assert_success()
    
    171 172
     
    
    172 173
         assert(os.path.exists(os.path.join(checkoutdir, 'pony.txt')))
    
    173 174
         assert(not os.path.exists(os.path.join(checkoutdir, 'horsy.txt')))
    
    ... ... @@ -182,9 +183,9 @@ def test_options(cli, tmpdir, datafiles):
    182 183
     
    
    183 184
         # Build, checkout
    
    184 185
         result = cli.run(project=project, args=['build', 'target.bst'])
    
    185
    -    assert result.exit_code == 0
    
    186
    +    result.assert_success()
    
    186 187
         result = cli.run(project=project, args=['checkout', 'target.bst', checkoutdir])
    
    187
    -    assert result.exit_code == 0
    
    188
    +    result.assert_success()
    
    188 189
     
    
    189 190
         assert(not os.path.exists(os.path.join(checkoutdir, 'pony.txt')))
    
    190 191
         assert(os.path.exists(os.path.join(checkoutdir, 'horsy.txt')))
    
    ... ... @@ -199,9 +200,9 @@ def test_options_inherit(cli, tmpdir, datafiles):
    199 200
     
    
    200 201
         # Build, checkout
    
    201 202
         result = cli.run(project=project, args=['build', 'target.bst'])
    
    202
    -    assert result.exit_code == 0
    
    203
    +    result.assert_success()
    
    203 204
         result = cli.run(project=project, args=['checkout', 'target.bst', checkoutdir])
    
    204
    -    assert result.exit_code == 0
    
    205
    +    result.assert_success()
    
    205 206
     
    
    206 207
         assert(not os.path.exists(os.path.join(checkoutdir, 'pony.txt')))
    
    207 208
         assert(os.path.exists(os.path.join(checkoutdir, 'horsy.txt')))
    
    ... ... @@ -228,14 +229,11 @@ def test_git_show(cli, tmpdir, datafiles):
    228 229
     
    
    229 230
         # Verify that bst show does not implicitly fetch subproject
    
    230 231
         result = cli.run(project=project, args=['show', 'target.bst'])
    
    231
    -    assert result.exit_code != 0
    
    232
    -    assert result.exception
    
    233
    -    assert isinstance(result.exception, LoadError)
    
    234
    -    assert result.exception.reason == LoadErrorReason.SUBPROJECT_FETCH_NEEDED
    
    232
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_FETCH_NEEDED)
    
    235 233
     
    
    236 234
         # Explicitly fetch subproject
    
    237 235
         result = cli.run(project=project, args=['source', 'fetch', 'base.bst'])
    
    238
    -    assert result.exit_code == 0
    
    236
    +    result.assert_success()
    
    239 237
     
    
    240 238
         # Check that bst show succeeds now and the pipeline includes the subproject element
    
    241 239
         element_list = cli.get_pipeline(project, ['target.bst'])
    
    ... ... @@ -263,9 +261,9 @@ def test_git_build(cli, tmpdir, datafiles):
    263 261
     
    
    264 262
         # Build (with implicit fetch of subproject), checkout
    
    265 263
         result = cli.run(project=project, args=['build', 'target.bst'])
    
    266
    -    assert result.exit_code == 0
    
    264
    +    result.assert_success()
    
    267 265
         result = cli.run(project=project, args=['checkout', 'target.bst', checkoutdir])
    
    268
    -    assert result.exit_code == 0
    
    266
    +    result.assert_success()
    
    269 267
     
    
    270 268
         # Check that the checkout contains the expected files from both projects
    
    271 269
         assert(os.path.exists(os.path.join(checkoutdir, 'base.txt')))
    
    ... ... @@ -304,9 +302,9 @@ def test_build_git_cross_junction_names(cli, tmpdir, datafiles):
    304 302
     
    
    305 303
         # Build (with implicit fetch of subproject), checkout
    
    306 304
         result = cli.run(project=project, args=['build', 'base.bst:target.bst'])
    
    307
    -    assert result.exit_code == 0
    
    305
    +    result.assert_success()
    
    308 306
         result = cli.run(project=project, args=['checkout', 'base.bst:target.bst', checkoutdir])
    
    309
    -    assert result.exit_code == 0
    
    307
    +    result.assert_success()
    
    310 308
     
    
    311 309
         # Check that the checkout contains the expected files from both projects
    
    312 310
         assert(os.path.exists(os.path.join(checkoutdir, 'base.txt')))
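All of the conversions in this file collapse four manual assertions into a single assert_main_error() call. Roughly, the helper bundles the checks that the removed lines spelled out; a hedged approximation (the exact attribute names on the exception object are an assumption here):

    def assert_main_error_sketch(result, error_domain, error_reason):
        # Approximates Result.assert_main_error() from tests/testutils/runcli.py
        assert result.exit_code != 0
        assert result.exception is not None
        assert result.exception.domain == error_domain    # assumed attribute
        assert result.exception.reason == error_reason    # as checked manually before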

  • tests/loader/junctions/invalid/missing-element.bst
    1
    +# This refers to the `foo.bst` element through
    
    2
    +# the `base.bst` junction. The `base.bst` junction
    
    3
    +# exists but the `foo.bst` element does not exist
    
    4
    +# in the subproject.
    
    5
    +#
    
    6
    +kind: stack
    
    7
    +depends:
    
    8
    +- junction: base.bst
    
    9
    +  filename: foo.bst
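The junction:/filename: pair above is the declarative form of the <junction>:<element> cross-junction name used elsewhere in this diff (test_build_git_cross_junction_names builds 'base.bst:target.bst'). A hedged sketch of the same missing reference made inline, reusing the test fixtures; that this fails with the same MISSING_FILE error is an assumption, not something this commit tests:

    # Hypothetical inline equivalent of missing-element.bst's dependency;
    # foo.bst is deliberately absent from the subproject.
    result = cli.run(project=project, args=['build', 'base.bst:foo.bst'])
    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)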

  • tests/sources/tar.py
    ... ... @@ -388,3 +388,19 @@ def test_netrc_already_specified_user(cli, datafiles, server_type, tmpdir):
    388 388
             result = cli.run(project=project, args=['source', 'track', 'target.bst'])
    
    389 389
             result.assert_main_error(ErrorDomain.STREAM, None)
    
    390 390
             result.assert_task_error(ErrorDomain.SOURCE, None)
    
    391
    +
    
    392
    +
    
    393
+    # Test that BuildStream doesn't crash if HOME is unset while
    
    394
+    # the netrc module is trying to find its ~/.netrc file.
    
    395
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'fetch'))
    
    396
    +def test_homeless_environment(cli, tmpdir, datafiles):
    
    397
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    398
    +    generate_project(project, tmpdir)
    
    399
    +
    
    400
    +    # Create a local tar
    
    401
    +    src_tar = os.path.join(str(tmpdir), "a.tar.gz")
    
    402
    +    _assemble_tar(os.path.join(str(datafiles), "content"), "a", src_tar)
    
    403
    +
    
    404
+    # Run a track, making sure the plugin tries to find a ~/.netrc
    
    405
    +    result = cli.run(project=project, args=['source', 'track', 'target.bst'], env={'HOME': None})
    
    406
    +    result.assert_success()
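The crash this guards against comes from the netrc module resolving ~/.netrc through HOME. A sketch of the defensive lookup a source plugin can use (the exception set is an assumption; KeyError covers older Pythons where expanding '~' could fail with HOME unset):

    import netrc

    # Treat an unresolvable or missing ~/.netrc as "no credentials"
    # instead of crashing.
    try:
        auth = netrc.netrc()  # resolves ~/.netrc via HOME
    except (OSError, netrc.NetrcParseError, KeyError):
        auth = None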

  • tests/testutils/runcli.py
    ... ... @@ -191,6 +191,13 @@ class Result():
    191 191
     
    
    192 192
             return list(pushed)
    
    193 193
     
    
    194
    +    def get_partial_pushed_elements(self):
    
    195
    +        pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', self.stderr)
    
    196
    +        if pushed is None:
    
    197
    +            return []
    
    198
    +
    
    199
    +        return list(pushed)
    
    200
    +
    
    194 201
         def get_pulled_elements(self):
    
    195 202
             pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
    
    196 203
             if pulled is None:
    
    ... ... @@ -198,6 +205,13 @@ class Result():
    198 205
     
    
    199 206
             return list(pulled)
    
    200 207
     
    
    208
    +    def get_partial_pulled_elements(self):
    
    209
    +        pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled partial artifact', self.stderr)
    
    210
    +        if pulled is None:
    
    211
    +            return []
    
    212
    +
    
    213
    +        return list(pulled)
    
    214
    +
    
    201 215
     
    
    202 216
     class Cli():
    
    203 217
     
    
    ... ... @@ -529,13 +543,16 @@ def environment(env):
    529 543
         old_env = {}
    
    530 544
         for key, value in env.items():
    
    531 545
             old_env[key] = os.environ.get(key)
    
    532
    -        os.environ[key] = value
    
    546
    +        if value is None:
    
    547
    +            os.environ.pop(key, None)
    
    548
    +        else:
    
    549
    +            os.environ[key] = value
    
    533 550
     
    
    534 551
         yield
    
    535 552
     
    
    536 553
         for key, value in old_env.items():
    
    537 554
             if value is None:
    
    538
    -            del os.environ[key]
    
    555
    +            os.environ.pop(key, None)
    
    539 556
             else:
    
    540 557
                 os.environ[key] = value
    
    541 558
     
    


