[Notes] [Git][BuildStream/buildstream][jmac/remote_execution_split] 12 commits: tests/plugin/pipeline.py: Avoid using host user conf



Title: GitLab

Jim MacArthur pushed to branch jmac/remote_execution_split at BuildStream / buildstream

Commits:

24 changed files:

Changes:

  • NEWS
    ... ... @@ -63,6 +63,10 @@ buildstream 1.3.1
    63 63
     
    
    64 64
       o Added new `bst source-checkout` command to checkout sources of an element.
    
    65 65
     
    
    66
    +  o `bst workspace open` now supports the creation of workspaces for multiple elements and
    
    67
    +    allows the user to set a default location for their creation. This means
    
    68
    +    that the new CLI is no longer backwards compatible with buildstream 1.2.
    
    69
    +
    
    66 70
     
    
    67 71
     =================
    
    68 72
     buildstream 1.1.5
    

  • buildstream/_artifactcache/artifactcache.py
    ... ... @@ -31,7 +31,7 @@ from .. import _signals
    31 31
     from .. import utils
    
    32 32
     from .. import _yaml
    
    33 33
     
    
    34
    -from .cascache import CASCache, CASRemote
    
    34
    +from .cascache import CASRemote, CASRemoteSpec
    
    35 35
     
    
    36 36
     
    
    37 37
     CACHE_SIZE_FILE = "cache_size"
    
    ... ... @@ -45,48 +45,8 @@ CACHE_SIZE_FILE = "cache_size"
    45 45
     #     push (bool): Whether we should attempt to push artifacts to this cache,
    
    46 46
     #                  in addition to pulling from it.
    
    47 47
     #
    
    48
    -class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert client_key client_cert')):
    
    49
    -
    
    50
    -    # _new_from_config_node
    
    51
    -    #
    
    52
    -    # Creates an ArtifactCacheSpec() from a YAML loaded node
    
    53
    -    #
    
    54
    -    @staticmethod
    
    55
    -    def _new_from_config_node(spec_node, basedir=None):
    
    56
    -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert'])
    
    57
    -        url = _yaml.node_get(spec_node, str, 'url')
    
    58
    -        push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
    
    59
    -        if not url:
    
    60
    -            provenance = _yaml.node_get_provenance(spec_node, 'url')
    
    61
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    62
    -                            "{}: empty artifact cache URL".format(provenance))
    
    63
    -
    
    64
    -        server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
    
    65
    -        if server_cert and basedir:
    
    66
    -            server_cert = os.path.join(basedir, server_cert)
    
    67
    -
    
    68
    -        client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
    
    69
    -        if client_key and basedir:
    
    70
    -            client_key = os.path.join(basedir, client_key)
    
    71
    -
    
    72
    -        client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
    
    73
    -        if client_cert and basedir:
    
    74
    -            client_cert = os.path.join(basedir, client_cert)
    
    75
    -
    
    76
    -        if client_key and not client_cert:
    
    77
    -            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
    
    78
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    79
    -                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
    
    80
    -
    
    81
    -        if client_cert and not client_key:
    
    82
    -            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
    
    83
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    84
    -                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
    
    85
    -
    
    86
    -        return ArtifactCacheSpec(url, push, server_cert, client_key, client_cert)
    
    87
    -
    
    88
    -
    
    89
    -ArtifactCacheSpec.__new__.__defaults__ = (None, None, None)
    
    48
    +class ArtifactCacheSpec(CASRemoteSpec):
    
    49
    +    pass
    
    90 50
     
    
    91 51
     
    
    92 52
     # An ArtifactCache manages artifacts.
    
    ... ... @@ -99,7 +59,7 @@ class ArtifactCache():
    99 59
             self.context = context
    
    100 60
             self.extractdir = os.path.join(context.artifactdir, 'extract')
    
    101 61
     
    
    102
    -        self.cas = CASCache(context.artifactdir)
    
    62
    +        self.cas = context.get_cascache()
    
    103 63
     
    
    104 64
             self.global_remote_specs = []
    
    105 65
             self.project_remote_specs = {}
    
    ... ... @@ -719,107 +679,6 @@ class ArtifactCache():
    719 679
     
    
    720 680
             return False
    
    721 681
     
    
    722
    -    # pull_tree():
    
    723
    -    #
    
    724
    -    # Pull a single Tree rather than an artifact.
    
    725
    -    # Does not update local refs.
    
    726
    -    #
    
    727
    -    # Args:
    
    728
    -    #     project (Project): The current project
    
    729
    -    #     digest (Digest): The digest of the tree
    
    730
    -    #
    
    731
    -    def pull_tree(self, project, digest):
    
    732
    -        for remote in self._remotes[project]:
    
    733
    -            digest = self.cas.pull_tree(remote, digest)
    
    734
    -
    
    735
    -            if digest:
    
    736
    -                # no need to pull from additional remotes
    
    737
    -                return digest
    
    738
    -
    
    739
    -        return None
    
    740
    -
    
    741
    -    # push_directory():
    
    742
    -    #
    
    743
    -    # Push the given virtual directory to all remotes.
    
    744
    -    #
    
    745
    -    # Args:
    
    746
    -    #     project (Project): The current project
    
    747
    -    #     directory (Directory): A virtual directory object to push.
    
    748
    -    #
    
    749
    -    # Raises:
    
    750
    -    #     (ArtifactError): if there was an error
    
    751
    -    #
    
    752
    -    def push_directory(self, project, directory):
    
    753
    -        if self._has_push_remotes:
    
    754
    -            push_remotes = [r for r in self._remotes[project] if r.spec.push]
    
    755
    -        else:
    
    756
    -            push_remotes = []
    
    757
    -
    
    758
    -        if not push_remotes:
    
    759
    -            raise ArtifactError("push_directory was called, but no remote artifact " +
    
    760
    -                                "servers are configured as push remotes.")
    
    761
    -
    
    762
    -        if directory.ref is None:
    
    763
    -            return
    
    764
    -
    
    765
    -        for remote in push_remotes:
    
    766
    -            self.cas.push_directory(remote, directory)
    
    767
    -
    
    768
    -    # push_message():
    
    769
    -    #
    
    770
    -    # Push the given protobuf message to all remotes.
    
    771
    -    #
    
    772
    -    # Args:
    
    773
    -    #     project (Project): The current project
    
    774
    -    #     message (Message): A protobuf message to push.
    
    775
    -    #
    
    776
    -    # Raises:
    
    777
    -    #     (ArtifactError): if there was an error
    
    778
    -    #
    
    779
    -    def push_message(self, project, message):
    
    780
    -
    
    781
    -        if self._has_push_remotes:
    
    782
    -            push_remotes = [r for r in self._remotes[project] if r.spec.push]
    
    783
    -        else:
    
    784
    -            push_remotes = []
    
    785
    -
    
    786
    -        if not push_remotes:
    
    787
    -            raise ArtifactError("push_message was called, but no remote artifact " +
    
    788
    -                                "servers are configured as push remotes.")
    
    789
    -
    
    790
    -        for remote in push_remotes:
    
    791
    -            message_digest = self.cas.push_message(remote, message)
    
    792
    -
    
    793
    -        return message_digest
    
    794
    -
    
    795
    -    # verify_digest_pushed():
    
    796
    -    #
    
    797
    -    # Check whether the object is already on the server in which case
    
    798
    -    # there is no need to upload it.
    
    799
    -    #
    
    800
    -    # Args:
    
    801
    -    #     project (Project): The current project
    
    802
    -    #     digest (Digest): The object digest.
    
    803
    -    #
    
    804
    -    def verify_digest_pushed(self, project, digest):
    
    805
    -
    
    806
    -        if self._has_push_remotes:
    
    807
    -            push_remotes = [r for r in self._remotes[project] if r.spec.push]
    
    808
    -        else:
    
    809
    -            push_remotes = []
    
    810
    -
    
    811
    -        if not push_remotes:
    
    812
    -            raise ArtifactError("verify_digest_pushed was called, but no remote artifact " +
    
    813
    -                                "servers are configured as push remotes.")
    
    814
    -
    
    815
    -        pushed = False
    
    816
    -
    
    817
    -        for remote in push_remotes:
    
    818
    -            if self.cas.verify_digest_on_remote(remote, digest):
    
    819
    -                pushed = True
    
    820
    -
    
    821
    -        return pushed
    
    822
    -
    
    823 682
         # link_key():
    
    824 683
         #
    
    825 684
         # Add a key for an existing artifact.
    

  • buildstream/_artifactcache/cascache.py
    ... ... @@ -17,6 +17,7 @@
    17 17
     #  Authors:
    
    18 18
     #        Jürg Billeter <juerg billeter codethink co uk>
    
    19 19
     
    
    20
    +from collections import namedtuple
    
    20 21
     import hashlib
    
    21 22
     import itertools
    
    22 23
     import io
    
    ... ... @@ -34,7 +35,8 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remo
    34 35
     from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
    
    35 36
     
    
    36 37
     from .. import utils
    
    37
    -from .._exceptions import CASError
    
    38
    +from .._exceptions import CASError, LoadError, LoadErrorReason
    
    39
    +from .. import _yaml
    
    38 40
     
    
    39 41
     
    
    40 42
     # The default limit for gRPC messages is 4 MiB.
    
    ... ... @@ -42,6 +44,50 @@ from .._exceptions import CASError
    42 44
     _MAX_PAYLOAD_BYTES = 1024 * 1024
    
    43 45
     
    
    44 46
     
    
    47
    +class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert')):
    
    48
    +
    
    49
    +    # _new_from_config_node
    
    50
    +    #
    
    51
    +    # Creates a CASRemoteSpec() from a YAML-loaded node
    
    52
    +    #
    
    53
    +    @staticmethod
    
    54
    +    def _new_from_config_node(spec_node, basedir=None):
    
    55
    +        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert'])
    
    56
    +        url = _yaml.node_get(spec_node, str, 'url')
    
    57
    +        push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
    
    58
    +        if not url:
    
    59
    +            provenance = _yaml.node_get_provenance(spec_node, 'url')
    
    60
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    61
    +                            "{}: empty artifact cache URL".format(provenance))
    
    62
    +
    
    63
    +        server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
    
    64
    +        if server_cert and basedir:
    
    65
    +            server_cert = os.path.join(basedir, server_cert)
    
    66
    +
    
    67
    +        client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
    
    68
    +        if client_key and basedir:
    
    69
    +            client_key = os.path.join(basedir, client_key)
    
    70
    +
    
    71
    +        client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
    
    72
    +        if client_cert and basedir:
    
    73
    +            client_cert = os.path.join(basedir, client_cert)
    
    74
    +
    
    75
    +        if client_key and not client_cert:
    
    76
    +            provenance = _yaml.node_get_provenance(spec_node, 'client-key')
    
    77
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    78
    +                            "{}: 'client-key' was specified without 'client-cert'".format(provenance))
    
    79
    +
    
    80
    +        if client_cert and not client_key:
    
    81
    +            provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
    
    82
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    83
    +                            "{}: 'client-cert' was specified without 'client-key'".format(provenance))
    
    84
    +
    
    85
    +        return CASRemoteSpec(url, push, server_cert, client_key, client_cert)
    
    86
    +
    
    87
    +
    
    88
    +CASRemoteSpec.__new__.__defaults__ = (None, None, None)
    
    89
    +
    
    90
    +
    
    45 91
     # A CASCache manages a CAS repository as specified in the Remote Execution API.
    
    46 92
     #
    
    47 93
     # Args:
    

  • buildstream/_context.py
    ... ... @@ -31,6 +31,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
    31 31
     from ._message import Message, MessageType
    
    32 32
     from ._profile import Topics, profile_start, profile_end
    
    33 33
     from ._artifactcache import ArtifactCache
    
    34
    +from ._artifactcache.cascache import CASCache
    
    34 35
     from ._workspaces import Workspaces
    
    35 36
     from .plugin import _plugin_lookup
    
    36 37
     
    
    ... ... @@ -59,6 +60,9 @@ class Context():
    59 60
             # The directory where build sandboxes will be created
    
    60 61
             self.builddir = None
    
    61 62
     
    
    63
    +        # Default root location for workspaces
    
    64
    +        self.workspacedir = None
    
    65
    +
    
    62 66
             # The local binary artifact cache directory
    
    63 67
             self.artifactdir = None
    
    64 68
     
    
    ... ... @@ -138,6 +142,7 @@ class Context():
    138 142
             self._workspaces = None
    
    139 143
             self._log_handle = None
    
    140 144
             self._log_filename = None
    
    145
    +        self._cascache = None
    
    141 146
     
    
    142 147
         # load()
    
    143 148
         #
    
    ... ... @@ -177,10 +182,10 @@ class Context():
    177 182
             _yaml.node_validate(defaults, [
    
    178 183
                 'sourcedir', 'builddir', 'artifactdir', 'logdir',
    
    179 184
                 'scheduler', 'artifacts', 'logging', 'projects',
    
    180
    -            'cache', 'prompt'
    
    185
    +            'cache', 'prompt', 'workspacedir',
    
    181 186
             ])
    
    182 187
     
    
    183
    -        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
    
    188
    +        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
    
    184 189
                 # Allow the ~ tilde expansion and any environment variables in
    
    185 190
                 # path specification in the config files.
    
    186 191
                 #
    
    ... ... @@ -617,6 +622,11 @@ class Context():
    617 622
             if not os.environ.get('XDG_DATA_HOME'):
    
    618 623
                 os.environ['XDG_DATA_HOME'] = os.path.expanduser('~/.local/share')
    
    619 624
     
    
    625
    +    def get_cascache(self):
    
    626
    +        if self._cascache is None:
    
    627
    +            self._cascache = CASCache(self.artifactdir)
    
    628
    +        return self._cascache
    
    629
    +
    
    620 630
     
    
    621 631
     # _node_get_option_str()
    
    622 632
     #
    

  • buildstream/_frontend/cli.py
    ... ... @@ -707,31 +707,23 @@ def workspace():
    707 707
     @click.option('--no-checkout', default=False, is_flag=True,
    
    708 708
                   help="Do not checkout the source, only link to the given directory")
    
    709 709
     @click.option('--force', '-f', default=False, is_flag=True,
    
    710
    -              help="Overwrite files existing in checkout directory")
    
    710
    +              help="The workspace will be created even if the directory in which it will be created is not empty " +
    
    711
    +              "or if a workspace for that element already exists")
    
    711 712
     @click.option('--track', 'track_', default=False, is_flag=True,
    
    712 713
                   help="Track and fetch new source references before checking out the workspace")
    
    713
    -@click.argument('element',
    
    714
    -                type=click.Path(readable=False))
    
    715
    -@click.argument('directory', type=click.Path(file_okay=False))
    
    714
    +@click.option('--directory', type=click.Path(file_okay=False), default=None,
    
    715
    +              help="Only for use when a single Element is given: Set the directory to use to create the workspace")
    
    716
    +@click.argument('elements', nargs=-1, type=click.Path(readable=False), required=True)
    
    716 717
     @click.pass_obj
    
    717
    -def workspace_open(app, no_checkout, force, track_, element, directory):
    
    718
    +def workspace_open(app, no_checkout, force, track_, directory, elements):
    
    718 719
         """Open a workspace for manual source modification"""
    
    719 720
     
    
    720
    -    if os.path.exists(directory):
    
    721
    -
    
    722
    -        if not os.path.isdir(directory):
    
    723
    -            click.echo("Checkout directory is not a directory: {}".format(directory), err=True)
    
    724
    -            sys.exit(-1)
    
    725
    -
    
    726
    -        if not (no_checkout or force) and os.listdir(directory):
    
    727
    -            click.echo("Checkout directory is not empty: {}".format(directory), err=True)
    
    728
    -            sys.exit(-1)
    
    729
    -
    
    730 721
         with app.initialized():
    
    731
    -        app.stream.workspace_open(element, directory,
    
    722
    +        app.stream.workspace_open(elements,
    
    732 723
                                       no_checkout=no_checkout,
    
    733 724
                                       track_first=track_,
    
    734
    -                                  force=force)
    
    725
    +                                  force=force,
    
    726
    +                                  custom_dir=directory)
    
    735 727
     
    
    736 728
     
    
    737 729
     ##################################################################
    

  • buildstream/_project.py
    ... ... @@ -30,6 +30,7 @@ from ._profile import Topics, profile_start, profile_end
    30 30
     from ._exceptions import LoadError, LoadErrorReason
    
    31 31
     from ._options import OptionPool
    
    32 32
     from ._artifactcache import ArtifactCache
    
    33
    +from .sandbox import SandboxRemote
    
    33 34
     from ._elementfactory import ElementFactory
    
    34 35
     from ._sourcefactory import SourceFactory
    
    35 36
     from .plugin import CoreWarnings
    
    ... ... @@ -130,7 +131,7 @@ class Project():
    130 131
             self._shell_host_files = []   # A list of HostMount objects
    
    131 132
     
    
    132 133
             self.artifact_cache_specs = None
    
    133
    -        self.remote_execution_url = None
    
    134
    +        self.remote_execution_specs = None
    
    134 135
             self._sandbox = None
    
    135 136
             self._splits = None
    
    136 137
     
    
    ... ... @@ -493,9 +494,7 @@ class Project():
    493 494
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
    
    494 495
     
    
    495 496
             # Load remote-execution configuration for this project
    
    496
    -        remote_execution = _yaml.node_get(config, Mapping, 'remote-execution')
    
    497
    -        _yaml.node_validate(remote_execution, ['url'])
    
    498
    -        self.remote_execution_url = _yaml.node_get(remote_execution, str, 'url')
    
    497
    +        self.remote_execution_specs = SandboxRemote.specs_from_config_node(config, self.directory)
    
    499 498
     
    
    500 499
             # Load sandbox environment variables
    
    501 500
             self.base_environment = _yaml.node_get(config, Mapping, 'environment')
    

  • buildstream/_stream.py
    ... ... @@ -464,44 +464,30 @@ class Stream():
    464 464
         # Open a project workspace
    
    465 465
         #
    
    466 466
         # Args:
    
    467
    -    #    target (str): The target element to open the workspace for
    
    468
    -    #    directory (str): The directory to stage the source in
    
    467
    +    #    targets (list): List of target elements to open workspaces for
    
    469 468
         #    no_checkout (bool): Whether to skip checking out the source
    
    470 469
         #    track_first (bool): Whether to track and fetch first
    
    471 470
         #    force (bool): Whether to ignore contents in an existing directory
    
    471
    +    #    custom_dir (str): Custom location in which to create the workspace, or None to use the default location.
    
    472 472
         #
    
    473
    -    def workspace_open(self, target, directory, *,
    
    473
    +    def workspace_open(self, targets, *,
    
    474 474
                            no_checkout,
    
    475 475
                            track_first,
    
    476
    -                       force):
    
    476
    +                       force,
    
    477
    +                       custom_dir):
    
    478
    +        # This function is somewhat convoluted because it tries to be as atomic as possible.
    
    477 479
     
    
    478 480
             if track_first:
    
    479
    -            track_targets = (target,)
    
    481
    +            track_targets = targets
    
    480 482
             else:
    
    481 483
                 track_targets = ()
    
    482 484
     
    
    483
    -        elements, track_elements = self._load((target,), track_targets,
    
    485
    +        elements, track_elements = self._load(targets, track_targets,
    
    484 486
                                                   selection=PipelineSelection.REDIRECT,
    
    485 487
                                                   track_selection=PipelineSelection.REDIRECT)
    
    486
    -        target = elements[0]
    
    487
    -        directory = os.path.abspath(directory)
    
    488
    -
    
    489
    -        if not list(target.sources()):
    
    490
    -            build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
    
    491
    -            if not build_depends:
    
    492
    -                raise StreamError("The given element has no sources")
    
    493
    -            detail = "Try opening a workspace on one of its dependencies instead:\n"
    
    494
    -            detail += "  \n".join(build_depends)
    
    495
    -            raise StreamError("The given element has no sources", detail=detail)
    
    496 488
     
    
    497 489
             workspaces = self._context.get_workspaces()
    
    498 490
     
    
    499
    -        # Check for workspace config
    
    500
    -        workspace = workspaces.get_workspace(target._get_full_name())
    
    501
    -        if workspace and not force:
    
    502
    -            raise StreamError("Workspace '{}' is already defined at: {}"
    
    503
    -                              .format(target.name, workspace.get_absolute_path()))
    
    504
    -
    
    505 491
             # If we're going to checkout, we need at least a fetch,
    
    506 492
             # if we were asked to track first, we're going to fetch anyway.
    
    507 493
             #
    
    ... ... @@ -511,29 +497,88 @@ class Stream():
    511 497
                     track_elements = elements
    
    512 498
                 self._fetch(elements, track_elements=track_elements)
    
    513 499
     
    
    514
    -        if not no_checkout and target._get_consistency() != Consistency.CACHED:
    
    515
    -            raise StreamError("Could not stage uncached source. " +
    
    516
    -                              "Use `--track` to track and " +
    
    517
    -                              "fetch the latest version of the " +
    
    518
    -                              "source.")
    
    519
    -
    
    520
    -        if workspace:
    
    521
    -            workspaces.delete_workspace(target._get_full_name())
    
    522
    -            workspaces.save_config()
    
    523
    -            shutil.rmtree(directory)
    
    524
    -        try:
    
    525
    -            os.makedirs(directory, exist_ok=True)
    
    526
    -        except OSError as e:
    
    527
    -            raise StreamError("Failed to create workspace directory: {}".format(e)) from e
    
    500
    +        expanded_directories = []
    
    501
    +        # To be as atomic as possible, loop through the elements first and raise any detectable errors early
    
    502
    +        for target in elements:
    
    503
    +
    
    504
    +            if not list(target.sources()):
    
    505
    +                build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
    
    506
    +                if not build_depends:
    
    507
    +                    raise StreamError("The element {}  has no sources".format(target.name))
    
    508
    +                detail = "Try opening a workspace on one of its dependencies instead:\n"
    
    509
    +                detail += "  \n".join(build_depends)
    
    510
    +                raise StreamError("The element {} has no sources".format(target.name), detail=detail)
    
    511
    +
    
    512
    +            # Check for workspace config
    
    513
    +            workspace = workspaces.get_workspace(target._get_full_name())
    
    514
    +            if workspace and not force:
    
    515
    +                raise StreamError("Element '{}' already has workspace defined at: {}"
    
    516
    +                                  .format(target.name, workspace.get_absolute_path()))
    
    517
    +
    
    518
    +            if not no_checkout and target._get_consistency() != Consistency.CACHED:
    
    519
    +                raise StreamError("Could not stage uncached source. For {} ".format(target.name) +
    
    520
    +                                  "Use `--track` to track and " +
    
    521
    +                                  "fetch the latest version of the " +
    
    522
    +                                  "source.")
    
    523
    +
    
    524
    +            if not custom_dir:
    
    525
    +                directory = os.path.abspath(os.path.join(self._context.workspacedir, target.name))
    
    526
    +                if directory[-4:] == '.bst':
    
    527
    +                    directory = directory[:-4]
    
    528
    +                expanded_directories.append(directory)
    
    529
    +
    
    530
    +        if custom_dir:
    
    531
    +            if len(elements) != 1:
    
    532
    +                raise StreamError("Exactly one element can be given if --directory is used",
    
    533
    +                                  reason='directory-with-multiple-elements')
    
    534
    +            expanded_directories = [custom_dir, ]
    
    535
    +        else:
    
    536
    +            # If this fails it is a bug in whatever calls this, usually cli.py, and so cannot be tested for via the
    
    537
    +            # run bst test mechanism.
    
    538
    +            assert len(elements) == len(expanded_directories)
    
    539
    +
    
    540
    +        for target, directory in zip(elements, expanded_directories):
    
    541
    +            if os.path.exists(directory):
    
    542
    +                if not os.path.isdir(directory):
    
    543
    +                    raise StreamError("For element '{}', Directory path is not a directory: {}"
    
    544
    +                                      .format(target.name, directory), reason='bad-directory')
    
    545
    +
    
    546
    +                if not (no_checkout or force) and os.listdir(directory):
    
    547
    +                    raise StreamError("For element '{}', Directory path is not empty: {}"
    
    548
    +                                      .format(target.name, directory), reason='bad-directory')
    
    549
    +
    
    550
    +        # So far this function has tried to catch as many issues as possible without making any changes.
    
    551
    +        # Now it does the bits that cannot be made atomic.
    
    552
    +        targetGenerator = zip(elements, expanded_directories)
    
    553
    +        for target, directory in targetGenerator:
    
    554
    +            self._message(MessageType.INFO, "Creating workspace for element {}"
    
    555
    +                          .format(target.name))
    
    556
    +
    
    557
    +            workspace = workspaces.get_workspace(target._get_full_name())
    
    558
    +            if workspace:
    
    559
    +                workspaces.delete_workspace(target._get_full_name())
    
    560
    +                workspaces.save_config()
    
    561
    +                shutil.rmtree(directory)
    
    562
    +            try:
    
    563
    +                os.makedirs(directory, exist_ok=True)
    
    564
    +            except OSError as e:
    
    565
    +                todo_elements = " ".join([str(target.name) for target, directory_dict in targetGenerator])
    
    566
    +                if todo_elements:
    
    567
    +                    # This output should make creating the remaining workspaces as easy as possible.
    
    568
    +                    todo_elements = "\nDid not try to create workspaces for " + todo_elements
    
    569
    +                raise StreamError("Failed to create workspace directory: {}".format(e) + todo_elements) from e
    
    528 570
     
    
    529
    -        workspaces.create_workspace(target._get_full_name(), directory)
    
    571
    +            workspaces.create_workspace(target._get_full_name(), directory)
    
    530 572
     
    
    531
    -        if not no_checkout:
    
    532
    -            with target.timed_activity("Staging sources to {}".format(directory)):
    
    533
    -                target._open_workspace()
    
    573
    +            if not no_checkout:
    
    574
    +                with target.timed_activity("Staging sources to {}".format(directory)):
    
    575
    +                    target._open_workspace()
    
    534 576
     
    
    535
    -        workspaces.save_config()
    
    536
    -        self._message(MessageType.INFO, "Saved workspace configuration")
    
    577
    +            # Saving the workspace configuration after each workspace is set up means that, if creating a
    
    578
    +            # later workspace fails, the workspaces that were successfully created are still saved.
    
    579
    +            workspaces.save_config()
    
    580
    +            self._message(MessageType.INFO, "Created a workspace for element: {}"
    
    581
    +                          .format(target._get_full_name()))
    
    537 582
     
    
    538 583
         # workspace_close
    
    539 584
         #
    

  • buildstream/data/projectconfig.yaml
    ... ... @@ -197,6 +197,3 @@ shell:
    197 197
       # Command to run when `bst shell` does not provide a command
    
    198 198
       #
    
    199 199
       command: [ 'sh', '-i' ]
    200
    -
    
    201
    -remote-execution:
    
    202
    -  url: ""
    \ No newline at end of file

  • buildstream/data/userconfig.yaml
    ... ... @@ -22,6 +22,9 @@ artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
    22 22
     # Location to store build logs
    
    23 23
     logdir: ${XDG_CACHE_HOME}/buildstream/logs
    
    24 24
     
    
    25
    +# Default root location for workspaces, blank for no default set.
    
    26
    +workspacedir: .
    
    27
    +
    
    25 28
     #
    
    26 29
     #    Cache
    
    27 30
     #
    

  • buildstream/element.py
    ... ... @@ -250,9 +250,9 @@ class Element(Plugin):
    250 250
     
    
    251 251
             # Extract remote execution URL
    
    252 252
             if not self.__is_junction:
    
    253
    -            self.__remote_execution_url = project.remote_execution_url
    
    253
    +            self.__remote_execution_specs = project.remote_execution_specs
    
    254 254
             else:
    
    255
    -            self.__remote_execution_url = None
    
    255
    +            self.__remote_execution_specs = None
    
    256 256
     
    
    257 257
             # Extract Sandbox config
    
    258 258
             self.__sandbox_config = self.__extract_sandbox_config(meta)
    
    ... ... @@ -2125,7 +2125,7 @@ class Element(Plugin):
    2125 2125
         # supports it.
    
    2126 2126
         #
    
    2127 2127
         def __use_remote_execution(self):
    
    2128
    -        return self.__remote_execution_url and self.BST_VIRTUAL_DIRECTORY
    
    2128
    +        return self.__remote_execution_specs and self.BST_VIRTUAL_DIRECTORY
    
    2129 2129
     
    
    2130 2130
         # __sandbox():
    
    2131 2131
         #
    
    ... ... @@ -2160,13 +2160,13 @@ class Element(Plugin):
    2160 2160
                                         stdout=stdout,
    
    2161 2161
                                         stderr=stderr,
    
    2162 2162
                                         config=config,
    
    2163
    -                                    server_url=self.__remote_execution_url,
    
    2163
    +                                    specs=self.__remote_execution_specs,
    
    2164 2164
                                         bare_directory=bare_directory,
    
    2165 2165
                                         allow_real_directory=False)
    
    2166 2166
                 yield sandbox
    
    2167 2167
     
    
    2168 2168
             elif directory is not None and os.path.exists(directory):
    
    2169
    -            if allow_remote and self.__remote_execution_url:
    
    2169
    +            if allow_remote and self.__remote_execution_specs:
    
    2170 2170
                     self.warn("Artifact {} is configured to use remote execution but element plugin does not support it."
    
    2171 2171
                               .format(self.name), detail="Element plugin '{kind}' does not support virtual directories."
    
    2172 2172
                               .format(kind=self.get_kind()), warning_token="remote-failure")
    

  • buildstream/sandbox/_sandboxremote.py
    ... ... @@ -19,6 +19,7 @@
    19 19
     #        Jim MacArthur <jim macarthur codethink co uk>
    
    20 20
     
    
    21 21
     import os
    
    22
    +from collections import namedtuple
    
    22 23
     from urllib.parse import urlparse
    
    23 24
     from functools import partial
    
    24 25
     
    
    ... ... @@ -31,7 +32,15 @@ from .. import _signals
    31 32
     from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
    
    32 33
     from .._protos.google.rpc import code_pb2
    
    33 34
     from .._exceptions import SandboxError
    
    35
    +from .. import _yaml
    
    34 36
     from .._protos.google.longrunning import operations_pb2, operations_pb2_grpc
    
    37
    +from .._artifactcache.cascache import CASRemote, CASRemoteSpec
    
    38
    +
    
    39
    +from .._exceptions import SandboxError
    
    40
    +
    
    41
    +
    
    42
    +class RemoteExecutionSpec(namedtuple('RemoteExecutionSpec', 'exec_service storage_service')):
    
    43
    +    pass
    
    35 44
     
    
    36 45
     
    
    37 46
     # SandboxRemote()
    
    ... ... @@ -44,17 +53,67 @@ class SandboxRemote(Sandbox):
    44 53
         def __init__(self, *args, **kwargs):
    
    45 54
             super().__init__(*args, **kwargs)
    
    46 55
     
    
    47
    -        url = urlparse(kwargs['server_url'])
    
    48
    -        if not url.scheme or not url.hostname or not url.port:
    
    49
    -            raise SandboxError("Configured remote URL '{}' does not match the expected layout. "
    
    50
    -                               .format(kwargs['server_url']) +
    
    51
    -                               "It should be of the form <protocol>://<domain name>:<port>.")
    
    52
    -        elif url.scheme != 'http':
    
    53
    -            raise SandboxError("Configured remote '{}' uses an unsupported protocol. "
    
    54
    -                               "Only plain HTTP is currenlty supported (no HTTPS).")
    
    56
    +        config = kwargs['specs']  # This should be a RemoteExecutionSpec
    
    57
    +        if config is None:
    
    58
    +            return
    
    59
    +
    
    60
    +        self.storage_url = config.storage_service['url']
    
    61
    +        self.exec_url = config.exec_service['url']
    
    62
    +
    
    63
    +        self.storage_remote_spec = CASRemoteSpec(self.storage_url, push=True,
    
    64
    +                                                 server_cert=config.storage_service['server-cert'],
    
    65
    +                                                 client_key=config.storage_service['client-key'],
    
    66
    +                                                 client_cert=config.storage_service['client-cert'])
    
    67
    +
    
    68
    +    @staticmethod
    
    69
    +    def specs_from_config_node(config_node, basedir):
    
    70
    +
    
    71
    +        def require_node(config, keyname):
    
    72
    +            val = config.get(keyname)
    
    73
    +            if val is None:
    
    74
    +                provenance = _yaml.node_get_provenance(remote_config, key=keyname)
    
    75
    +                raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
    
    76
    +                                      "'{}' was not present in the remote "
    
    77
    +                                      "execution configuration (remote-execution). "
    
    78
    +                                      .format(keyname))
    
    79
    +            return val
    
    80
    +
    
    81
    +        remote_config = config_node.get("remote-execution", None)
    
    82
    +        if remote_config is None:
    
    83
    +            return None
    
    84
    +
    
    85
    +        # Maintain some backwards compatibility with older configs, in which 'url' was the only valid key for
    
    86
    +        # remote-execution.
    
    55 87
     
    
    56
    -        self.server_url = '{}:{}'.format(url.hostname, url.port)
    
    57
    -        self.operation_name = None
    
    88
    +        tls_keys = ['client-key', 'client-cert', 'server-cert']
    
    89
    +
    
    90
    +        _yaml.node_validate(remote_config, ['exec-service', 'storage-service', 'url'])
    
    91
    +        remote_exec_service_config = require_node(remote_config, 'exec-service')
    
    92
    +        remote_exec_storage_config = require_node(remote_config, 'storage-service')
    
    93
    +
    
    94
    +        _yaml.node_validate(remote_exec_service_config, ['url'])
    
    95
    +        _yaml.node_validate(remote_exec_storage_config, ['url'] + tls_keys)
    
    96
    +
    
    97
    +        if 'url' in remote_config:
    
    98
    +            if 'exec-service' not in remote_config:
    
    99
    +                remote_config['exec-service'] = {'url': remote_config['url']}
    
    100
    +            else:
    
    101
    +                provenance = _yaml.node_get_provenance(remote_config, key='url')
    
    102
    +                raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
    
    103
    +                                      "'url' and 'exec-service' keys were found in the remote "
    
    104
    +                                      "execution configuration (remote-execution). "
    
    105
    +                                      "You can only specify one of these.")
    
    106
    +
    
    107
    +        for key in tls_keys:
    
    108
    +            if key not in remote_exec_storage_config:
    
    109
    +                provenance = _yaml.node_get_provenance(remote_config, key='storage-service')
    
    110
    +                raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
    
    111
    +                                      "{}: The keys {} are necessary for the storage-service section of "
    
    112
    +                                      "remote-execution configuration. Your config is missing '{}'."
    
    113
    +                                      .format(str(provenance), tls_keys, key))
    
    114
    +
    
    115
    +        spec = RemoteExecutionSpec(remote_config['exec-service'], remote_config['storage-service'])
    
    116
    +        return spec
    
    58 117
     
    
    59 118
         def run_remote_command(self, command, input_root_digest, working_directory, environment):
    
    60 119
             # Sends an execution request to the remote execution server.
    
    ... ... @@ -73,12 +132,14 @@ class SandboxRemote(Sandbox):
    73 132
                                                           output_directories=[self._output_directory],
    
    74 133
                                                           platform=None)
    
    75 134
             context = self._get_context()
    
    76
    -        cascache = context.artifactcache
    
    135
    +        cascache = context.get_cascache()
    
    136
    +        casremote = CASRemote(self.storage_remote_spec)
    
    137
    +
    
    77 138
             # Upload the Command message to the remote CAS server
    
    78
    -        command_digest = cascache.push_message(self._get_project(), remote_command)
    
    79
    -        if not command_digest or not cascache.verify_digest_pushed(self._get_project(), command_digest):
    
    139
    +        command_digest = cascache.push_message(casremote, remote_command)
    
    140
    +        if not command_digest or not cascache.verify_digest_on_remote(casremote, command_digest):
    
    80 141
                 raise SandboxError("Failed pushing build command to remote CAS.")
    
    81
    -
    
    142
    +        print("Created command digest: {}".format(command_digest.hash))
    
    82 143
             # Create and send the action.
    
    83 144
             action = remote_execution_pb2.Action(command_digest=command_digest,
    
    84 145
                                                  input_root_digest=input_root_digest,
    
    ... ... @@ -86,12 +147,13 @@ class SandboxRemote(Sandbox):
    86 147
                                                  do_not_cache=False)
    
    87 148
     
    
    88 149
             # Upload the Action message to the remote CAS server
    
    89
    -        action_digest = cascache.push_message(self._get_project(), action)
    
    90
    -        if not action_digest or not cascache.verify_digest_pushed(self._get_project(), action_digest):
    
    150
    +        action_digest = cascache.push_message(casremote, action)
    
    151
    +        print("Created action digest: {}".format(action_digest.hash))
    
    152
    +        if not action_digest or not cascache.verify_digest_on_remote(casremote, action_digest):
    
    91 153
                 raise SandboxError("Failed pushing build action to remote CAS.")
    
    92 154
     
    
    93 155
             # Next, try to create a communication channel to the BuildGrid server.
    
    94
    -        channel = grpc.insecure_channel(self.server_url)
    
    156
    +        channel = grpc.insecure_channel(self.exec_url)
    
    95 157
             stub = remote_execution_pb2_grpc.ExecutionStub(channel)
    
    96 158
             request = remote_execution_pb2.ExecuteRequest(action_digest=action_digest,
    
    97 159
                                                           skip_cache_lookup=False)
    
    ... ... @@ -117,7 +179,7 @@ class SandboxRemote(Sandbox):
    117 179
                     status_code = e.code()
    
    118 180
                     if status_code == grpc.StatusCode.UNAVAILABLE:
    
    119 181
                         raise SandboxError("Failed contacting remote execution server at {}."
    
    120
    -                                       .format(self.server_url))
    
    182
    +                                       .format(self.exec_url))
    
    121 183
     
    
    122 184
                     elif status_code in (grpc.StatusCode.INVALID_ARGUMENT,
    
    123 185
                                          grpc.StatusCode.FAILED_PRECONDITION,
    
    ... ... @@ -188,9 +250,11 @@ class SandboxRemote(Sandbox):
    188 250
                 raise SandboxError("Output directory structure had no digest attached.")
    
    189 251
     
    
    190 252
             context = self._get_context()
    
    191
    -        cascache = context.artifactcache
    
    253
    +        cascache = context.get_cascache()
    
    254
    +        casremote = CASRemote(self.storage_remote_spec)
    
    255
    +
    
    192 256
             # Now do a pull to ensure we have the necessary parts.
    
    193
    -        dir_digest = cascache.pull_tree(self._get_project(), tree_digest)
    
    257
    +        dir_digest = cascache.pull_tree(casremote, tree_digest)
    
    194 258
             if dir_digest is None or not dir_digest.hash or not dir_digest.size_bytes:
    
    195 259
                 raise SandboxError("Output directory structure pulling from remote failed.")
    
    196 260
     
    
    ... ... @@ -223,11 +287,16 @@ class SandboxRemote(Sandbox):
    223 287
     
    
    224 288
             upload_vdir.recalculate_hash()
    
    225 289
     
    
    226
    -        context = self._get_context()
    
    227
    -        cascache = context.artifactcache
    
    290
    +        cascache = context.get_cascache()
    
    291
    +        casremote = CASRemote(self.storage_remote_spec)
    
    228 292
             # Now, push that key (without necessarily needing a ref) to the remote.
    
    229
    -        cascache.push_directory(self._get_project(), upload_vdir)
    
    230
    -        if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):
    
    293
    +
    
    294
    +        try:
    
    295
    +            cascache.push_directory(casremote, upload_vdir)
    
    296
    +        except grpc._channel._Rendezvous as e:
    
    297
    +            raise SandboxError("Failed to push source directory to remote: {}".format(e)) from e
    
    298
    +
    
    299
    +        if not cascache.verify_digest_on_remote(casremote, upload_vdir.ref):
    
    231 300
                 raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")
    
    232 301
     
    
    233 302
             # Fallback to the sandbox default settings for
    

  • doc/sessions/developing.run
    ... ... @@ -7,7 +7,7 @@ commands:
    7 7
     # Capture workspace open output 
    
    8 8
     - directory: ../examples/developing/
    
    9 9
       output: ../source/sessions/developing-workspace-open.html
    
    10
    -  command: workspace open hello.bst workspace_hello
    
    10
    +  command: workspace open --directory workspace_hello hello.bst
    
    11 11
     
    
    12 12
     # Capture output from workspace list
    
    13 13
     - directory: ../examples/developing/
    
    ... ... @@ -37,7 +37,7 @@ commands:
    37 37
     # Reopen workspace
    
    38 38
     - directory: ../examples/developing/
    
    39 39
       output: ../source/sessions/developing-reopen-workspace.html
    
    40
    -  command: workspace open --no-checkout hello.bst workspace_hello
    
    40
    +  command: workspace open --no-checkout --directory workspace_hello hello.bst
    
    41 41
     
    
    42 42
     # Reset workspace
    
    43 43
     - directory: ../examples/developing/
    

  • doc/sessions/junctions.run
    ... ... @@ -13,7 +13,7 @@ commands:
    13 13
     # Open a crossJunction workspace:
    
    14 14
     - directory: ../examples/junctions
    
    15 15
       output: ../source/sessions/junctions-workspace-open.html
    
    16
    -  command: workspace open hello-junction.bst:hello.bst workspace_hello
    
    16
    +  command: workspace open --directory workspace_hello hello-junction.bst:hello.bst
    
    17 17
     
    
    18 18
     # Remove the workspace 
    
    19 19
     - directory: ../examples/junctions
    

  • doc/source/format_project.rst
    ... ... @@ -231,10 +231,25 @@ using the `remote-execution` option:
    231 231
       remote-execution:
    
    232 232
     
    
    233 233
         # A url defining a remote execution server
    
    234
    -    url: http://buildserver.example.com:50051
    
    234
    +    exec-service:
    
    235
    +      url: buildserver.example.com:50051
    
    236
    +    storage-service:
    
    237
    +    - url: https://foo.com/artifacts:11002
    
    238
    +      server-cert: server.crt
    
    239
    +      client-cert: client.crt
    
    240
    +      client-key: client.key
    
    241
    +
    
    242
    +For the exec-service, the protocol will always be REAPI, so no protocol
    
    243
    +is listed in the url. The exec-service part of remote execution does
    
    244
    +not support encrypted connections yet.
    
    245
    +
    
    246
    +storage-service specifies a remote CAS store and the parameters are the
    
    247
    +same as those used to specify an :ref:`artifact server <artifacts>`.
    
    235 248
     
    
    236
    -The url should contain a hostname and port separated by ':'. Only plain HTTP is
    
    237
    -currently suported (no HTTPS).
    
    249
    +The storage server may be the same server used for artifact
    
    250
    +caching. Remote execution cannot work without push access to the
    
    251
    +storage server, so you must specify a client certificate and key, and
    
    252
    +a server certificate.
    
    238 253
     
    
    239 254
     The Remote Execution API can be found via https://github.com/bazelbuild/remote-apis.
    
    240 255
     
    

  • tests/examples/developing.py
    ... ... @@ -59,7 +59,7 @@ def test_open_workspace(cli, tmpdir, datafiles):
    59 59
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    60 60
         workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
    
    61 61
     
    
    62
    -    result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
    
    62
    +    result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst', ])
    
    63 63
         result.assert_success()
    
    64 64
     
    
    65 65
         result = cli.run(project=project, args=['workspace', 'list'])
    
    ... ... @@ -78,7 +78,7 @@ def test_make_change_in_workspace(cli, tmpdir, datafiles):
    78 78
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    79 79
         workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
    
    80 80
     
    
    81
    -    result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
    
    81
    +    result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst'])
    
    82 82
         result.assert_success()
    
    83 83
     
    
    84 84
         result = cli.run(project=project, args=['workspace', 'list'])
    

  • tests/examples/junctions.py
    ... ... @@ -48,7 +48,7 @@ def test_open_cross_junction_workspace(cli, tmpdir, datafiles):
    48 48
         workspace_dir = os.path.join(str(tmpdir), "workspace_hello_junction")
    
    49 49
     
    
    50 50
         result = cli.run(project=project,
    
    51
    -                     args=['workspace', 'open', 'hello-junction.bst:hello.bst', workspace_dir])
    
    51
    +                     args=['workspace', 'open', '--directory', workspace_dir, 'hello-junction.bst:hello.bst'])
    
    52 52
         result.assert_success()
    
    53 53
     
    
    54 54
         result = cli.run(project=project,
    

  • tests/frontend/buildcheckout.py
    ... ... @@ -509,7 +509,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
    509 509
     
    
    510 510
         # Now open a workspace on the junction
    
    511 511
         #
    
    512
    -    result = cli.run(project=project, args=['workspace', 'open', 'junction.bst', workspace])
    
    512
    +    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, 'junction.bst'])
    
    513 513
         result.assert_success()
    
    514 514
         filename = os.path.join(workspace, 'files', 'etc-files', 'etc', 'animal.conf')
    
    515 515
     
    

  • tests/frontend/cross_junction_workspace.py
    ... ... @@ -47,7 +47,7 @@ def open_cross_junction(cli, tmpdir):
    47 47
         workspace = tmpdir.join("workspace")
    
    48 48
     
    
    49 49
         element = 'sub.bst:data.bst'
    
    50
    -    args = ['workspace', 'open', element, str(workspace)]
    
    50
    +    args = ['workspace', 'open', '--directory', str(workspace), element]
    
    51 51
         result = cli.run(project=project, args=args)
    
    52 52
         result.assert_success()
    
    53 53
     
    

  • tests/frontend/workspace.py
    ... ... @@ -21,9 +21,11 @@
    21 21
     #           Phillip Smyth <phillip smyth codethink co uk>
    
    22 22
     #           Jonathan Maw <jonathan maw codethink co uk>
    
    23 23
     #           Richard Maw <richard maw codethink co uk>
    
    24
    +#           William Salmon <will salmon codethink co uk>
    
    24 25
     #
    
    25 26
     
    
    26 27
     import os
    
    28
    +import stat
    
    27 29
     import pytest
    
    28 30
     import shutil
    
    29 31
     import subprocess
    
    ... ... @@ -43,65 +45,120 @@ DATA_DIR = os.path.join(
    43 45
     )
    
    44 46
     
    
    45 47
     
    
    46
    -def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
    
    47
    -                   project_path=None, element_attrs=None):
    
    48
    -    if not workspace_dir:
    
    49
    -        workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
    
    50
    -    if not project_path:
    
    51
    -        project_path = os.path.join(datafiles.dirname, datafiles.basename)
    
    52
    -    else:
    
    53
    -        shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
    
    54
    -    bin_files_path = os.path.join(project_path, 'files', 'bin-files')
    
    55
    -    element_path = os.path.join(project_path, 'elements')
    
    56
    -    element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
    
    48
    +class WorkspaceCreater():
    
    49
    +    def __init__(self, cli, tmpdir, datafiles, project_path=None):
    
    50
    +        self.cli = cli
    
    51
    +        self.tmpdir = tmpdir
    
    52
    +        self.datafiles = datafiles
    
    53
    +
    
    54
    +        if not project_path:
    
    55
    +            project_path = os.path.join(datafiles.dirname, datafiles.basename)
    
    56
    +        else:
    
    57
    +            shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
    
    58
    +
    
    59
    +        self.project_path = project_path
    
    60
    +        self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
    
    61
    +
    
    62
    +        self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
    
    63
    +
    
    64
    +    def create_workspace_element(self, kind, track, suffix='', workspace_dir=None,
    
    65
    +                                 element_attrs=None):
    
    66
    +        element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
    
    67
    +        element_path = os.path.join(self.project_path, 'elements')
    
    68
    +        if not workspace_dir:
    
    69
    +            workspace_dir = os.path.join(self.workspace_cmd, element_name)
    
    70
    +            if workspace_dir[-4:] == '.bst':
    
    71
    +                workspace_dir = workspace_dir[:-4]
    
    72
    +
    
    73
    +        # Create our repo object of the given source type with
    
    74
    +        # the bin files, and then collect the initial ref.
    
    75
    +        repo = create_repo(kind, str(self.tmpdir))
    
    76
    +        ref = repo.create(self.bin_files_path)
    
    77
    +        if track:
    
    78
    +            ref = None
    
    79
    +
    
    80
    +        # Write out our test target
    
    81
    +        element = {
    
    82
    +            'kind': 'import',
    
    83
    +            'sources': [
    
    84
    +                repo.source_config(ref=ref)
    
    85
    +            ]
    
    86
    +        }
    
    87
    +        if element_attrs:
    
    88
    +            element = {**element, **element_attrs}
    
    89
    +        _yaml.dump(element,
    
    90
    +                   os.path.join(element_path,
    
    91
    +                                element_name))
    
    92
    +        return element_name, element_path, workspace_dir
    
    57 93
     
    
    58
    -    # Create our repo object of the given source type with
    
    59
    -    # the bin files, and then collect the initial ref.
    
    60
    -    #
    
    61
    -    repo = create_repo(kind, str(tmpdir))
    
    62
    -    ref = repo.create(bin_files_path)
    
    63
    -    if track:
    
    64
    -        ref = None
    
    94
    +    def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
    
    95
    +                                  element_attrs=None):
    
    65 96
     
    
    66
    -    # Write out our test target
    
    67
    -    element = {
    
    68
    -        'kind': 'import',
    
    69
    -        'sources': [
    
    70
    -            repo.source_config(ref=ref)
    
    71
    -        ]
    
    72
    -    }
    
    73
    -    if element_attrs:
    
    74
    -        element = {**element, **element_attrs}
    
    75
    -    _yaml.dump(element,
    
    76
    -               os.path.join(element_path,
    
    77
    -                            element_name))
    
    97
    +        element_tuples = []
    
    78 98
     
    
    79
    -    # Assert that there is no reference, a track & fetch is needed
    
    80
    -    state = cli.get_element_state(project_path, element_name)
    
    81
    -    if track:
    
    82
    -        assert state == 'no reference'
    
    83
    -    else:
    
    84
    -        assert state == 'fetch needed'
    
    99
    +        if suffixs is None:
    
    100
    +            suffixs = ['', ] * len(kinds)
    
    101
    +        else:
    
    102
    +            if len(suffixs) != len(kinds):
    
    103
    +                raise "terrible error"
    
    85 104
     
    
    86
    -    # Now open the workspace, this should have the effect of automatically
    
    87
    -    # tracking & fetching the source from the repo.
    
    88
    -    args = ['workspace', 'open']
    
    89
    -    if track:
    
    90
    -        args.append('--track')
    
    91
    -    args.extend([element_name, workspace_dir])
    
    92
    -    result = cli.run(project=project_path, args=args)
    
    105
    +        for suffix, kind in zip(suffixs, kinds):
    
    106
    +            element_name, element_path, workspace_dir = \
    
    107
    +                self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
    
    108
    +                                              element_attrs)
    
    93 109
     
    
    94
    -    result.assert_success()
    
    110
    +            # Assert that there is no reference, a track & fetch is needed
    
    111
    +            state = self.cli.get_element_state(self.project_path, element_name)
    
    112
    +            if track:
    
    113
    +                assert state == 'no reference'
    
    114
    +            else:
    
    115
    +                assert state == 'fetch needed'
    
    116
    +            element_tuples.append((element_name, workspace_dir))
    
    95 117
     
    
    96
    -    # Assert that we are now buildable because the source is
    
    97
    -    # now cached.
    
    98
    -    assert cli.get_element_state(project_path, element_name) == 'buildable'
    
    118
    +        return element_tuples
    
    99 119
     
    
    100
    -    # Check that the executable hello file is found in the workspace
    
    101
    -    filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
    
    102
    -    assert os.path.exists(filename)
    
    120
    +    def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
    
    121
    +                        element_attrs=None):
    
    122
    +
    
    123
    +        element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
    
    124
    +                                                        element_attrs)
    
    125
    +        os.makedirs(self.workspace_cmd, exist_ok=True)
    
    126
    +
    
    127
    +        # Now open the workspace, this should have the effect of automatically
    
    128
    +        # tracking & fetching the source from the repo.
    
    129
    +        args = ['workspace', 'open']
    
    130
    +        if track:
    
    131
    +            args.append('--track')
    
    132
    +        if workspace_dir is not None:
    
    133
    +            assert len(element_tuples) == 1, "test logic error"
    
    134
    +            _, workspace_dir = element_tuples[0]
    
    135
    +            args.extend(['--directory', workspace_dir])
    
    136
    +
    
    137
    +        args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
    
    138
    +        result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
    
    139
    +
    
    140
    +        result.assert_success()
    
    141
    +
    
    142
    +        for element_name, workspace_dir in element_tuples:
    
    143
    +            # Assert that we are now buildable because the source is
    
    144
    +            # now cached.
    
    145
    +            assert self.cli.get_element_state(self.project_path, element_name) == 'buildable'
    
    146
    +
    
    147
    +            # Check that the executable hello file is found in the workspace
    
    148
    +            filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
    
    149
    +            assert os.path.exists(filename)
    
    150
    +
    
    151
    +        return element_tuples
    
    103 152
     
    
    104
    -    return (element_name, project_path, workspace_dir)
    
    153
    +
    
    154
    +def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
    
    155
    +                   project_path=None, element_attrs=None):
    
    156
    +    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles, project_path)
    
    157
    +    workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
    
    158
    +                                                  element_attrs)
    
    159
    +    assert len(workspaces) == 1
    
    160
    +    element_name, workspace = workspaces[0]
    
    161
    +    return element_name, workspace_object.project_path, workspace
    
    105 162
     
    
    106 163
     
    
    107 164
     @pytest.mark.datafiles(DATA_DIR)
    
    ... ... @@ -128,6 +185,128 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
    128 185
         assert(expected_output_str in str(output))
    
    129 186
     
    
    130 187
     
    
    188
    +@pytest.mark.datafiles(DATA_DIR)
    
    189
    +def test_open_multi(cli, tmpdir, datafiles):
    
    190
    +
    
    191
    +    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    
    192
    +    workspaces = workspace_object.open_workspaces(repo_kinds, False)
    
    193
    +
    
    194
    +    for (elname, workspace), kind in zip(workspaces, repo_kinds):
    
    195
    +        assert kind in elname
    
    196
    +        workspace_lsdir = os.listdir(workspace)
    
    197
    +        if kind == 'git':
    
    198
    +            assert('.git' in workspace_lsdir)
    
    199
    +        elif kind == 'bzr':
    
    200
    +            assert('.bzr' in workspace_lsdir)
    
    201
    +        else:
    
    202
    +            assert not ('.git' in workspace_lsdir)
    
    203
    +            assert not ('.bzr' in workspace_lsdir)
    
    204
    +
    
    205
    +
    
    206
    +@pytest.mark.datafiles(DATA_DIR)
    
    207
    +def test_open_multi_unwritable(cli, tmpdir, datafiles):
    
    208
    +    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    
    209
    +
    
    210
    +    element_tuples = workspace_object.create_workspace_elements(repo_kinds, False, repo_kinds)
    
    211
    +    os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
    
    212
    +
    
    213
    +    # Now open the workspace, this should have the effect of automatically
    
    214
    +    # tracking & fetching the source from the repo.
    
    215
    +    args = ['workspace', 'open']
    
    216
    +    args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
    
    217
    +    cli.configure({'workspacedir': workspace_object.workspace_cmd})
    
    218
    +
    
    219
    +    cwdstat = os.stat(workspace_object.workspace_cmd)
    
    220
    +    try:
    
    221
    +        os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode - stat.S_IWRITE)
    
    222
    +        result = workspace_object.cli.run(project=workspace_object.project_path, args=args)
    
    223
    +    finally:
    
    224
    +        # Using this finally to make sure we always put things back how they should be.
    
    225
    +        os.chmod(workspace_object.workspace_cmd, cwdstat.st_mode)
    
    226
    +
    
    227
    +    result.assert_main_error(ErrorDomain.STREAM, None)
    
    228
    +    # Normally we avoid checking stderr in favour of using the machine readable result.assert_main_error
    
    229
    +    # But Tristan was very keen that the names of the elements left needing workspaces were present in the output
    
    230
    +    assert (" ".join([element_name for element_name, workspace_dir_suffix in element_tuples[1:]]) in result.stderr)
    
    231
    +
    
    232
    +
    
    233
    +@pytest.mark.datafiles(DATA_DIR)
    
    234
    +def test_open_multi_with_directory(cli, tmpdir, datafiles):
    
    235
    +    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    
    236
    +
    
    237
    +    element_tuples = workspace_object.create_workspace_elements(repo_kinds, False, repo_kinds)
    
    238
    +    os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
    
    239
    +
    
    240
    +    # Now open the workspace, this should have the effect of automatically
    
    241
    +    # tracking & fetching the source from the repo.
    
    242
    +    args = ['workspace', 'open']
    
    243
    +    args.extend(['--directory', 'any/dir/should/fail'])
    
    244
    +
    
    245
    +    args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
    
    246
    +    result = workspace_object.cli.run(cwd=workspace_object.workspace_cmd, project=workspace_object.project_path,
    
    247
    +                                      args=args)
    
    248
    +
    
    249
    +    result.assert_main_error(ErrorDomain.STREAM, 'directory-with-multiple-elements')
    
    250
    +
    
    251
    +
    
    252
    +@pytest.mark.datafiles(DATA_DIR)
    
    253
    +def test_open_defaultlocation(cli, tmpdir, datafiles):
    
    254
    +    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    
    255
    +
    
    256
    +    ((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], False, ['git'])
    
    257
    +    os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
    
    258
    +
    
    259
    +    # Now open the workspace, this should have the effect of automatically
    
    260
    +    # tracking & fetching the source from the repo.
    
    261
    +    args = ['workspace', 'open']
    
    262
    +    args.append(element_name)
    
    263
    +
    
    264
    +    # In the other tests we set the cmd to workspace_object.workspace_cmd with the optional
    
    265
    +    # argument, cwd for the workspace_object.cli.run function. But here we set the default
    
    266
    +    # workspace location to workspace_object.workspace_cmd and run the cli.run function with
    
    267
    +    # no cwd option so that it runs in the project directory.
    
    268
    +    cli.configure({'workspacedir': workspace_object.workspace_cmd})
    
    269
    +    result = workspace_object.cli.run(project=workspace_object.project_path,
    
    270
    +                                      args=args)
    
    271
    +
    
    272
    +    result.assert_success()
    
    273
    +
    
    274
    +    assert cli.get_element_state(workspace_object.project_path, element_name) == 'buildable'
    
    275
    +
    
    276
    +    # Check that the executable hello file is found in the workspace
    
    277
    +    # even though the cli.run function was not run with cwd = workspace_object.workspace_cmd
    
    278
    +    # the workspace should be created in there as we used the 'workspacedir' configuration
    
    279
    +    # option.
    
    280
    +    filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
    
    281
    +    assert os.path.exists(filename)
    
    282
    +
    
    283
    +
    
    284
    +@pytest.mark.datafiles(DATA_DIR)
    
    285
    +def test_open_defaultlocation_exists(cli, tmpdir, datafiles):
    
    286
    +    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    
    287
    +
    
    288
    +    ((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], False, ['git'])
    
    289
    +    os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
    
    290
    +
    
    291
    +    with open(workspace_dir, 'w') as fl:
    
    292
    +        fl.write('foo')
    
    293
    +
    
    294
    +    # Now open the workspace, this should have the effect of automatically
    
    295
    +    # tracking & fetching the source from the repo.
    
    296
    +    args = ['workspace', 'open']
    
    297
    +    args.append(element_name)
    
    298
    +
    
    299
    +    # In the other tests we set the cmd to workspace_object.workspace_cmd with the optional
    
    300
    +    # argument, cwd for the workspace_object.cli.run function. But here we set the default
    
    301
    +    # workspace location to workspace_object.workspace_cmd and run the cli.run function with
    
    302
    +    # no cwd option so that it runs in the project directory.
    
    303
    +    cli.configure({'workspacedir': workspace_object.workspace_cmd})
    
    304
    +    result = workspace_object.cli.run(project=workspace_object.project_path,
    
    305
    +                                      args=args)
    
    306
    +
    
    307
    +    result.assert_main_error(ErrorDomain.STREAM, 'bad-directory')
    
    308
    +
    
    309
    +
    
    131 310
     @pytest.mark.datafiles(DATA_DIR)
    
    132 311
     @pytest.mark.parametrize("kind", repo_kinds)
    
    133 312
     def test_open_track(cli, tmpdir, datafiles, kind):
    
    ... ... @@ -150,7 +329,7 @@ def test_open_force(cli, tmpdir, datafiles, kind):
    150 329
     
    
    151 330
         # Now open the workspace again with --force, this should happily succeed
    
    152 331
         result = cli.run(project=project, args=[
    
    153
    -        'workspace', 'open', '--force', element_name, workspace
    
    332
    +        'workspace', 'open', '--force', '--directory', workspace, element_name
    
    154 333
         ])
    
    155 334
         result.assert_success()
    
    156 335
     
    
    ... ... @@ -165,7 +344,7 @@ def test_open_force_open(cli, tmpdir, datafiles, kind):
    165 344
     
    
    166 345
         # Now open the workspace again with --force, this should happily succeed
    
    167 346
         result = cli.run(project=project, args=[
    
    168
    -        'workspace', 'open', '--force', element_name, workspace
    
    347
    +        'workspace', 'open', '--force', '--directory', workspace, element_name
    
    169 348
         ])
    
    170 349
         result.assert_success()
    
    171 350
     
    
    ... ... @@ -196,7 +375,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
    196 375
     
    
    197 376
         # Now open the workspace again with --force, this should happily succeed
    
    198 377
         result = cli.run(project=project, args=[
    
    199
    -        'workspace', 'open', '--force', element_name2, workspace
    
    378
    +        'workspace', 'open', '--force', '--directory', workspace, element_name2
    
    200 379
         ])
    
    201 380
     
    
    202 381
         # Assert that the file in workspace 1 has been replaced
    
    ... ... @@ -504,7 +683,7 @@ def test_buildable_no_ref(cli, tmpdir, datafiles):
    504 683
         # Now open the workspace. We don't need to checkout the source though.
    
    505 684
         workspace = os.path.join(str(tmpdir), 'workspace-no-ref')
    
    506 685
         os.makedirs(workspace)
    
    507
    -    args = ['workspace', 'open', '--no-checkout', element_name, workspace]
    
    686
    +    args = ['workspace', 'open', '--no-checkout', '--directory', workspace, element_name]
    
    508 687
         result = cli.run(project=project, args=args)
    
    509 688
         result.assert_success()
    
    510 689
     
    
    ... ... @@ -766,7 +945,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
    766 945
                                 element_name))
    
    767 946
     
    
    768 947
         # Make a change to the workspaces file
    
    769
    -    result = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
    
    948
    +    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    770 949
         result.assert_success()
    
    771 950
         result = cli.run(project=project, args=['workspace', 'close', '--remove-dir', element_name])
    
    772 951
         result.assert_success()
    

  • tests/integration/shell.py
    ... ... @@ -290,7 +290,7 @@ def test_workspace_visible(cli, tmpdir, datafiles):
    290 290
     
    
    291 291
         # Open a workspace on our build failing element
    
    292 292
         #
    
    293
    -    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
    
    293
    +    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    294 294
         assert res.exit_code == 0
    
    295 295
     
    
    296 296
         # Ensure the dependencies of our build failing element are built
    

  • tests/integration/workspace.py
    ... ... @@ -24,7 +24,7 @@ def test_workspace_mount(cli, tmpdir, datafiles):
    24 24
         workspace = os.path.join(cli.directory, 'workspace')
    
    25 25
         element_name = 'workspace/workspace-mount.bst'
    
    26 26
     
    
    27
    -    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
    
    27
    +    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    28 28
         assert res.exit_code == 0
    
    29 29
     
    
    30 30
         res = cli.run(project=project, args=['build', element_name])
    
    ... ... @@ -41,7 +41,7 @@ def test_workspace_commanddir(cli, tmpdir, datafiles):
    41 41
         workspace = os.path.join(cli.directory, 'workspace')
    
    42 42
         element_name = 'workspace/workspace-commanddir.bst'
    
    43 43
     
    
    44
    -    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
    
    44
    +    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    45 45
         assert res.exit_code == 0
    
    46 46
     
    
    47 47
         res = cli.run(project=project, args=['build', element_name])
    
    ... ... @@ -78,7 +78,7 @@ def test_workspace_updated_dependency(cli, tmpdir, datafiles):
    78 78
         _yaml.dump(dependency, os.path.join(element_path, dep_name))
    
    79 79
     
    
    80 80
         # First open the workspace
    
    81
    -    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
    
    81
    +    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    82 82
         assert res.exit_code == 0
    
    83 83
     
    
    84 84
         # We build the workspaced element, so that we have an artifact
    
    ... ... @@ -134,7 +134,7 @@ def test_workspace_update_dependency_failed(cli, tmpdir, datafiles):
    134 134
         _yaml.dump(dependency, os.path.join(element_path, dep_name))
    
    135 135
     
    
    136 136
         # First open the workspace
    
    137
    -    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
    
    137
    +    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    138 138
         assert res.exit_code == 0
    
    139 139
     
    
    140 140
         # We build the workspaced element, so that we have an artifact
    
    ... ... @@ -210,7 +210,7 @@ def test_updated_dependency_nested(cli, tmpdir, datafiles):
    210 210
         _yaml.dump(dependency, os.path.join(element_path, dep_name))
    
    211 211
     
    
    212 212
         # First open the workspace
    
    213
    -    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
    
    213
    +    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    214 214
         assert res.exit_code == 0
    
    215 215
     
    
    216 216
         # We build the workspaced element, so that we have an artifact
    
    ... ... @@ -264,7 +264,7 @@ def test_incremental_configure_commands_run_only_once(cli, tmpdir, datafiles):
    264 264
         _yaml.dump(element, os.path.join(element_path, element_name))
    
    265 265
     
    
    266 266
         # We open a workspace on the above element
    
    267
    -    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
    
    267
    +    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    268 268
         res.assert_success()
    
    269 269
     
    
    270 270
         # Then we build, and check whether the configure step succeeded
    

  • tests/plugins/filter.py
    ... ... @@ -108,19 +108,28 @@ def test_filter_forbid_also_rdep(datafiles, cli):
    108 108
     def test_filter_workspace_open(datafiles, cli, tmpdir):
    
    109 109
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    110 110
         workspace_dir = os.path.join(tmpdir.dirname, tmpdir.basename, "workspace")
    
    111
    -    result = cli.run(project=project, args=['workspace', 'open', 'deps-permitted.bst', workspace_dir])
    
    111
    +    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'deps-permitted.bst'])
    
    112 112
         result.assert_success()
    
    113 113
         assert os.path.exists(os.path.join(workspace_dir, "foo"))
    
    114 114
         assert os.path.exists(os.path.join(workspace_dir, "bar"))
    
    115 115
         assert os.path.exists(os.path.join(workspace_dir, "baz"))
    
    116 116
     
    
    117 117
     
    
    118
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
    
    119
    +def test_filter_workspace_open_multi(datafiles, cli, tmpdir):
    
    120
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    121
    +    result = cli.run(cwd=project, project=project, args=['workspace', 'open', 'deps-permitted.bst',
    
    122
    +                                                         'output-orphans.bst'])
    
    123
    +    result.assert_success()
    
    124
    +    assert os.path.exists(os.path.join(project, "input"))
    
    125
    +
    
    126
    +
    
    118 127
     @pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
    
    119 128
     def test_filter_workspace_build(datafiles, cli, tmpdir):
    
    120 129
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    121 130
         tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
    
    122 131
         workspace_dir = os.path.join(tempdir, "workspace")
    
    123
    -    result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
    
    132
    +    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
    
    124 133
         result.assert_success()
    
    125 134
         src = os.path.join(workspace_dir, "foo")
    
    126 135
         dst = os.path.join(workspace_dir, "quux")
    
    ... ... @@ -138,7 +147,7 @@ def test_filter_workspace_close(datafiles, cli, tmpdir):
    138 147
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    139 148
         tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
    
    140 149
         workspace_dir = os.path.join(tempdir, "workspace")
    
    141
    -    result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
    
    150
    +    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
    
    142 151
         result.assert_success()
    
    143 152
         src = os.path.join(workspace_dir, "foo")
    
    144 153
         dst = os.path.join(workspace_dir, "quux")
    
    ... ... @@ -158,7 +167,7 @@ def test_filter_workspace_reset(datafiles, cli, tmpdir):
    158 167
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    159 168
         tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
    
    160 169
         workspace_dir = os.path.join(tempdir, "workspace")
    
    161
    -    result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
    
    170
    +    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
    
    162 171
         result.assert_success()
    
    163 172
         src = os.path.join(workspace_dir, "foo")
    
    164 173
         dst = os.path.join(workspace_dir, "quux")
    

  • tests/plugins/pipeline.py
    ... ... @@ -14,7 +14,7 @@ DATA_DIR = os.path.join(
    14 14
     
    
    15 15
     def create_pipeline(tmpdir, basedir, target):
    
    16 16
         context = Context()
    
    17
    -    context.load()
    
    17
    +    context.load(config=os.devnull)
    
    18 18
         context.deploydir = os.path.join(str(tmpdir), 'deploy')
    
    19 19
         context.artifactdir = os.path.join(str(tmpdir), 'artifact')
    
    20 20
         project = Project(basedir, context)
    

  • tests/sandboxes/remote-exec-config.py
    1
    +import pytest
    
    2
    +
    
    3
    +import itertools
    
    4
    +import os
    
    5
    +
    
    6
    +from buildstream import _yaml
    
    7
    +from buildstream._exceptions import ErrorDomain, LoadErrorReason
    
    8
    +
    
    9
    +from tests.testutils.runcli import cli
    
    10
    +
    
    11
    +DATA_DIR = os.path.dirname(os.path.realpath(__file__))
    
    12
    +
    
    13
    +# Tests that we get a useful error message when supplying invalid
    
    14
    +# remote execution configurations.
    
    15
    +
    
    16
    +
    
    17
    +# Assert that if both 'url' (the old style) and 'exec-service' (the new style)
    
    18
    +# are used at once, a LoadError results.
    
    19
    +@pytest.mark.datafiles(DATA_DIR)
    
    20
    +def test_old_and_new_configs(cli, datafiles):
    
    21
    +    project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
    
    22
    +
    
    23
    +    project_conf = {
    
    24
    +        'name': 'test',
    
    25
    +
    
    26
    +        'remote-execution': {
    
    27
    +            'url': 'https://cache.example.com:12345',
    
    28
    +            'exec-service': {
    
    29
    +                'url': 'http://localhost:8088'
    
    30
    +            },
    
    31
    +            'storage-service': {
    
    32
    +                'url': 'http://charactron:11001',
    
    33
    +            }
    
    34
    +        }
    
    35
    +    }
    
    36
    +    project_conf_file = os.path.join(project, 'project.conf')
    
    37
    +    _yaml.dump(project_conf, project_conf_file)
    
    38
    +
    
    39
    +    # Use `pull` here to ensure we try to initialize the remotes, triggering the error
    
    40
    +    #
    
    41
    +    # This does not happen for a simple `bst show`.
    
    42
    +    result = cli.run(project=project, args=['pull', 'element.bst'])
    
    43
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one")
    
    44
    +
    
    45
    +
    
    46
    +# Assert that if either the client key or client cert is specified
    
    47
    +# without specifying its counterpart, we get a comprehensive LoadError
    
    48
    +# instead of an unhandled exception.
    
    49
    +@pytest.mark.datafiles(DATA_DIR)
    
    50
    +@pytest.mark.parametrize('config_key, config_value', [
    
    51
    +    ('client-cert', 'client.crt'),
    
    52
    +    ('client-key', 'client.key')
    
    53
    +])
    
    54
    +def test_missing_certs(cli, datafiles, config_key, config_value):
    
    55
    +    project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
    
    56
    +
    
    57
    +    project_conf = {
    
    58
    +        'name': 'test',
    
    59
    +
    
    60
    +        'remote-execution': {
    
    61
    +            'exec-service': {
    
    62
    +                'url': 'http://localhost:8088'
    
    63
    +            },
    
    64
    +            'storage-service': {
    
    65
    +                'url': 'http://charactron:11001',
    
    66
    +                config_key: config_value,
    
    67
    +            }
    
    68
    +        }
    
    69
    +    }
    
    70
    +    project_conf_file = os.path.join(project, 'project.conf')
    
    71
    +    _yaml.dump(project_conf, project_conf_file)
    
    72
    +
    
    73
    +    # Use `pull` here to ensure we try to initialize the remotes, triggering the error
    
    74
    +    #
    
    75
    +    # This does not happen for a simple `bst show`.
    
    76
    +    result = cli.run(project=project, args=['show', 'element.bst'])
    
    77
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "Your config is missing")
    
    78
    +
    
    79
    +
    
    80
    +# Assert that if incomplete information is supplied we get a sensible error message.
    
    81
    +@pytest.mark.datafiles(DATA_DIR)
    
    82
    +def test_empty_config(cli, datafiles):
    
    83
    +    project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
    
    84
    +
    
    85
    +    project_conf = {
    
    86
    +        'name': 'test',
    
    87
    +
    
    88
    +        'remote-execution': {
    
    89
    +        }
    
    90
    +    }
    
    91
    +    project_conf_file = os.path.join(project, 'project.conf')
    
    92
    +    _yaml.dump(project_conf, project_conf_file)
    
    93
    +
    
    94
    +    # Use `pull` here to ensure we try to initialize the remotes, triggering the error
    
    95
    +    #
    
    96
    +    # This does not happen for a simple `bst show`.
    
    97
    +    result = cli.run(project=project, args=['pull', 'element.bst'])
    
    98
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one")



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]