[Notes] [Git][BuildStream/buildstream][jonathan/workspace-fragment-create] 35 commits: tests: Avoid hangs due to exceptions in subprocesses



Title: GitLab

Jonathan Maw pushed to branch jonathan/workspace-fragment-create at BuildStream / buildstream

Commits:

29 changed files:

Changes:

  • NEWS
    ... ... @@ -67,6 +67,12 @@ buildstream 1.3.1
    67 67
         allows the user to set a default location for their creation. This has meant
    
    68 68
         that the new CLI is no longer backwards compatible with buildstream 1.2.
    
    69 69
     
    
    70
    +  o Add sandbox API for command batching and use it for build, script, and
    
    71
    +    compose elements.
    
    72
    +
    
    73
    +  o Opening a workspace now creates a .bstproject.yaml file that allows buildstream
    
    74
    +    commands to be run from a workspace that is not inside a project.
    
    75
    +
    
    70 76
     
    
    71 77
     =================
    
    72 78
     buildstream 1.1.5
    

  • buildstream/__init__.py
    ... ... @@ -27,7 +27,7 @@ if "_BST_COMPLETION" not in os.environ:
    27 27
         del get_versions
    
    28 28
     
    
    29 29
         from .utils import UtilError, ProgramNotFoundError
    
    30
    -    from .sandbox import Sandbox, SandboxFlags
    
    30
    +    from .sandbox import Sandbox, SandboxFlags, SandboxCommandError
    
    31 31
         from .types import Scope, Consistency
    
    32 32
         from .plugin import Plugin
    
    33 33
         from .source import Source, SourceError, SourceFetcher
    

  • buildstream/_context.py
    ... ... @@ -31,7 +31,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
    31 31
     from ._message import Message, MessageType
    
    32 32
     from ._profile import Topics, profile_start, profile_end
    
    33 33
     from ._artifactcache import ArtifactCache
    
    34
    -from ._workspaces import Workspaces
    
    34
    +from ._workspaces import Workspaces, WorkspaceProjectCache
    
    35 35
     from .plugin import _plugin_lookup
    
    36 36
     
    
    37 37
     
    
    ... ... @@ -121,6 +121,10 @@ class Context():
    121 121
             # remove a workspace directory.
    
    122 122
             self.prompt_workspace_close_remove_dir = None
    
    123 123
     
    
    124
    +        # Boolean, whether we double-check with the user that they meant to
    
    125
    +        # close the workspace when they're using it to access the project.
    
    126
    +        self.prompt_workspace_close_project_inaccessible = None
    
    127
    +
    
    124 128
             # Boolean, whether we double-check with the user that they meant to do
    
    125 129
             # a hard reset of a workspace, potentially losing changes.
    
    126 130
             self.prompt_workspace_reset_hard = None
    
    ... ... @@ -139,6 +143,7 @@ class Context():
    139 143
             self._projects = []
    
    140 144
             self._project_overrides = {}
    
    141 145
             self._workspaces = None
    
    146
    +        self._workspace_project_cache = WorkspaceProjectCache()
    
    142 147
             self._log_handle = None
    
    143 148
             self._log_filename = None
    
    144 149
     
    
    ... ... @@ -248,12 +253,15 @@ class Context():
    248 253
                 defaults, Mapping, 'prompt')
    
    249 254
             _yaml.node_validate(prompt, [
    
    250 255
                 'auto-init', 'really-workspace-close-remove-dir',
    
    256
    +            'really-workspace-close-project-inaccessible',
    
    251 257
                 'really-workspace-reset-hard',
    
    252 258
             ])
    
    253 259
             self.prompt_auto_init = _node_get_option_str(
    
    254 260
                 prompt, 'auto-init', ['ask', 'no']) == 'ask'
    
    255 261
             self.prompt_workspace_close_remove_dir = _node_get_option_str(
    
    256 262
                 prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
    
    263
    +        self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
    
    264
    +            prompt, 'really-workspace-close-project-inaccessible', ['ask', 'yes']) == 'ask'
    
    257 265
             self.prompt_workspace_reset_hard = _node_get_option_str(
    
    258 266
                 prompt, 'really-workspace-reset-hard', ['ask', 'yes']) == 'ask'
    
    259 267
     
    
    ... ... @@ -310,6 +318,16 @@ class Context():
    310 318
         def get_workspaces(self):
    
    311 319
             return self._workspaces
    
    312 320
     
    
    321
    +    # get_workspace_project_cache():
    
    322
    +    #
    
    323
    +    # Return the WorkspaceProjectCache object used for this BuildStream invocation
    
    324
    +    #
    
    325
    +    # Returns:
    
    326
    +    #    (WorkspaceProjectCache): The WorkspaceProjectCache object
    
    327
    +    #
    
    328
    +    def get_workspace_project_cache(self):
    
    329
    +        return self._workspace_project_cache
    
    330
    +
    
    313 331
         # get_overrides():
    
    314 332
         #
    
    315 333
         # Fetch the override dictionary for the active project. This returns
    

  • buildstream/_frontend/cli.py
    ... ... @@ -59,18 +59,9 @@ def complete_target(args, incomplete):
    59 59
         :return: all the possible user-specified completions for the param
    
    60 60
         """
    
    61 61
     
    
    62
    +    from .. import utils
    
    62 63
         project_conf = 'project.conf'
    
    63 64
     
    
    64
    -    def ensure_project_dir(directory):
    
    65
    -        directory = os.path.abspath(directory)
    
    66
    -        while not os.path.isfile(os.path.join(directory, project_conf)):
    
    67
    -            parent_dir = os.path.dirname(directory)
    
    68
    -            if directory == parent_dir:
    
    69
    -                break
    
    70
    -            directory = parent_dir
    
    71
    -
    
    72
    -        return directory
    
    73
    -
    
    74 65
         # First resolve the directory, in case there is an
    
    75 66
         # active --directory/-C option
    
    76 67
         #
    
    ... ... @@ -89,7 +80,7 @@ def complete_target(args, incomplete):
    89 80
         else:
    
    90 81
             # Check if this directory or any of its parent directories
    
    91 82
             # contain a project config file
    
    92
    -        base_directory = ensure_project_dir(base_directory)
    
    83
    +        base_directory = utils._search_upward_for_file(base_directory, project_conf)
    
    93 84
     
    
    94 85
         # Now parse the project.conf just to find the element path,
    
    95 86
         # this is unfortunately a bit heavy.
    
    ... ... @@ -756,11 +747,18 @@ def workspace_close(app, remove_dir, all_, elements):
    756 747
     
    
    757 748
             elements = app.stream.redirect_element_names(elements)
    
    758 749
     
    
    759
    -        # Check that the workspaces in question exist
    
    750
    +        # Check that the workspaces in question exist, and that it's safe to
    
    751
    +        # remove them.
    
    760 752
             nonexisting = []
    
    761 753
             for element_name in elements:
    
    762 754
                 if not app.stream.workspace_exists(element_name):
    
    763 755
                     nonexisting.append(element_name)
    
    756
    +            if (app.stream.workspace_is_required(element_name) and app.interactive and
    
    757
    +                    app.context.prompt_workspace_close_project_inaccessible):
    
    758
    +                click.echo("Removing '{}' will prevent you from running buildstream commands".format(element_name))
    
    759
    +                if not click.confirm('Are you sure you want to close this workspace?'):
    
    760
    +                    click.echo('Aborting', err=True)
    
    761
    +                    sys.exit(-1)
    
    764 762
             if nonexisting:
    
    765 763
                 raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
    
    766 764
     
    

  • buildstream/_loader/loader.py
    ... ... @@ -563,17 +563,23 @@ class Loader():
    563 563
                                     "Subproject has no ref for junction: {}".format(filename),
    
    564 564
                                     detail=detail)
    
    565 565
     
    
    566
    -        # Stage sources
    
    567
    -        os.makedirs(self._context.builddir, exist_ok=True)
    
    568
    -        basedir = tempfile.mkdtemp(prefix="{}-".format(element.normal_name), dir=self._context.builddir)
    
    569
    -        element._stage_sources_at(basedir, mount_workspaces=False)
    
    566
    +        if len(sources) == 1 and sources[0]._get_local_path():
    
    567
    +            # Optimization for junctions with a single local source
    
    568
    +            basedir = sources[0]._get_local_path()
    
    569
    +            tempdir = None
    
    570
    +        else:
    
    571
    +            # Stage sources
    
    572
    +            os.makedirs(self._context.builddir, exist_ok=True)
    
    573
    +            basedir = tempfile.mkdtemp(prefix="{}-".format(element.normal_name), dir=self._context.builddir)
    
    574
    +            element._stage_sources_at(basedir, mount_workspaces=False)
    
    575
    +            tempdir = basedir
    
    570 576
     
    
    571 577
             # Load the project
    
    572 578
             project_dir = os.path.join(basedir, element.path)
    
    573 579
             try:
    
    574 580
                 from .._project import Project
    
    575 581
                 project = Project(project_dir, self._context, junction=element,
    
    576
    -                              parent_loader=self, tempdir=basedir)
    
    582
    +                              parent_loader=self, tempdir=tempdir)
    
    577 583
             except LoadError as e:
    
    578 584
                 if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
    
    579 585
                     raise LoadError(reason=LoadErrorReason.INVALID_JUNCTION,
    

  • buildstream/_project.py
    ... ... @@ -94,8 +94,10 @@ class Project():
    94 94
             # The project name
    
    95 95
             self.name = None
    
    96 96
     
    
    97
    -        # The project directory
    
    98
    -        self.directory = self._ensure_project_dir(directory)
    
    97
    +        self._context = context  # The invocation Context, a private member
    
    98
    +
    
    99
    +        # The project directory, and whether the project was found from an external workspace
    
    100
    +        self.directory, self._required_workspace_element = self._find_project_dir(directory)
    
    99 101
     
    
    100 102
             # Absolute path to where elements are loaded from within the project
    
    101 103
             self.element_path = None
    
    ... ... @@ -116,7 +118,6 @@ class Project():
    116 118
             #
    
    117 119
             # Private Members
    
    118 120
             #
    
    119
    -        self._context = context  # The invocation Context
    
    120 121
     
    
    121 122
             self._default_mirror = default_mirror    # The name of the preferred mirror.
    
    122 123
     
    
    ... ... @@ -370,6 +371,14 @@ class Project():
    370 371
     
    
    371 372
             self._load_second_pass()
    
    372 373
     
    
    374
    +    # required_workspace_element()
    
    375
    +    #
    
    376
    +    # Returns the element whose workspace is required to load this project,
    
    377
    +    # if any.
    
    378
    +    #
    
    379
    +    def required_workspace_element(self):
    
    380
    +        return self._required_workspace_element
    
    381
    +
    
    373 382
         # cleanup()
    
    374 383
         #
    
    375 384
         # Cleans up resources used loading elements
    
    ... ... @@ -651,7 +660,7 @@ class Project():
    651 660
             # Source url aliases
    
    652 661
             output._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
    
    653 662
     
    
    654
    -    # _ensure_project_dir()
    
    663
    +    # _find_project_dir()
    
    655 664
         #
    
    656 665
         # Returns path of the project directory, if a configuration file is found
    
    657 666
         # in given directory or any of its parent directories.
    
    ... ... @@ -662,18 +671,26 @@ class Project():
    662 671
         # Raises:
    
    663 672
         #    LoadError if project.conf is not found
    
    664 673
         #
    
    665
    -    def _ensure_project_dir(self, directory):
    
    666
    -        directory = os.path.abspath(directory)
    
    667
    -        while not os.path.isfile(os.path.join(directory, _PROJECT_CONF_FILE)):
    
    668
    -            parent_dir = os.path.dirname(directory)
    
    669
    -            if directory == parent_dir:
    
    674
    +    # Returns:
    
    675
    +    #    (str) - the directory that contains the project, and
    
    676
    +    #    (str) - the name of the element required to find the project, or an empty string
    
    677
    +    #
    
    678
    +    def _find_project_dir(self, directory):
    
    679
    +        workspace_element = ""
    
    680
    +        project_directory = utils._search_upward_for_file(directory, _PROJECT_CONF_FILE)
    
    681
    +        if not project_directory:
    
    682
    +            workspace_project_cache = self._context.get_workspace_project_cache()
    
    683
    +            workspace_project = workspace_project_cache.get(directory)
    
    684
    +            if workspace_project:
    
    685
    +                project_directory = workspace_project.get_default_path()
    
    686
    +                workspace_element = workspace_project.get_default_element()
    
    687
    +            else:
    
    670 688
                     raise LoadError(
    
    671 689
                         LoadErrorReason.MISSING_PROJECT_CONF,
    
    672 690
                         '{} not found in current directory or any of its parent directories'
    
    673 691
                         .format(_PROJECT_CONF_FILE))
    
    674
    -            directory = parent_dir
    
    675 692
     
    
    676
    -        return directory
    
    693
    +        return project_directory, workspace_element
    
    677 694
     
    
    678 695
         def _load_plugin_factories(self, config, output):
    
    679 696
             plugin_source_origins = []   # Origins of custom sources
    

  • buildstream/_stream.py
    ... ... @@ -28,7 +28,7 @@ import tarfile
    28 28
     from contextlib import contextmanager
    
    29 29
     from tempfile import TemporaryDirectory
    
    30 30
     
    
    31
    -from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
    
    31
    +from ._exceptions import StreamError, ImplError, BstError, set_last_task_error, LoadError, LoadErrorReason
    
    32 32
     from ._message import Message, MessageType
    
    33 33
     from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
    
    34 34
     from ._pipeline import Pipeline, PipelineSelection
    
    ... ... @@ -550,6 +550,8 @@ class Stream():
    550 550
        # So far this function has tried to catch as many issues as possible without making any changes
    
    551 551
        # Now it does the bits that cannot be made atomic.
    
    552 552
             targetGenerator = zip(elements, expanded_directories)
    
    553
    +        workspace_project_cache = self._context.get_workspace_project_cache()
    
    554
    +        project = self._context.get_toplevel_project()
    
    553 555
             for target, directory in targetGenerator:
    
    554 556
                 self._message(MessageType.INFO, "Creating workspace for element {}"
    
    555 557
                               .format(target.name))
    
    ... ... @@ -574,6 +576,10 @@ class Stream():
    574 576
                     with target.timed_activity("Staging sources to {}".format(directory)):
    
    575 577
                         target._open_workspace()
    
    576 578
     
    
    579
    +            workspace_project = workspace_project_cache.add(directory, project.directory,
    
    580
    +                                                            target._get_full_name())
    
    581
    +            workspace_project.write()
    
    582
    +
    
    577 583
                 # Saving the workspace once it is set up means that if the next workspace fails to be created before
    
    578 584
            # the configuration gets saved, the successfully created workspace still gets saved.
    
    579 585
                 workspaces.save_config()
    
    ... ... @@ -601,6 +607,16 @@ class Stream():
    601 607
                     except OSError as e:
    
    602 608
                         raise StreamError("Could not remove  '{}': {}"
    
    603 609
                                           .format(workspace.get_absolute_path(), e)) from e
    
    610
    +        else:
    
    611
    +            workspace_project_cache = self._context.get_workspace_project_cache()
    
    612
    +            try:
    
    613
    +                workspace_project_cache.remove(workspace.get_absolute_path())
    
    614
    +            except LoadError as e:
    
    615
    +                # We might be closing a workspace with a deleted directory
    
    616
    +                if e.reason == LoadErrorReason.MISSING_FILE:
    
    617
    +                    pass
    
    618
    +                else:
    
    619
    +                    raise
    
    604 620
     
    
    605 621
             # Delete the workspace and save the configuration
    
    606 622
             workspaces.delete_workspace(element_name)
    
    ... ... @@ -644,6 +660,8 @@ class Stream():
    644 660
             for element in elements:
    
    645 661
                 workspace = workspaces.get_workspace(element._get_full_name())
    
    646 662
                 workspace_path = workspace.get_absolute_path()
    
    663
    +            workspace_project_cache = self._context.get_workspace_project_cache()
    
    664
    +            workspace_project = workspace_project_cache.get(workspace_path)
    
    647 665
                 if soft:
    
    648 666
                     workspace.prepared = False
    
    649 667
                     self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
    
    ... ... @@ -664,6 +682,8 @@ class Stream():
    664 682
                 with element.timed_activity("Staging sources to {}".format(workspace_path)):
    
    665 683
                     element._open_workspace()
    
    666 684
     
    
    685
    +            workspace_project.write()
    
    686
    +
    
    667 687
                 self._message(MessageType.INFO,
    
    668 688
                               "Reset workspace for {} at: {}".format(element.name,
    
    669 689
                                                                      workspace_path))
    
    ... ... @@ -694,6 +714,20 @@ class Stream():
    694 714
     
    
    695 715
             return False
    
    696 716
     
    
    717
    +    # workspace_is_required()
    
    718
    +    #
    
    719
    +    # Checks whether the workspace belonging to element_name is required to
    
    720
    +    # load the project
    
    721
    +    #
    
    722
    +    # Args:
    
    723
    +    #    element_name (str): The element whose workspace may be required
    
    724
    +    #
    
    725
    +    # Returns:
    
    726
    +    #    (bool): True if the workspace is required
    
    727
    +    def workspace_is_required(self, element_name):
    
    728
    +        required_elm = self._project.required_workspace_element()
    
    729
    +        return required_elm == element_name
    
    730
    +
    
    697 731
         # workspace_list
    
    698 732
         #
    
    699 733
         # Serializes the workspaces and dumps them in YAML to stdout.
    

  • buildstream/_workspaces.py
    ... ... @@ -25,6 +25,211 @@ from ._exceptions import LoadError, LoadErrorReason
    25 25
     
    
    26 26
     
    
    27 27
     BST_WORKSPACE_FORMAT_VERSION = 3
    
    28
    +BST_WORKSPACE_PROJECT_FORMAT_VERSION = 1
    
    29
    +WORKSPACE_PROJECT_FILE = ".bstproject.yaml"
    
    30
    +
    
    31
    +
    
    32
    +# WorkspaceProject()
    
    33
    +#
    
    34
    +# An object to contain various helper functions and data required for
    
    35
    +# referring from a workspace back to buildstream.
    
    36
    +#
    
    37
    +# Args:
    
    38
    +#    directory (str): The directory that the workspace exists in
    
    39
    +#    project_path (str): The project path used to refer back
    
    40
    +#                        to buildstream projects.
    
    41
    +#    element_name (str): The name of the element used to create this workspace.
    
    42
    +class WorkspaceProject():
    
    43
    +    def __init__(self, directory, project_path="", element_name=""):
    
    44
    +        self._projects = []
    
    45
    +        self._directory = directory
    
    46
    +
    
    47
    +        assert (project_path and element_name) or (not project_path and not element_name)
    
    48
    +        if project_path:
    
    49
    +            self._add_project(project_path, element_name)
    
    50
    +
    
    51
    +    # get_default_path()
    
    52
    +    #
    
    53
    +    # Retrieves the default path to a project.
    
    54
    +    #
    
    55
    +    # Returns:
    
    56
    +    #    (str): The path to a project
    
    57
    +    def get_default_path(self):
    
    58
    +        return self._projects[0]['project-path']
    
    59
    +
    
    60
    +    # get_default_element()
    
    61
    +    #
    
    62
    +    # Retrieves the name of the element that owns this workspace.
    
    63
    +    #
    
    64
    +    # Returns:
    
    65
    +    #    (str): The name of an element
    
    66
    +    def get_default_element(self):
    
    67
    +        return self._projects[0]['element-name']
    
    68
    +
    
    69
    +    # to_dict()
    
    70
    +    #
    
    71
    +    # Turn the members data into a dict for serialization purposes
    
    72
    +    #
    
    73
    +    # Returns:
    
    74
    +    #    (dict): A dict representation of the WorkspaceProject
    
    75
    +    #
    
    76
    +    def to_dict(self):
    
    77
    +        ret = {
    
    78
    +            'projects': self._projects,
    
    79
    +            'format-version': BST_WORKSPACE_PROJECT_FORMAT_VERSION,
    
    80
    +        }
    
    81
    +        return ret
    
    82
    +
    
    83
    +    # from_dict()
    
    84
    +    #
    
    85
    +    # Loads a new WorkspaceProject from a simple dictionary
    
    86
    +    #
    
    87
    +    # Args:
    
    88
    +    #    directory (str): The directory that the workspace exists in
    
    89
    +    #    dictionary (dict): The dict to generate a WorkspaceProject from
    
    90
    +    #
    
    91
    +    # Returns:
    
    92
    +    #   (WorkspaceProject): A newly instantiated WorkspaceProject
    
    93
    +    @classmethod
    
    94
    +    def from_dict(cls, directory, dictionary):
    
    95
    +        # Only know how to handle one format-version at the moment.
    
    96
    +        format_version = int(dictionary['format-version'])
    
    97
    +        assert format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION, \
    
    98
    +            "Format version {} not found in {}".format(BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary)
    
    99
    +
    
    100
    +        workspace_project = cls(directory)
    
    101
    +        for item in dictionary['projects']:
    
    102
    +            workspace_project._add_project(item['project-path'], item['element-name'])
    
    103
    +
    
    104
    +        return workspace_project
    
    105
    +
    
    106
    +    # load()
    
    107
    +    #
    
    108
    +    # Loads the WorkspaceProject for a given directory. This directory may be a
    
    109
    +    # subdirectory of the workspace's directory.
    
    110
    +    #
    
    111
    +    # Args:
    
    112
    +    #    directory (str): The directory
    
    113
    +    # Returns:
    
    114
    +    #    (WorkspaceProject): The created WorkspaceProject, if in a workspace, or
    
    115
    +    #    (NoneType): None, if the directory is not inside a workspace.
    
    116
    +    @classmethod
    
    117
    +    def load(cls, directory):
    
    118
    +        project_dir = cls.search_for_dir(directory)
    
    119
    +        if project_dir:
    
    120
    +            workspace_file = os.path.join(project_dir, WORKSPACE_PROJECT_FILE)
    
    121
    +            data_dict = _yaml.load(workspace_file)
    
    122
    +            return cls.from_dict(project_dir, data_dict)
    
    123
    +        else:
    
    124
    +            return None
    
    125
    +
    
    126
    +    # write()
    
    127
    +    #
    
    128
    +    # Writes the WorkspaceProject to disk
    
    129
    +    def write(self):
    
    130
    +        os.makedirs(self._directory, exist_ok=True)
    
    131
    +        _yaml.dump(self.to_dict(), self._get_filename())
    
    132
    +
    
    133
    +    # search_for_dir()
    
    134
    +    #
    
    135
    +    # Returns the directory that contains the workspace local project file,
    
    136
    +    # searching upwards from search_dir.
    
    137
    +    @staticmethod
    
    138
    +    def search_for_dir(search_dir):
    
    139
    +        return utils._search_upward_for_file(search_dir, WORKSPACE_PROJECT_FILE)
    
    140
    +
    
    141
    +    def _get_filename(self):
    
    142
    +        return os.path.join(self._directory, WORKSPACE_PROJECT_FILE)
    
    143
    +
    
    144
    +    def _add_project(self, project_path, element_name):
    
    145
    +        assert (project_path and element_name)
    
    146
    +        self._projects.append({'project-path': project_path, 'element-name': element_name})
    
    147
    +
    
    148
    +
    
    149
    +# WorkspaceProjectCache()
    
    150
    +#
    
    151
    +# A class to manage workspace project data for multiple workspaces.
    
    152
    +#
    
    153
    +class WorkspaceProjectCache():
    
    154
    +    def __init__(self):
    
    155
    +        self._projects = {}  # Mapping of a workspace directory to its WorkspaceProject
    
    156
    +
    
    157
    +    # get()
    
    158
    +    #
    
    159
    +    # Returns a WorkspaceProject for a given directory, retrieving from the cache if
    
    160
    +    # present, and searching the filesystem for the file and loading it if not.
    
    161
    +    #
    
    162
    +    # Args:
    
    163
    +    #    directory (str): The directory to search for a WorkspaceProject.
    
    164
    +    #
    
    165
    +    # Returns:
    
    166
    +    #    (WorkspaceProject): The WorkspaceProject that was found for that directory.
    
    167
    +    #    or      (NoneType): None, if no WorkspaceProject can be found.
    
    168
    +    #
    
    169
    +    def get(self, directory):
    
    170
    +        try:
    
    171
    +            workspace_project = self._projects[directory]
    
    172
    +        except KeyError:
    
    173
    +            found_dir = WorkspaceProject.search_for_dir(directory)
    
    174
    +            if found_dir:
    
    175
    +                try:
    
    176
    +                    workspace_project = self._projects[found_dir]
    
    177
    +                except KeyError:
    
    178
    +                    workspace_project = WorkspaceProject.load(found_dir)
    
    179
    +                    self._projects[found_dir] = workspace_project
    
    180
    +            else:
    
    181
    +                workspace_project = None
    
    182
    +
    
    183
    +        return workspace_project
    
    184
    +
    
    185
    +    # add()
    
    186
    +    #
    
    187
    +    # Adds the project path and element name to the WorkspaceProject that exists
    
    188
    +    # for that directory
    
    189
    +    #
    
    190
    +    # Args:
    
    191
    +    #    directory (str): The directory to search for a WorkspaceProject.
    
    192
    +    #    project_path (str): The path to the project that refers to this workspace
    
    193
    +    #    element_name (str): The element in the project that refers to this workspace
    
    194
    +    #
    
    195
    +    # Returns:
    
    196
    +    #    (WorkspaceProject): The WorkspaceProject that was found for that directory.
    
    197
    +    #
    
    198
    +    def add(self, directory, project_path='', element_name=''):
    
    199
    +        workspace_project = self.get(directory)
    
    200
    +        if not workspace_project:
    
    201
    +            workspace_project = WorkspaceProject(directory)
    
    202
    +            self._projects[directory] = workspace_project
    
    203
    +        if project_path:
    
    204
    +            workspace_project._add_project(project_path, element_name)
    
    205
    +        return workspace_project
    
    206
    +
    
    207
    +    # remove()
    
    208
    +    #
    
    209
    +    # Removes the project path and element name from the WorkspaceProject that exists
    
    210
    +    # for that directory.
    
    211
    +    #
    
    212
    +    # NOTE: This currently just deletes the file, but with support for multiple
    
    213
    +    # projects opening the same workspace, this will involve decreasing the count
    
    214
    +    # and deleting the file if there are no more projects.
    
    215
    +    #
    
    216
    +    # Args:
    
    217
    +    #    directory (str): The directory to search for a WorkspaceProject.
    
    218
    +    #    project_path (str): **UNUSED** The path to the project that refers to this workspace
    
    219
    +    #    element_name (str): **UNUSED** The element in the project that refers to this workspace
    
    220
    +    #
    
    221
    +    def remove(self, directory, project_path='', element_name=''):
    
    222
    +        # NOTE: project_path and element_name will only be used when I implement
    
    223
    +        #       multiple owners of a workspace
    
    224
    +        workspace_project = self.get(directory)
    
    225
    +        if not workspace_project:
    
    226
    +            raise LoadError(LoadErrorReason.MISSING_FILE,
    
    227
    +                            "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE))
    
    228
    +        path = workspace_project._get_filename()
    
    229
    +        try:
    
    230
    +            os.unlink(path)
    
    231
    +        except FileNotFoundError:
    
    232
    +            pass
    
    28 233
     
    
    29 234
     
    
    30 235
     # Workspace()
    
    ... ... @@ -174,10 +379,15 @@ class Workspace():
    174 379
             if recalculate or self._key is None:
    
    175 380
                 fullpath = self.get_absolute_path()
    
    176 381
     
    
    382
    +            excluded_files = (WORKSPACE_PROJECT_FILE,)
    
    383
    +
    
    177 384
            # Get a list of tuples of the project relative paths and fullpaths
    
    178 385
                 if os.path.isdir(fullpath):
    
    179 386
                     filelist = utils.list_relative_paths(fullpath)
    
    180
    -                filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
    
    387
    +                filelist = [
    
    388
    +                    (relpath, os.path.join(fullpath, relpath)) for relpath in filelist
    
    389
    +                    if relpath not in excluded_files
    
    390
    +                ]
    
    181 391
                 else:
    
    182 392
                     filelist = [(self.get_absolute_path(), fullpath)]
    
    183 393
     
    

  • buildstream/buildelement.py
    ... ... @@ -127,7 +127,7 @@ artifact collection purposes.
    127 127
     """
    
    128 128
     
    
    129 129
     import os
    
    130
    -from . import Element, Scope, ElementError
    
    130
    +from . import Element, Scope
    
    131 131
     from . import SandboxFlags
    
    132 132
     
    
    133 133
     
    
    ... ... @@ -207,6 +207,10 @@ class BuildElement(Element):
    207 207
             # Setup environment
    
    208 208
             sandbox.set_environment(self.get_environment())
    
    209 209
     
    
    210
    +        # Enable command batching across prepare() and assemble()
    
    211
    +        self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
    
    212
    +                                    collect=self.get_variable('install-root'))
    
    213
    +
    
    210 214
         def stage(self, sandbox):
    
    211 215
     
    
    212 216
             # Stage deps in the sandbox root
    
    ... ... @@ -215,7 +219,7 @@ class BuildElement(Element):
    215 219
     
    
    216 220
             # Run any integration commands provided by the dependencies
    
    217 221
             # once they are all staged and ready
    
    218
    -        with self.timed_activity("Integrating sandbox"):
    
    222
    +        with sandbox.batch(SandboxFlags.NONE, label="Integrating sandbox"):
    
    219 223
                 for dep in self.dependencies(Scope.BUILD):
    
    220 224
                     dep.integrate(sandbox)
    
    221 225
     
    
    ... ... @@ -223,14 +227,13 @@ class BuildElement(Element):
    223 227
             self.stage_sources(sandbox, self.get_variable('build-root'))
    
    224 228
     
    
    225 229
         def assemble(self, sandbox):
    
    226
    -
    
    227 230
             # Run commands
    
    228 231
             for command_name in _command_steps:
    
    229 232
                 commands = self.__commands[command_name]
    
    230 233
                 if not commands or command_name == 'configure-commands':
    
    231 234
                     continue
    
    232 235
     
    
    233
    -            with self.timed_activity("Running {}".format(command_name)):
    
    236
    +            with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running {}".format(command_name)):
    
    234 237
                     for cmd in commands:
    
    235 238
                         self.__run_command(sandbox, cmd, command_name)
    
    236 239
     
    
    ... ... @@ -254,7 +257,7 @@ class BuildElement(Element):
    254 257
         def prepare(self, sandbox):
    
    255 258
             commands = self.__commands['configure-commands']
    
    256 259
             if commands:
    
    257
    -            with self.timed_activity("Running configure-commands"):
    
    260
    +            with sandbox.batch(SandboxFlags.ROOT_READ_ONLY, label="Running configure-commands"):
    
    258 261
                     for cmd in commands:
    
    259 262
                         self.__run_command(sandbox, cmd, 'configure-commands')
    
    260 263
     
    
    ... ... @@ -282,13 +285,9 @@ class BuildElement(Element):
    282 285
             return commands
    
    283 286
     
    
    284 287
         def __run_command(self, sandbox, cmd, cmd_name):
    
    285
    -        self.status("Running {}".format(cmd_name), detail=cmd)
    
    286
    -
    
    287 288
             # Note the -e switch to 'sh' means to exit with an error
    
    288 289
             # if any untested command fails.
    
    289 290
             #
    
    290
    -        exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
    
    291
    -                               SandboxFlags.ROOT_READ_ONLY)
    
    292
    -        if exitcode != 0:
    
    293
    -            raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode),
    
    294
    -                               collect=self.get_variable('install-root'))
    291
    +        sandbox.run(['sh', '-c', '-e', cmd + '\n'],
    
    292
    +                    SandboxFlags.ROOT_READ_ONLY,
    
    293
    +                    label=cmd)

  • buildstream/data/userconfig.yaml
    ... ... @@ -128,6 +128,14 @@ prompt:
    128 128
       #
    
    129 129
       really-workspace-close-remove-dir: ask
    
    130 130
     
    
    131
    +  # Whether to really proceed with 'bst workspace close' when doing so would
    
    132
+  # stop the user from running bst commands in this workspace.
    
    133
    +  #
    
    134
    +  #  ask - Ask the user if they are sure.
    
    135
    +  #  yes - Always close, without asking.
    
    136
    +  #
    
    137
    +  really-workspace-close-project-inaccessible: ask
    
    138
    +
    
    131 139
       # Whether to really proceed with 'bst workspace reset' doing a hard reset of
    
    132 140
       # a workspace, potentially losing changes.
    
    133 141
       #
    

  • buildstream/element.py
    ... ... @@ -78,6 +78,7 @@ import stat
    78 78
     import copy
    
    79 79
     from collections import OrderedDict
    
    80 80
     from collections.abc import Mapping
    
    81
    +import contextlib
    
    81 82
     from contextlib import contextmanager
    
    82 83
     import tempfile
    
    83 84
     import shutil
    
    ... ... @@ -89,7 +90,7 @@ from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
    89 90
         ErrorDomain
    
    90 91
     from .utils import UtilError
    
    91 92
     from . import Plugin, Consistency, Scope
    
    92
    -from . import SandboxFlags
    
    93
    +from . import SandboxFlags, SandboxCommandError
    
    93 94
     from . import utils
    
    94 95
     from . import _cachekey
    
    95 96
     from . import _signals
    
    ... ... @@ -217,6 +218,10 @@ class Element(Plugin):
    217 218
             self.__build_result = None              # The result of assembling this Element (success, description, detail)
    
    218 219
             self._build_log_path = None            # The path of the build log for this Element
    
    219 220
     
    
    221
    +        self.__batch_prepare_assemble = False         # Whether batching across prepare()/assemble() is configured
    
    222
    +        self.__batch_prepare_assemble_flags = 0       # Sandbox flags for batching across prepare()/assemble()
    
    223
    +        self.__batch_prepare_assemble_collect = None  # Collect dir for batching across prepare()/assemble()
    
    224
    +
    
    220 225
             # hash tables of loaded artifact metadata, hashed by key
    
    221 226
             self.__metadata_keys = {}                     # Strong and weak keys for this key
    
    222 227
             self.__metadata_dependencies = {}             # Dictionary of dependency strong keys
    
    ... ... @@ -770,13 +775,13 @@ class Element(Plugin):
    770 775
             environment = self.get_environment()
    
    771 776
     
    
    772 777
             if bstdata is not None:
    
    773
    -            commands = self.node_get_member(bstdata, list, 'integration-commands', [])
    
    774
    -            for i in range(len(commands)):
    
    775
    -                cmd = self.node_subst_list_element(bstdata, 'integration-commands', [i])
    
    776
    -                self.status("Running integration command", detail=cmd)
    
    777
    -                exitcode = sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/')
    
    778
    -                if exitcode != 0:
    
    779
    -                    raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode))
    
    778
    +            with sandbox.batch(SandboxFlags.NONE):
    
    779
    +                commands = self.node_get_member(bstdata, list, 'integration-commands', [])
    
    780
    +                for i in range(len(commands)):
    
    781
    +                    cmd = self.node_subst_list_element(bstdata, 'integration-commands', [i])
    
    782
    +
    
    783
    +                    sandbox.run(['sh', '-e', '-c', cmd], 0, env=environment, cwd='/',
    
    784
    +                                label=cmd)
    
    780 785
     
    
    781 786
         def stage_sources(self, sandbox, directory):
    
    782 787
             """Stage this element's sources to a directory in the sandbox
    
    ... ... @@ -863,6 +868,24 @@ class Element(Plugin):
    863 868
     
    
    864 869
             return None
    
    865 870
     
    
    871
    +    def batch_prepare_assemble(self, flags, *, collect=None):
    
    872
    +        """ Configure command batching across prepare() and assemble()
    
    873
    +
    
    874
    +        Args:
    
    875
    +           flags (:class:`.SandboxFlags`): The sandbox flags for the command batch
    
    876
    +           collect (str): An optional directory containing partial install contents
    
    877
    +                          on command failure.
    
    878
    +
    
    879
    +        This may be called in :func:`Element.configure_sandbox() <buildstream.element.Element.configure_sandbox>`
    
    880
    +        to enable batching of all sandbox commands issued in prepare() and assemble().
    
    881
    +        """
    
    882
    +        if self.__batch_prepare_assemble:
    
    883
    +            raise ElementError("{}: Command batching for prepare/assemble is already configured".format(self))
    
    884
    +
    
    885
    +        self.__batch_prepare_assemble = True
    
    886
    +        self.__batch_prepare_assemble_flags = flags
    
    887
    +        self.__batch_prepare_assemble_collect = collect
    
    888
    +
    
    866 889
         #############################################################
    
    867 890
         #            Private Methods used in BuildStream            #
    
    868 891
         #############################################################
    
    ... ... @@ -1323,7 +1346,7 @@ class Element(Plugin):
    1323 1346
                                 bare_directory=bare_directory) as sandbox:
    
    1324 1347
     
    
    1325 1348
                 # Configure always comes first, and we need it.
    
    1326
    -            self.configure_sandbox(sandbox)
    
    1349
    +            self.__configure_sandbox(sandbox)
    
    1327 1350
     
    
    1328 1351
                 # Stage something if we need it
    
    1329 1352
                 if not directory:
    
    ... ... @@ -1556,15 +1579,24 @@ class Element(Plugin):
    1556 1579
                     # Call the abstract plugin methods
    
    1557 1580
                     try:
    
    1558 1581
                         # Step 1 - Configure
    
    1559
    -                    self.configure_sandbox(sandbox)
    
    1582
    +                    self.__configure_sandbox(sandbox)
    
    1560 1583
                         # Step 2 - Stage
    
    1561 1584
                         self.stage(sandbox)
    
    1562
    -                    # Step 3 - Prepare
    
    1563
    -                    self.__prepare(sandbox)
    
    1564
    -                    # Step 4 - Assemble
    
    1565
    -                    collect = self.assemble(sandbox)  # pylint: disable=assignment-from-no-return
    
    1585
    +
    
    1586
    +                    if self.__batch_prepare_assemble:
    
    1587
    +                        cm = sandbox.batch(self.__batch_prepare_assemble_flags,
    
    1588
    +                                           collect=self.__batch_prepare_assemble_collect)
    
    1589
    +                    else:
    
    1590
    +                        cm = contextlib.suppress()
    
    1591
    +
    
    1592
    +                    with cm:
    
    1593
    +                        # Step 3 - Prepare
    
    1594
    +                        self.__prepare(sandbox)
    
    1595
    +                        # Step 4 - Assemble
    
    1596
    +                        collect = self.assemble(sandbox)  # pylint: disable=assignment-from-no-return
    
    1597
    +
    
    1566 1598
                         self.__set_build_result(success=True, description="succeeded")
    
    1567
    -                except ElementError as e:
    
    1599
    +                except (ElementError, SandboxCommandError) as e:
    
    1568 1600
                         # Shelling into a sandbox is useful to debug this error
    
    1569 1601
                         e.sandbox = True
    
    1570 1602
     
    
    ... ... @@ -2059,6 +2091,15 @@ class Element(Plugin):
    2059 2091
         def __can_build_incrementally(self):
    
    2060 2092
             return bool(self._get_workspace())
    
    2061 2093
     
    
    2094
    +    # __configure_sandbox():
    
    2095
    +    #
    
    2096
    +    # Internal method for calling public abstract configure_sandbox() method.
    
    2097
    +    #
    
    2098
    +    def __configure_sandbox(self, sandbox):
    
    2099
    +        self.__batch_prepare_assemble = False
    
    2100
    +
    
    2101
    +        self.configure_sandbox(sandbox)
    
    2102
    +
    
    2062 2103
         # __prepare():
    
    2063 2104
         #
    
    2064 2105
         # Internal method for calling public abstract prepare() method.
    
    ... ... @@ -2074,7 +2115,12 @@ class Element(Plugin):
    2074 2115
                 self.prepare(sandbox)
    
    2075 2116
     
    
    2076 2117
                 if workspace:
    
    2077
    -                workspace.prepared = True
    
    2118
    +                def mark_workspace_prepared():
    
    2119
    +                    workspace.prepared = True
    
    2120
    +
    
    2121
    +                # Defer workspace.prepared setting until pending batch commands
    
    2122
    +                # have been executed.
    
    2123
    +                sandbox._callback(mark_workspace_prepared)
    
    2078 2124
     
    
    2079 2125
         def __is_cached(self, keystrength):
    
    2080 2126
             if keystrength is None:
    
    ... ... @@ -2157,6 +2203,7 @@ class Element(Plugin):
    2157 2203
     
    
    2158 2204
                 sandbox = SandboxRemote(context, project,
    
    2159 2205
                                         directory,
    
    2206
    +                                    plugin=self,
    
    2160 2207
                                         stdout=stdout,
    
    2161 2208
                                         stderr=stderr,
    
    2162 2209
                                         config=config,
    
    ... ... @@ -2175,6 +2222,7 @@ class Element(Plugin):
    2175 2222
     
    
    2176 2223
                 sandbox = platform.create_sandbox(context, project,
    
    2177 2224
                                                   directory,
    
    2225
    +                                              plugin=self,
    
    2178 2226
                                                   stdout=stdout,
    
    2179 2227
                                                   stderr=stderr,
    
    2180 2228
                                                   config=config,
    

  • buildstream/plugins/elements/compose.py
    ... ... @@ -122,8 +122,9 @@ class ComposeElement(Element):
    122 122
                         snapshot = set(vbasedir.list_relative_paths())
    
    123 123
                         vbasedir.mark_unmodified()
    
    124 124
     
    
    125
    -                for dep in self.dependencies(Scope.BUILD):
    
    126
    -                    dep.integrate(sandbox)
    
    125
    +                with sandbox.batch(0):
    
    126
    +                    for dep in self.dependencies(Scope.BUILD):
    
    127
    +                        dep.integrate(sandbox)
    
    127 128
     
    
    128 129
                     if require_split:
    
    129 130
                         # Calculate added, modified and removed files
    

  • buildstream/plugins/sources/local.py
    ... ... @@ -124,6 +124,9 @@ class LocalSource(Source):
    124 124
                         else:
    
    125 125
                             os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
    
    126 126
     
    
    127
    +    def _get_local_path(self):
    
    128
    +        return self.fullpath
    
    129
    +
    
    127 130
     
    
    128 131
     # Create a unique key for a file
    
    129 132
     def unique_key(filename):
    

  • buildstream/sandbox/__init__.py
    ... ... @@ -17,6 +17,6 @@
    17 17
     #  Authors:
    
    18 18
     #        Tristan Maat <tristan maat codethink co uk>
    
    19 19
     
    
    20
    -from .sandbox import Sandbox, SandboxFlags
    
    20
    +from .sandbox import Sandbox, SandboxFlags, SandboxCommandError
    
    21 21
     from ._sandboxremote import SandboxRemote
    
    22 22
     from ._sandboxdummy import SandboxDummy

  • buildstream/sandbox/_sandboxbwrap.py
    ... ... @@ -58,22 +58,12 @@ class SandboxBwrap(Sandbox):
    58 58
             self.die_with_parent_available = kwargs['die_with_parent_available']
    
    59 59
             self.json_status_available = kwargs['json_status_available']
    
    60 60
     
    
    61
    -    def run(self, command, flags, *, cwd=None, env=None):
    
    61
    +    def _run(self, command, flags, *, cwd, env):
    
    62 62
             stdout, stderr = self._get_output()
    
    63 63
     
    
    64 64
             # Allowable access to underlying storage as we're part of the sandbox
    
    65 65
             root_directory = self.get_virtual_directory()._get_underlying_directory()
    
    66 66
     
    
    67
    -        # Fallback to the sandbox default settings for
    
    68
    -        # the cwd and env.
    
    69
    -        #
    
    70
    -        cwd = self._get_work_directory(cwd=cwd)
    
    71
    -        env = self._get_environment(cwd=cwd, env=env)
    
    72
    -
    
    73
    -        # Convert single-string argument to a list
    
    74
    -        if isinstance(command, str):
    
    75
    -            command = [command]
    
    76
    -
    
    77 67
             if not self._has_command(command[0], env):
    
    78 68
                 raise SandboxError("Staged artifacts do not provide command "
    
    79 69
                                    "'{}'".format(command[0]),
    

  • buildstream/sandbox/_sandboxchroot.py
    ... ... @@ -49,17 +49,7 @@ class SandboxChroot(Sandbox):
    49 49
     
    
    50 50
             self.mount_map = None
    
    51 51
     
    
    52
    -    def run(self, command, flags, *, cwd=None, env=None):
    
    53
    -
    
    54
    -        # Fallback to the sandbox default settings for
    
    55
    -        # the cwd and env.
    
    56
    -        #
    
    57
    -        cwd = self._get_work_directory(cwd=cwd)
    
    58
    -        env = self._get_environment(cwd=cwd, env=env)
    
    59
    -
    
    60
    -        # Convert single-string argument to a list
    
    61
    -        if isinstance(command, str):
    
    62
    -            command = [command]
    
    52
    +    def _run(self, command, flags, *, cwd, env):
    
    63 53
     
    
    64 54
             if not self._has_command(command[0], env):
    
    65 55
                 raise SandboxError("Staged artifacts do not provide command "
    

  • buildstream/sandbox/_sandboxdummy.py
    ... ... @@ -25,17 +25,7 @@ class SandboxDummy(Sandbox):
    25 25
             super().__init__(*args, **kwargs)
    
    26 26
             self._reason = kwargs.get("dummy_reason", "no reason given")
    
    27 27
     
    
    28
    -    def run(self, command, flags, *, cwd=None, env=None):
    
    29
    -
    
    30
    -        # Fallback to the sandbox default settings for
    
    31
    -        # the cwd and env.
    
    32
    -        #
    
    33
    -        cwd = self._get_work_directory(cwd=cwd)
    
    34
    -        env = self._get_environment(cwd=cwd, env=env)
    
    35
    -
    
    36
    -        # Convert single-string argument to a list
    
    37
    -        if isinstance(command, str):
    
    38
    -            command = [command]
    
    28
    +    def _run(self, command, flags, *, cwd, env):
    
    39 29
     
    
    40 30
             if not self._has_command(command[0], env):
    
    41 31
                 raise SandboxError("Staged artifacts do not provide command "
    

  • buildstream/sandbox/_sandboxremote.py
    ... ... @@ -19,12 +19,14 @@
    19 19
     #        Jim MacArthur <jim macarthur codethink co uk>
    
    20 20
     
    
    21 21
     import os
    
    22
    +import shlex
    
    22 23
     from urllib.parse import urlparse
    
    23 24
     from functools import partial
    
    24 25
     
    
    25 26
     import grpc
    
    26 27
     
    
    27
    -from . import Sandbox
    
    28
    +from . import Sandbox, SandboxCommandError
    
    29
    +from .sandbox import _SandboxBatch
    
    28 30
     from ..storage._filebaseddirectory import FileBasedDirectory
    
    29 31
     from ..storage._casbaseddirectory import CasBasedDirectory
    
    30 32
     from .. import _signals
    
    ... ... @@ -212,7 +214,7 @@ class SandboxRemote(Sandbox):
    212 214
             new_dir = CasBasedDirectory(self._get_context().artifactcache.cas, ref=dir_digest)
    
    213 215
             self._set_virtual_directory(new_dir)
    
    214 216
     
    
    215
    -    def run(self, command, flags, *, cwd=None, env=None):
    
    217
    +    def _run(self, command, flags, *, cwd, env):
    
    216 218
             # Upload sources
    
    217 219
             upload_vdir = self.get_virtual_directory()
    
    218 220
     
    
    ... ... @@ -230,16 +232,6 @@ class SandboxRemote(Sandbox):
    230 232
             if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):
    
    231 233
                 raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")
    
    232 234
     
    
    233
    -        # Fallback to the sandbox default settings for
    
    234
    -        # the cwd and env.
    
    235
    -        #
    
    236
    -        cwd = self._get_work_directory(cwd=cwd)
    
    237
    -        env = self._get_environment(cwd=cwd, env=env)
    
    238
    -
    
    239
    -        # We want command args as a list of strings
    
    240
    -        if isinstance(command, str):
    
    241
    -            command = [command]
    
    242
    -
    
    243 235
             # Now transmit the command to execute
    
    244 236
             operation = self.run_remote_command(command, upload_vdir.ref, cwd, env)
    
    245 237
     
    
    ... ... @@ -275,3 +267,69 @@ class SandboxRemote(Sandbox):
    275 267
             self.process_job_output(action_result.output_directories, action_result.output_files)
    
    276 268
     
    
    277 269
             return 0
    
    270
    +
    
    271
    +    def _create_batch(self, main_group, flags, *, collect=None):
    
    272
    +        return _SandboxRemoteBatch(self, main_group, flags, collect=collect)
    
    273
    +
    
    274
    +
    
    275
    +# _SandboxRemoteBatch()
    
    276
    +#
    
    277
    +# Command batching by shell script generation.
    
    278
    +#
    
    279
    +class _SandboxRemoteBatch(_SandboxBatch):
    
    280
    +
    
    281
    +    def __init__(self, sandbox, main_group, flags, *, collect=None):
    
    282
    +        super().__init__(sandbox, main_group, flags, collect=collect)
    
    283
    +
    
    284
    +        self.script = None
    
    285
    +        self.first_command = None
    
    286
    +        self.cwd = None
    
    287
    +        self.env = None
    
    288
    +
    
    289
    +    def execute(self):
    
    290
    +        self.script = ""
    
    291
    +
    
    292
    +        self.main_group.execute(self)
    
    293
    +
    
    294
    +        first = self.first_command
    
    295
    +        if first and self.sandbox.run(['sh', '-c', '-e', self.script], self.flags, cwd=first.cwd, env=first.env) != 0:
    
    296
    +            raise SandboxCommandError("Command execution failed", collect=self.collect)
    
    297
    +
    
    298
    +    def execute_group(self, group):
    
    299
    +        group.execute_children(self)
    
    300
    +
    
    301
    +    def execute_command(self, command):
    
    302
    +        if self.first_command is None:
    
    303
    +            # First command in batch
    
    304
    +            # Initial working directory and environment of script already matches
    
    305
    +            # the command configuration.
    
    306
    +            self.first_command = command
    
    307
    +        else:
    
    308
    +            # Change working directory for this command
    
    309
    +            if command.cwd != self.cwd:
    
    310
    +                self.script += "mkdir -p {}\n".format(command.cwd)
    
    311
    +                self.script += "cd {}\n".format(command.cwd)
    
    312
    +
    
    313
    +            # Update environment for this command
    
    314
    +            for key in self.env.keys():
    
    315
    +                if key not in command.env:
    
    316
    +                    self.script += "unset {}\n".format(key)
    
    317
    +            for key, value in command.env.items():
    
    318
    +                if key not in self.env or self.env[key] != value:
    
    319
    +                    self.script += "export {}={}\n".format(key, shlex.quote(value))
    
    320
    +
    
    321
    +        # Keep track of current working directory and environment
    
    322
    +        self.cwd = command.cwd
    
    323
    +        self.env = command.env
    
    324
    +
    
    325
    +        # Actual command execution
    
    326
    +        cmdline = ' '.join(shlex.quote(cmd) for cmd in command.command)
    
    327
    +        self.script += "(set -ex; {})".format(cmdline)
    
    328
    +
    
    329
    +        # Error handling
    
    330
    +        label = command.label or cmdline
    
    331
    +        quoted_label = shlex.quote("'{}'".format(label))
    
    332
    +        self.script += " || (echo Command {} failed with exitcode $? >&2 ; exit 1)\n".format(quoted_label)
    
    333
    +
    
    334
    +    def execute_call(self, call):
    
    335
    +        raise SandboxError("SandboxRemote does not support callbacks in command batches")

  • buildstream/sandbox/sandbox.py
    1 1
     #
    
    2 2
     #  Copyright (C) 2017 Codethink Limited
    
    3
    +#  Copyright (C) 2018 Bloomberg Finance LP
    
    3 4
     #
    
    4 5
     #  This program is free software; you can redistribute it and/or
    
    5 6
     #  modify it under the terms of the GNU Lesser General Public
    
    ... ... @@ -29,7 +30,12 @@ See also: :ref:`sandboxing`.
    29 30
     """
    
    30 31
     
    
    31 32
     import os
    
    32
    -from .._exceptions import ImplError, BstError
    
    33
    +import shlex
    
    34
    +import contextlib
    
    35
    +from contextlib import contextmanager
    
    36
    +
    
    37
    +from .._exceptions import ImplError, BstError, SandboxError
    
    38
    +from .._message import Message, MessageType
    
    33 39
     from ..storage._filebaseddirectory import FileBasedDirectory
    
    34 40
     from ..storage._casbaseddirectory import CasBasedDirectory
    
    35 41
     
    
    ... ... @@ -38,6 +44,10 @@ class SandboxFlags():
    38 44
         """Flags indicating how the sandbox should be run.
    
    39 45
         """
    
    40 46
     
    
    47
    +    NONE = 0
    
    48
    +    """Use default sandbox configuration.
    
    49
    +    """
    
    50
    +
    
    41 51
         ROOT_READ_ONLY = 0x01
    
    42 52
         """The root filesystem is read only.
    
    43 53
     
    
    ... ... @@ -71,6 +81,19 @@ class SandboxFlags():
    71 81
         """
    
    72 82
     
    
    73 83
     
    
    84
    +class SandboxCommandError(SandboxError):
    
    85
    +    """Raised by :class:`.Sandbox` implementations when a command fails.
    
    86
    +
    
    87
    +    Args:
    
    88
    +       message (str): The error message to report to the user
    
    89
    +       collect (str): An optional directory containing partial install contents
    
    90
    +    """
    
    91
    +    def __init__(self, message, *, collect=None):
    
    92
    +        super().__init__(message, reason='command-failed')
    
    93
    +
    
    94
    +        self.collect = collect
    
    95
    +
    
    96
    +
    
    74 97
     class Sandbox():
    
    75 98
         """Sandbox()
    
    76 99
     
    
    ... ... @@ -94,6 +117,13 @@ class Sandbox():
    94 117
             self.__mount_sources = {}
    
    95 118
             self.__allow_real_directory = kwargs['allow_real_directory']
    
    96 119
     
    
    120
    +        # Plugin ID for logging
    
    121
    +        plugin = kwargs.get('plugin', None)
    
    122
    +        if plugin:
    
    123
    +            self.__plugin_id = plugin._get_unique_id()
    
    124
    +        else:
    
    125
    +            self.__plugin_id = None
    
    126
    +
    
    97 127
             # Configuration from kwargs common to all subclasses
    
    98 128
             self.__config = kwargs['config']
    
    99 129
             self.__stdout = kwargs['stdout']
    
    ... ... @@ -121,6 +151,9 @@ class Sandbox():
    121 151
             # directory via get_directory.
    
    122 152
             self._never_cache_vdirs = False
    
    123 153
     
    
    154
    +        # Pending command batch
    
    155
    +        self.__batch = None
    
    156
    +
    
    124 157
         def get_directory(self):
    
    125 158
             """Fetches the sandbox root directory
    
    126 159
     
    
    ... ... @@ -209,9 +242,16 @@ class Sandbox():
    209 242
                 'artifact': artifact
    
    210 243
             })
    
    211 244
     
    
    212
    -    def run(self, command, flags, *, cwd=None, env=None):
    
    245
    +    def run(self, command, flags, *, cwd=None, env=None, label=None):
    
    213 246
             """Run a command in the sandbox.
    
    214 247
     
    
    248
    +        If this is called outside a batch context, the command is immediately
    
    249
    +        executed.
    
    250
    +
    
    251
    +        If this is called in a batch context, the command is added to the batch
    
    252
    +        for later execution. If the command fails, later commands will not be
    
    253
    +        executed. Command flags must match batch flags.
    
    254
    +
    
    215 255
             Args:
    
    216 256
                 command (list): The command to run in the sandboxed environment, as a list
    
    217 257
                                 of strings starting with the binary to run.
    
    ... ... @@ -219,9 +259,10 @@ class Sandbox():
    219 259
                 cwd (str): The sandbox relative working directory in which to run the command.
    
    220 260
                 env (dict): A dictionary of string key, value pairs to set as environment
    
    221 261
                             variables inside the sandbox environment.
    
    262
    +            label (str): An optional label for the command, used for logging. (*Since: 1.4*)
    
    222 263
     
    
    223 264
             Returns:
    
    224
    -            (int): The program exit code.
    
    265
    +            (int|None): The program exit code, or None if running in batch context.
    
    225 266
     
    
    226 267
             Raises:
    
    227 268
                 (:class:`.ProgramNotFoundError`): If a host tool which the given sandbox
    
    ... ... @@ -234,9 +275,115 @@ class Sandbox():
    234 275
                function must make sure the directory will be created if it does
    
    235 276
                not exist yet, even if a workspace is being used.
    
    236 277
             """
    
    237
    -        raise ImplError("Sandbox of type '{}' does not implement run()"
    
    278
    +
    
    279
    +        # Fallback to the sandbox default settings for
    
    280
    +        # the cwd and env.
    
    281
    +        #
    
    282
    +        cwd = self._get_work_directory(cwd=cwd)
    
    283
    +        env = self._get_environment(cwd=cwd, env=env)
    
    284
    +
    
    285
    +        # Convert single-string argument to a list
    
    286
    +        if isinstance(command, str):
    
    287
    +            command = [command]
    
    288
    +
    
    289
    +        if self.__batch:
    
    290
    +            if flags != self.__batch.flags:
    
    291
    +                raise SandboxError("Inconsistent sandbox flags in single command batch")
    
    292
    +
    
    293
    +            batch_command = _SandboxBatchCommand(command, cwd=cwd, env=env, label=label)
    
    294
    +
    
    295
    +            current_group = self.__batch.current_group
    
    296
    +            current_group.append(batch_command)
    
    297
    +            return None
    
    298
    +        else:
    
    299
    +            return self._run(command, flags, cwd=cwd, env=env)
    
    300
    +
    
    301
    +    @contextmanager
    
    302
    +    def batch(self, flags, *, label=None, collect=None):
    
    303
    +        """Context manager for command batching
    
    304
    +
    
    305
    +        This provides a batch context that defers execution of commands until
    
    306
    +        the end of the context. If a command fails, the batch will be aborted
    
    307
    +        and subsequent commands will not be executed.
    
    308
    +
    
    309
    +        Command batches may be nested. Execution will start only when the top
    
    310
    +        level batch context ends.
    
    311
    +
    
    312
    +        Args:
    
    313
    +            flags (:class:`.SandboxFlags`): The flags for this command batch.
    
    314
    +            label (str): An optional label for the batch group, used for logging.
    
    315
    +            collect (str): An optional directory containing partial install contents
    
    316
    +                           on command failure.
    
    317
    +
    
    318
    +        Raises:
    
    319
    +            (:class:`.SandboxCommandError`): If a command fails.
    
    320
    +
    
    321
    +        *Since: 1.4*
    
    322
    +        """
    
    323
    +
    
    324
    +        group = _SandboxBatchGroup(label=label)
    
    325
    +
    
    326
    +        if self.__batch:
    
    327
    +            # Nested batch
    
    328
    +            if flags != self.__batch.flags:
    
    329
    +                raise SandboxError("Inconsistent sandbox flags in single command batch")
    
    330
    +
    
    331
    +            parent_group = self.__batch.current_group
    
    332
    +            parent_group.append(group)
    
    333
    +            self.__batch.current_group = group
    
    334
    +            try:
    
    335
    +                yield
    
    336
    +            finally:
    
    337
    +                self.__batch.current_group = parent_group
    
    338
    +        else:
    
    339
    +            # Top-level batch
    
    340
    +            batch = self._create_batch(group, flags, collect=collect)
    
    341
    +
    
    342
    +            self.__batch = batch
    
    343
    +            try:
    
    344
    +                yield
    
    345
    +            finally:
    
    346
    +                self.__batch = None
    
    347
    +
    
    348
    +            batch.execute()
    
    349
    +
    
    350
    +    #####################################################
    
    351
    +    #    Abstract Methods for Sandbox implementations   #
    
    352
    +    #####################################################
    
    353
    +
    
    354
    +    # _run()
    
    355
    +    #
    
    356
    +    # Abstract method for running a single command
    
    357
    +    #
    
    358
    +    # Args:
    
    359
    +    #    command (list): The command to run in the sandboxed environment, as a list
    
    360
    +    #                    of strings starting with the binary to run.
    
    361
    +    #    flags (:class:`.SandboxFlags`): The flags for running this command.
    
    362
    +    #    cwd (str): The sandbox relative working directory in which to run the command.
    
    363
    +    #    env (dict): A dictionary of string key, value pairs to set as environment
    
    364
    +    #                variables inside the sandbox environment.
    
    365
    +    #
    
    366
    +    # Returns:
    
    367
    +    #    (int): The program exit code.
    
    368
    +    #
    
    369
    +    def _run(self, command, flags, *, cwd, env):
    
    370
    +        raise ImplError("Sandbox of type '{}' does not implement _run()"
    
    238 371
                             .format(type(self).__name__))
    
    239 372
     
    
    373
    +    # _create_batch()
    
    374
    +    #
    
    375
    +    # Abstract method for creating a batch object. Subclasses can override
    
    376
    +    # this method to instantiate a subclass of _SandboxBatch.
    
    377
    +    #
    
    378
    +    # Args:
    
    379
    +    #    main_group (:class:`_SandboxBatchGroup`): The top level batch group.
    
    380
    +    #    flags (:class:`.SandboxFlags`): The flags for commands in this batch.
    
    381
    +    #    collect (str): An optional directory containing partial install contents
    
    382
    +    #                   on command failure.
    
    383
    +    #
    
    384
    +    def _create_batch(self, main_group, flags, *, collect=None):
    
    385
    +        return _SandboxBatch(self, main_group, flags, collect=collect)
    
    386
    +
    
    240 387
         ################################################
    
    241 388
         #               Private methods                #
    
    242 389
         ################################################
    
    ... ... @@ -385,3 +532,138 @@ class Sandbox():
    385 532
                     return True
    
    386 533
     
    
    387 534
             return False
    
    535
    +
    
    536
    +    # _get_plugin_id()
    
    537
    +    #
    
    538
    +    # Get the plugin's unique identifier
    
    539
    +    #
    
    540
    +    def _get_plugin_id(self):
    
    541
    +        return self.__plugin_id
    
    542
    +
    
    543
    +    # _callback()
    
    544
    +    #
    
    545
    +    # If this is called outside a batch context, the specified function is
    
    546
    +    # invoked immediately.
    
    547
    +    #
    
    548
    +    # If this is called in a batch context, the function is added to the batch
    
    549
    +    # for later invocation.
    
    550
    +    #
    
    551
    +    # Args:
    
    552
    +    #    callback (callable): The function to invoke
    
    553
    +    #
    
    554
    +    def _callback(self, callback):
    
    555
    +        if self.__batch:
    
    556
    +            batch_call = _SandboxBatchCall(callback)
    
    557
    +
    
    558
    +            current_group = self.__batch.current_group
    
    559
    +            current_group.append(batch_call)
    
    560
    +        else:
    
    561
    +            callback()
    
    562
    +
    
    563
    +
    
    564
    +# _SandboxBatch()
    
    565
    +#
    
    566
    +# A batch of sandbox commands.
    
    567
    +#
    
    568
    +class _SandboxBatch():
    
    569
    +
    
    570
    +    def __init__(self, sandbox, main_group, flags, *, collect=None):
    
    571
    +        self.sandbox = sandbox
    
    572
    +        self.main_group = main_group
    
    573
    +        self.current_group = main_group
    
    574
    +        self.flags = flags
    
    575
    +        self.collect = collect
    
    576
    +
    
    577
    +    def execute(self):
    
    578
    +        self.main_group.execute(self)
    
    579
    +
    
    580
    +    def execute_group(self, group):
    
    581
    +        if group.label:
    
    582
    +            context = self.sandbox._get_context()
    
    583
    +            cm = context.timed_activity(group.label, unique_id=self.sandbox._get_plugin_id())
    
    584
    +        else:
    
    585
    +            cm = contextlib.suppress()
    
    586
    +
    
    587
    +        with cm:
    
    588
    +            group.execute_children(self)
    
    589
    +
    
    590
    +    def execute_command(self, command):
    
    591
    +        if command.label:
    
    592
    +            context = self.sandbox._get_context()
    
    593
    +            message = Message(self.sandbox._get_plugin_id(), MessageType.STATUS,
    
    594
    +                              'Running {}'.format(command.label))
    
    595
    +            context.message(message)
    
    596
    +
    
    597
    +        exitcode = self.sandbox._run(command.command, self.flags, cwd=command.cwd, env=command.env)
    
    598
    +        if exitcode != 0:
    
    599
    +            cmdline = ' '.join(shlex.quote(cmd) for cmd in command.command)
    
    600
    +            label = command.label or cmdline
    
    601
    +            raise SandboxCommandError("Command '{}' failed with exitcode {}".format(label, exitcode),
    
    602
    +                                      collect=self.collect)
    
    603
    +
    
    604
    +    def execute_call(self, call):
    
    605
    +        call.callback()
    
    606
    +
    
    607
    +
    
    608
    +# _SandboxBatchItem()
    
    609
    +#
    
    610
    +# An item in a command batch.
    
    611
    +#
    
    612
    +class _SandboxBatchItem():
    
    613
    +
    
    614
    +    def __init__(self, *, label=None):
    
    615
    +        self.label = label
    
    616
    +
    
    617
    +
    
    618
    +# _SandboxBatchCommand()
    
    619
    +#
    
    620
    +# A command item in a command batch.
    
    621
    +#
    
    622
    +class _SandboxBatchCommand(_SandboxBatchItem):
    
    623
    +
    
    624
    +    def __init__(self, command, *, cwd, env, label=None):
    
    625
    +        super().__init__(label=label)
    
    626
    +
    
    627
    +        self.command = command
    
    628
    +        self.cwd = cwd
    
    629
    +        self.env = env
    
    630
    +
    
    631
    +    def execute(self, batch):
    
    632
    +        batch.execute_command(self)
    
    633
    +
    
    634
    +
    
    635
    +# _SandboxBatchGroup()
    
    636
    +#
    
    637
    +# A group in a command batch.
    
    638
    +#
    
    639
    +class _SandboxBatchGroup(_SandboxBatchItem):
    
    640
    +
    
    641
    +    def __init__(self, *, label=None):
    
    642
    +        super().__init__(label=label)
    
    643
    +
    
    644
    +        self.children = []
    
    645
    +
    
    646
    +    def append(self, item):
    
    647
    +        self.children.append(item)
    
    648
    +
    
    649
    +    def execute(self, batch):
    
    650
    +        batch.execute_group(self)
    
    651
    +
    
    652
    +    def execute_children(self, batch):
    
    653
    +        for item in self.children:
    
    654
    +            item.execute(batch)
    
    655
    +
    
    656
    +
    
    657
    +# _SandboxBatchCall()
    
    658
    +#
    
    659
    +# A call item in a command batch.
    
    660
    +#
    
    661
    +class _SandboxBatchCall(_SandboxBatchItem):
    
    662
    +
    
    663
    +    def __init__(self, callback):
    
    664
    +        super().__init__()
    
    665
    +
    
    666
    +        self.callback = callback
    
    667
    +
    
    668
    +    def execute(self, batch):
    
    669
    +        batch.execute_call(self)

  • buildstream/scriptelement.py
    ... ... @@ -226,10 +226,11 @@ class ScriptElement(Element):
    226 226
                                              .format(build_dep.name), silent_nested=True):
    
    227 227
                         build_dep.stage_dependency_artifacts(sandbox, Scope.RUN, path="/")
    
    228 228
     
    
    229
    -            for build_dep in self.dependencies(Scope.BUILD, recurse=False):
    
    230
    -                with self.timed_activity("Integrating {}".format(build_dep.name), silent_nested=True):
    
    231
    -                    for dep in build_dep.dependencies(Scope.RUN):
    
    232
    -                        dep.integrate(sandbox)
    
    229
    +            with sandbox.batch(SandboxFlags.NONE):
    
    230
    +                for build_dep in self.dependencies(Scope.BUILD, recurse=False):
    
    231
    +                    with self.timed_activity("Integrating {}".format(build_dep.name), silent_nested=True):
    
    232
    +                        for dep in build_dep.dependencies(Scope.RUN):
    
    233
    +                            dep.integrate(sandbox)
    
    233 234
             else:
    
    234 235
                 # If layout, follow its rules.
    
    235 236
                 for item in self.__layout:
    
    ... ... @@ -251,37 +252,40 @@ class ScriptElement(Element):
    251 252
                             virtual_dstdir.descend(item['destination'].lstrip(os.sep).split(os.sep), create=True)
    
    252 253
                             element.stage_dependency_artifacts(sandbox, Scope.RUN, path=item['destination'])
    
    253 254
     
    
    254
    -            for item in self.__layout:
    
    255
    +            with sandbox.batch(SandboxFlags.NONE):
    
    256
    +                for item in self.__layout:
    
    255 257
     
    
    256
    -                # Skip layout members which dont stage an element
    
    257
    -                if not item['element']:
    
    258
    -                    continue
    
    258
    +                    # Skip layout members which dont stage an element
    
    259
    +                    if not item['element']:
    
    260
    +                        continue
    
    259 261
     
    
    260
    -                element = self.search(Scope.BUILD, item['element'])
    
    262
    +                    element = self.search(Scope.BUILD, item['element'])
    
    261 263
     
    
    262
    -                # Integration commands can only be run for elements staged to /
    
    263
    -                if item['destination'] == '/':
    
    264
    -                    with self.timed_activity("Integrating {}".format(element.name),
    
    265
    -                                             silent_nested=True):
    
    266
    -                        for dep in element.dependencies(Scope.RUN):
    
    267
    -                            dep.integrate(sandbox)
    
    264
    +                    # Integration commands can only be run for elements staged to /
    
    265
    +                    if item['destination'] == '/':
    
    266
    +                        with self.timed_activity("Integrating {}".format(element.name),
    
    267
    +                                                 silent_nested=True):
    
    268
    +                            for dep in element.dependencies(Scope.RUN):
    
    269
    +                                dep.integrate(sandbox)
    
    268 270
     
    
    269 271
             install_root_path_components = self.__install_root.lstrip(os.sep).split(os.sep)
    
    270 272
             sandbox.get_virtual_directory().descend(install_root_path_components, create=True)
    
    271 273
     
    
    272 274
         def assemble(self, sandbox):
    
    273 275
     
    
    274
    -        for groupname, commands in self.__commands.items():
    
    275
    -            with self.timed_activity("Running '{}'".format(groupname)):
    
    276
    -                for cmd in commands:
    
    277
    -                    self.status("Running command", detail=cmd)
    
    278
    -                    # Note the -e switch to 'sh' means to exit with an error
    
    279
    -                    # if any untested command fails.
    
    280
    -                    exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
    
    281
    -                                           SandboxFlags.ROOT_READ_ONLY if self.__root_read_only else 0)
    
    282
    -                    if exitcode != 0:
    
    283
    -                        raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode),
    
    284
    -                                           collect=self.__install_root)
    
    276
    +        flags = SandboxFlags.NONE
    
    277
    +        if self.__root_read_only:
    
    278
    +            flags |= SandboxFlags.ROOT_READ_ONLY
    
    279
    +
    
    280
    +        with sandbox.batch(flags, collect=self.__install_root):
    
    281
    +            for groupname, commands in self.__commands.items():
    
    282
    +                with sandbox.batch(flags, label="Running '{}'".format(groupname)):
    
    283
    +                    for cmd in commands:
    
    284
    +                        # Note the -e switch to 'sh' means to exit with an error
    
    285
    +                        # if any untested command fails.
    
    286
    +                        sandbox.run(['sh', '-c', '-e', cmd + '\n'],
    
    287
    +                                    flags,
    
    288
    +                                    label=cmd)
    
    285 289
     
    
    286 290
             # Return where the result can be collected from
    
    287 291
             return self.__install_root
    

  • buildstream/source.py
    ... ... @@ -615,6 +615,23 @@ class Source(Plugin):
    615 615
             with utils._tempdir(dir=mirrordir) as tempdir:
    
    616 616
                 yield tempdir
    
    617 617
     
    
    618
    +    #############################################################
    
    619
    +    #       Private Abstract Methods used in BuildStream        #
    
    620
    +    #############################################################
    
    621
    +
    
    622
    +    # Returns the local path to the source
    
    623
    +    #
    
    624
    +    # If the source is locally available, this method returns the absolute
    
    625
    +    # path. Otherwise, the return value is None.
    
    626
    +    #
    
    627
    +    # This is an optimization for local sources and optional to implement.
    
    628
    +    #
    
    629
    +    # Returns:
    
    630
    +    #    (str): The local absolute path, or None
    
    631
    +    #
    
    632
    +    def _get_local_path(self):
    
    633
    +        return None
    
    634
    +
    
    618 635
         #############################################################
    
    619 636
         #            Private Methods used in BuildStream            #
    
    620 637
         #############################################################
    

  • buildstream/utils.py
    ... ... @@ -1242,3 +1242,17 @@ def _deduplicate(iterable, key=None):
    1242 1242
     def _get_link_mtime(path):
    
    1243 1243
         path_stat = os.lstat(path)
    
    1244 1244
         return path_stat.st_mtime
    
    1245
    +
    
    1246
    +
    
    1247
    +# Returns the first directory to contain filename, or an empty string if
    
    1248
    +# none found
    
    1249
    +#
    
    1250
    +def _search_upward_for_file(directory, filename):
    
    1251
    +    directory = os.path.abspath(directory)
    
    1252
    +    while not os.path.isfile(os.path.join(directory, filename)):
    
    1253
    +        parent_dir = os.path.dirname(directory)
    
    1254
    +        if directory == parent_dir:
    
    1255
    +            return ""
    
    1256
    +        directory = parent_dir
    
    1257
    +
    
    1258
    +    return directory

  • tests/artifactcache/pull.py
    ... ... @@ -25,6 +25,17 @@ def message_handler(message, context):
    25 25
         pass
    
    26 26
     
    
    27 27
     
    
    28
    +# Since parent processes wait for queue events, we need
    
    29
    +# to put something on it if the called process raises an
    
    30
    +# exception.
    
    31
    +def _queue_wrapper(target, queue, *args):
    
    32
    +    try:
    
    33
    +        target(*args, queue=queue)
    
    34
    +    except Exception as e:
    
    35
    +        queue.put(str(e))
    
    36
    +        raise
    
    37
    +
    
    38
    +
    
    28 39
     def tree_maker(cas, tree, directory):
    
    29 40
         if tree.root.ByteSize() == 0:
    
    30 41
             tree.root.CopyFrom(directory)
    
    ... ... @@ -97,9 +108,9 @@ def test_pull(cli, tmpdir, datafiles):
    97 108
             queue = multiprocessing.Queue()
    
    98 109
             # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    
    99 110
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    100
    -        process = multiprocessing.Process(target=_test_pull,
    
    101
    -                                          args=(user_config_file, project_dir, artifact_dir,
    
    102
    -                                                'target.bst', element_key, queue))
    
    111
    +        process = multiprocessing.Process(target=_queue_wrapper,
    
    112
    +                                          args=(_test_pull, queue, user_config_file, project_dir,
    
    113
    +                                                artifact_dir, 'target.bst', element_key))
    
    103 114
     
    
    104 115
             try:
    
    105 116
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -205,9 +216,9 @@ def test_pull_tree(cli, tmpdir, datafiles):
    205 216
             queue = multiprocessing.Queue()
    
    206 217
             # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    
    207 218
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    208
    -        process = multiprocessing.Process(target=_test_push_tree,
    
    209
    -                                          args=(user_config_file, project_dir, artifact_dir,
    
    210
    -                                                artifact_digest, queue))
    
    219
    +        process = multiprocessing.Process(target=_queue_wrapper,
    
    220
    +                                          args=(_test_push_tree, queue, user_config_file, project_dir,
    
    221
    +                                                artifact_dir, artifact_digest))
    
    211 222
     
    
    212 223
             try:
    
    213 224
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -233,9 +244,9 @@ def test_pull_tree(cli, tmpdir, datafiles):
    233 244
     
    
    234 245
             queue = multiprocessing.Queue()
    
    235 246
             # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    
    236
    -        process = multiprocessing.Process(target=_test_pull_tree,
    
    237
    -                                          args=(user_config_file, project_dir, artifact_dir,
    
    238
    -                                                tree_digest, queue))
    
    247
    +        process = multiprocessing.Process(target=_queue_wrapper,
    
    248
    +                                          args=(_test_pull_tree, queue, user_config_file, project_dir,
    
    249
    +                                                artifact_dir, tree_digest))
    
    239 250
     
    
    240 251
             try:
    
    241 252
                 # Keep SIGINT blocked in the child process
    

  • tests/artifactcache/push.py
    ... ... @@ -26,6 +26,17 @@ def message_handler(message, context):
    26 26
         pass
    
    27 27
     
    
    28 28
     
    
    29
    +# Since parent processes wait for queue events, we need
    
    30
    +# to put something on it if the called process raises an
    
    31
    +# exception.
    
    32
    +def _queue_wrapper(target, queue, *args):
    
    33
    +    try:
    
    34
    +        target(*args, queue=queue)
    
    35
    +    except Exception as e:
    
    36
    +        queue.put(str(e))
    
    37
    +        raise
    
    38
    +
    
    39
    +
    
    29 40
     @pytest.mark.datafiles(DATA_DIR)
    
    30 41
     def test_push(cli, tmpdir, datafiles):
    
    31 42
         project_dir = str(datafiles)
    
    ... ... @@ -76,9 +87,9 @@ def test_push(cli, tmpdir, datafiles):
    76 87
             queue = multiprocessing.Queue()
    
    77 88
             # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    
    78 89
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    79
    -        process = multiprocessing.Process(target=_test_push,
    
    80
    -                                          args=(user_config_file, project_dir, artifact_dir,
    
    81
    -                                                'target.bst', element_key, queue))
    
    90
    +        process = multiprocessing.Process(target=_queue_wrapper,
    
    91
    +                                          args=(_test_push, queue, user_config_file, project_dir,
    
    92
    +                                                artifact_dir, 'target.bst', element_key))
    
    82 93
     
    
    83 94
             try:
    
    84 95
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -185,9 +196,9 @@ def test_push_directory(cli, tmpdir, datafiles):
    185 196
             queue = multiprocessing.Queue()
    
    186 197
             # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    
    187 198
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    188
    -        process = multiprocessing.Process(target=_test_push_directory,
    
    189
    -                                          args=(user_config_file, project_dir, artifact_dir,
    
    190
    -                                                artifact_digest, queue))
    
    199
    +        process = multiprocessing.Process(target=_queue_wrapper,
    
    200
    +                                          args=(_test_push_directory, queue, user_config_file,
    
    201
    +                                                project_dir, artifact_dir, artifact_digest))
    
    191 202
     
    
    192 203
             try:
    
    193 204
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -260,8 +271,9 @@ def test_push_message(cli, tmpdir, datafiles):
    260 271
             queue = multiprocessing.Queue()
    
    261 272
             # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    
    262 273
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    263
    -        process = multiprocessing.Process(target=_test_push_message,
    
    264
    -                                          args=(user_config_file, project_dir, artifact_dir, queue))
    
    274
    +        process = multiprocessing.Process(target=_queue_wrapper,
    
    275
    +                                          args=(_test_push_message, queue, user_config_file,
    
    276
    +                                                project_dir, artifact_dir))
    
    265 277
     
    
    266 278
             try:
    
    267 279
                 # Keep SIGINT blocked in the child process
    

  • tests/frontend/workspace.py
    ... ... @@ -31,6 +31,7 @@ import shutil
    31 31
     import subprocess
    
    32 32
     from ruamel.yaml.comments import CommentedSet
    
    33 33
     from tests.testutils import cli, create_repo, ALL_REPO_KINDS, wait_for_cache_granularity
    
    34
    +from tests.testutils import create_artifact_share
    
    34 35
     
    
    35 36
     from buildstream import _yaml
    
    36 37
     from buildstream._exceptions import ErrorDomain, LoadError, LoadErrorReason
    
    ... ... @@ -615,9 +616,12 @@ def test_list(cli, tmpdir, datafiles):
    615 616
     @pytest.mark.datafiles(DATA_DIR)
    
    616 617
     @pytest.mark.parametrize("kind", repo_kinds)
    
    617 618
     @pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
    
    618
    -def test_build(cli, tmpdir, datafiles, kind, strict):
    
    619
    +@pytest.mark.parametrize("call_from", [("project"), ("workspace")])
    
    620
    +def test_build(cli, tmpdir_factory, datafiles, kind, strict, call_from):
    
    621
    +    tmpdir = tmpdir_factory.mktemp('')
    
    619 622
         element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    620 623
         checkout = os.path.join(str(tmpdir), 'checkout')
    
    624
    +    args_pre = ['-C', workspace] if call_from == "workspace" else []
    
    621 625
     
    
    622 626
         # Modify workspace
    
    623 627
         shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
    
    ... ... @@ -640,15 +644,14 @@ def test_build(cli, tmpdir, datafiles, kind, strict):
    640 644
         # Build modified workspace
    
    641 645
         assert cli.get_element_state(project, element_name) == 'buildable'
    
    642 646
         assert cli.get_element_key(project, element_name) == "{:?<64}".format('')
    
    643
    -    result = cli.run(project=project, args=['build', element_name])
    
    647
    +    result = cli.run(project=project, args=args_pre + ['build', element_name])
    
    644 648
         result.assert_success()
    
    645 649
         assert cli.get_element_state(project, element_name) == 'cached'
    
    646 650
         assert cli.get_element_key(project, element_name) != "{:?<64}".format('')
    
    647 651
     
    
    648 652
         # Checkout the result
    
    649
    -    result = cli.run(project=project, args=[
    
    650
    -        'checkout', element_name, checkout
    
    651
    -    ])
    
    653
    +    result = cli.run(project=project,
    
    654
    +                     args=args_pre + ['checkout', element_name, checkout])
    
    652 655
         result.assert_success()
    
    653 656
     
    
    654 657
         # Check that the pony.conf from the modified workspace exists
    
    ... ... @@ -1055,3 +1058,131 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles):
    1055 1058
             result = cli.run(project=project, args=["build", element_name])
    
    1056 1059
             assert "BUG" not in result.stderr
    
    1057 1060
             assert cli.get_element_state(project, element_name) != "cached"
    
    1061
    +
    
    1062
    +
    
    1063
    +@pytest.mark.datafiles(DATA_DIR)
    
    1064
    +def test_external_fetch(cli, datafiles, tmpdir_factory):
    
    1065
    +    # Fetching from a workspace outside a project doesn't fail horribly
    
    1066
    +    tmpdir = tmpdir_factory.mktemp('')
    
    1067
    +    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
    
    1068
    +
    
    1069
    +    result = cli.run(project=project, args=['-C', workspace, 'fetch', element_name])
    
    1070
    +    result.assert_success()
    
    1071
    +
    
    1072
    +    # We already fetched it by opening the workspace, but we're also checking
    
    1073
    +    # `bst show` works here
    
    1074
    +    assert cli.get_element_state(project, element_name) == 'buildable'
    
    1075
    +
    
    1076
    +
    
    1077
    +@pytest.mark.datafiles(DATA_DIR)
    
    1078
    +def test_external_push_pull(cli, datafiles, tmpdir_factory):
    
    1079
    +    # Pushing and pulling to/from an artifact cache works from an external workspace
    
    1080
    +    tmpdir = tmpdir_factory.mktemp('')
    
    1081
    +    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
    
    1082
    +
    
    1083
    +    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
    
    1084
    +        result = cli.run(project=project, args=['-C', workspace, 'build', element_name])
    
    1085
    +        result.assert_success()
    
    1086
    +
    
    1087
    +        cli.configure({
    
    1088
    +            'artifacts': {'url': share.repo, 'push': True}
    
    1089
    +        })
    
    1090
    +
    
    1091
    +        result = cli.run(project=project, args=['-C', workspace, 'push', element_name])
    
    1092
    +        result.assert_success()
    
    1093
    +
    
    1094
    +        result = cli.run(project=project, args=['-C', workspace, 'pull', '--deps', 'all', element_name])
    
    1095
    +        result.assert_success()
    
    1096
    +
    
    1097
    +
    
    1098
    +@pytest.mark.datafiles(DATA_DIR)
    
    1099
    +def test_external_track(cli, datafiles, tmpdir_factory):
    
    1100
    +    # Tracking does not get horribly confused
    
    1101
    +    tmpdir = tmpdir_factory.mktemp('')
    
    1102
    +    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", True)
    
    1103
    +
    
    1104
    +    # The workspace is necessarily already tracked, so we only care that
    
    1105
    +    # there's no weird errors.
    
    1106
    +    result = cli.run(project=project, args=['-C', workspace, 'track', element_name])
    
    1107
    +    result.assert_success()
    
    1108
    +
    
    1109
    +
    
    1110
    +@pytest.mark.datafiles(DATA_DIR)
    
    1111
    +def test_external_open_other(cli, datafiles, tmpdir_factory):
    
    1112
+    # From inside an external workspace, open another workspace
    
    1113
    +    tmpdir1 = tmpdir_factory.mktemp('')
    
    1114
    +    tmpdir2 = tmpdir_factory.mktemp('')
    
    1115
    +    # Making use of the assumption that it's the same project in both invocations of open_workspace
    
    1116
    +    alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
    
    1117
    +    beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
    
    1118
    +
    
    1119
    +    # Closing the other element first, because I'm too lazy to create an
    
    1120
    +    # element without opening it
    
    1121
    +    result = cli.run(project=project, args=['workspace', 'close', beta_element])
    
    1122
    +    result.assert_success()
    
    1123
    +
    
    1124
    +    result = cli.run(project=project, args=[
    
    1125
    +        '-C', alpha_workspace, 'workspace', 'open', '--force', '--directory', beta_workspace, beta_element
    
    1126
    +    ])
    
    1127
    +    result.assert_success()
    
    1128
    +
    
    1129
    +
    
    1130
    +@pytest.mark.datafiles(DATA_DIR)
    
    1131
    +def test_external_close_other(cli, datafiles, tmpdir_factory):
    
    1132
+    # From inside an external workspace, close the other workspace
    
    1133
    +    tmpdir1 = tmpdir_factory.mktemp('')
    
    1134
    +    tmpdir2 = tmpdir_factory.mktemp('')
    
    1135
    +    # Making use of the assumption that it's the same project in both invocations of open_workspace
    
    1136
    +    alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
    
    1137
    +    beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
    
    1138
    +
    
    1139
    +    result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close', beta_element])
    
    1140
    +    result.assert_success()
    
    1141
    +
    
    1142
    +
    
    1143
    +@pytest.mark.datafiles(DATA_DIR)
    
    1144
    +def test_external_close_self(cli, datafiles, tmpdir_factory):
    
    1145
+    # From inside an external workspace, close it
    
    1146
    +    tmpdir1 = tmpdir_factory.mktemp('')
    
    1147
    +    tmpdir2 = tmpdir_factory.mktemp('')
    
    1148
    +    # Making use of the assumption that it's the same project in both invocations of open_workspace
    
    1149
    +    alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
    
    1150
    +    beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
    
    1151
    +
    
    1152
    +    result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close', alpha_element])
    
    1153
    +    result.assert_success()
    
    1154
    +
    
    1155
    +
    
    1156
    +@pytest.mark.datafiles(DATA_DIR)
    
    1157
    +def test_external_reset_other(cli, datafiles, tmpdir_factory):
    
    1158
    +    tmpdir1 = tmpdir_factory.mktemp('')
    
    1159
    +    tmpdir2 = tmpdir_factory.mktemp('')
    
    1160
    +    # Making use of the assumption that it's the same project in both invocations of open_workspace
    
    1161
    +    alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
    
    1162
    +    beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
    
    1163
    +
    
    1164
    +    result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'reset', beta_element])
    
    1165
    +    result.assert_success()
    
    1166
    +
    
    1167
    +
    
    1168
    +@pytest.mark.datafiles(DATA_DIR)
    
    1169
    +def test_external_reset_self(cli, datafiles, tmpdir):
    
    1170
    +    element, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
    
    1171
    +
    
    1172
    +    # Command succeeds
    
    1173
    +    result = cli.run(project=project, args=['-C', workspace, 'workspace', 'reset', element])
    
    1174
    +    result.assert_success()
    
    1175
    +
    
    1176
    +    # Successive commands still work (i.e. .bstproject.yaml hasn't been deleted)
    
    1177
    +    result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
    
    1178
    +    result.assert_success()
    
    1179
    +
    
    1180
    +
    
    1181
    +@pytest.mark.datafiles(DATA_DIR)
    
    1182
    +def test_external_list(cli, datafiles, tmpdir_factory):
    
    1183
    +    tmpdir = tmpdir_factory.mktemp('')
    
    1184
    +    # Making use of the assumption that it's the same project in both invocations of open_workspace
    
    1185
    +    element, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
    
    1186
    +
    
    1187
    +    result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
    
    1188
    +    result.assert_success()

  • tests/integration/manual.py
    ... ... @@ -128,3 +128,28 @@ def test_manual_element_noparallel(cli, tmpdir, datafiles):
    128 128
         assert text == """-j1 -Wall
    
    129 129
     2
    
    130 130
     """
    
    131
    +
    
    132
    +
    
    133
    +@pytest.mark.datafiles(DATA_DIR)
    
    134
    +@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    
    135
    +def test_manual_element_logging(cli, tmpdir, datafiles):
    
    136
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    137
    +    checkout = os.path.join(cli.directory, 'checkout')
    
    138
    +    element_path = os.path.join(project, 'elements')
    
    139
    +    element_name = 'import/import.bst'
    
    140
    +
    
    141
    +    create_manual_element(element_name, element_path, {
    
    142
    +        'configure-commands': ["echo configure"],
    
    143
    +        'build-commands': ["echo build"],
    
    144
    +        'install-commands': ["echo install"],
    
    145
    +        'strip-commands': ["echo strip"]
    
    146
    +    }, {}, {})
    
    147
    +
    
    148
    +    res = cli.run(project=project, args=['build', element_name])
    
    149
    +    assert res.exit_code == 0
    
    150
    +
    
    151
    +    # Verify that individual commands are logged
    
    152
    +    assert "echo configure" in res.stderr
    
    153
    +    assert "echo build" in res.stderr
    
    154
    +    assert "echo install" in res.stderr
    
    155
    +    assert "echo strip" in res.stderr

  • tests/integration/sandbox-bwrap.py
    ... ... @@ -58,5 +58,5 @@ def test_sandbox_bwrap_return_subprocess(cli, tmpdir, datafiles):
    58 58
         })
    
    59 59
     
    
    60 60
         result = cli.run(project=project, args=['build', element_name])
    
    61
    -    result.assert_task_error(error_domain=ErrorDomain.ELEMENT, error_reason=None)
    
    61
    +    result.assert_task_error(error_domain=ErrorDomain.SANDBOX, error_reason="command-failed")
    
    62 62
         assert "sandbox-bwrap/command-exit-42.bst|Command 'exit 42' failed with exitcode 42" in result.stderr

  • tests/integration/shell.py
    ... ... @@ -353,3 +353,29 @@ def test_integration_devices(cli, tmpdir, datafiles):
    353 353
     
    
    354 354
         result = execute_shell(cli, project, ["true"], element=element_name)
    
    355 355
         assert result.exit_code == 0
    
    356
    +
    
    357
    +
    
    358
    +# Test that a shell can be opened from an external workspace
    
    359
    +@pytest.mark.datafiles(DATA_DIR)
    
    360
    +@pytest.mark.parametrize("build_shell", [("build"), ("nobuild")])
    
    361
    +@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    
    362
    +def test_integration_external_workspace(cli, tmpdir_factory, datafiles, build_shell):
    
    363
    +    tmpdir = tmpdir_factory.mktemp("")
    
    364
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    365
    +    element_name = 'autotools/amhello.bst'
    
    366
    +    workspace_dir = os.path.join(str(tmpdir), 'workspace')
    
    367
    +
    
    368
    +    result = cli.run(project=project, args=[
    
    369
    +        'workspace', 'open', '--directory', workspace_dir, element_name
    
    370
    +    ])
    
    371
    +    result.assert_success()
    
    372
    +
    
    373
    +    result = cli.run(project=project, args=['-C', workspace_dir, 'build', element_name])
    
    374
    +    result.assert_success()
    
    375
    +
    
    376
    +    command = ['shell']
    
    377
    +    if build_shell == 'build':
    
    378
    +        command.append('--build')
    
    379
    +    command.extend([element_name, '--', 'true'])
    
    380
    +    result = cli.run(project=project, cwd=workspace_dir, args=command)
    
    381
    +    result.assert_success()

  • tests/testutils/artifactshare.py
    ... ... @@ -67,19 +67,24 @@ class ArtifactShare():
    67 67
         def run(self, q):
    
    68 68
             pytest_cov.embed.cleanup_on_sigterm()
    
    69 69
     
    
    70
    -        # Optionally mock statvfs
    
    71
    -        if self.total_space:
    
    72
    -            if self.free_space is None:
    
    73
    -                self.free_space = self.total_space
    
    74
    -            os.statvfs = self._mock_statvfs
    
    70
    +        try:
    
    71
    +            # Optionally mock statvfs
    
    72
    +            if self.total_space:
    
    73
    +                if self.free_space is None:
    
    74
    +                    self.free_space = self.total_space
    
    75
    +                os.statvfs = self._mock_statvfs
    
    76
    +
    
    77
    +            server = create_server(self.repodir, enable_push=True)
    
    78
    +            port = server.add_insecure_port('localhost:0')
    
    75 79
     
    
    76
    -        server = create_server(self.repodir, enable_push=True)
    
    77
    -        port = server.add_insecure_port('localhost:0')
    
    80
    +            server.start()
    
    78 81
     
    
    79
    -        server.start()
    
    82
    +            # Send port to parent
    
    83
    +            q.put(port)
    
    80 84
     
    
    81
    -        # Send port to parent
    
    82
    -        q.put(port)
    
    85
    +        except Exception as e:
    
    86
    +            q.put(None)
    
    87
    +            raise
    
    83 88
     
    
    84 89
             # Sleep until termination by signal
    
    85 90
             signal.pause()
    



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]