[Notes] [Git][BuildStream/buildstream][463-make-dependency-type-default-to-build] 15 commits: Keep original flags for create in SafeHardlinks.




Jonathan Maw pushed to branch 463-make-dependency-type-default-to-build at BuildStream / buildstream

Commits:

23 changed files:

Changes:

  • NEWS
    @@ -17,6 +17,10 @@ buildstream 1.1.5
          to not be scheduled and fail during artifact assembly,
          and display the retry prompt during an interactive session.

    +  o Due to enabling the use of relative workspaces, "Legacy" workspaces
    +    may need to be closed and remade before the changes will affect them.
    +    Downgrading after using this feature may result in workspaces
    +    not functioning correctly

     =================
     buildstream 1.1.4

  • buildstream/_frontend/widget.py
    @@ -418,7 +418,9 @@ class LogLine(Widget):
                 if "%{workspace-dirs" in format_:
                     workspace = element._get_workspace()
                     if workspace is not None:
    -                    path = workspace.path.replace(os.getenv('HOME', '/root'), '~')
    +                    path = workspace.get_absolute_path()
    +                    if path.startswith("~/"):
    +                        path = os.path.join(os.getenv('HOME', '/root'), path[2:])
                         line = p.fmt_subst(line, 'workspace-dirs', "Workspace: {}".format(path))
                     else:
                         line = p.fmt_subst(
    
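    A rough standalone illustration of the display logic above (not the widget code itself, and assuming a stored path that begins with "~/"): the leading "~/" is expanded against $HOME, falling back to /root, before the path is printed.

        import os

        # Hypothetical helper, not part of BuildStream: expand a leading "~/"
        # against $HOME (falling back to /root) before displaying the path.
        def display_path(path):
            if path.startswith("~/"):
                path = os.path.join(os.getenv('HOME', '/root'), path[2:])
            return path

        print(display_path("~/workspaces/hello"))     # e.g. /home/user/workspaces/hello
        print(display_path("/srv/workspaces/hello"))  # unchanged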

  • buildstream/_fuse/fuse.py
    @@ -757,7 +757,11 @@ class FUSE(object):
             if self.raw_fi:
                 return self.operations('create', path, mode, fi)
             else:
    -            fi.fh = self.operations('create', path, mode)
    +            # This line is different from upstream to fix issues
    +            # reading file opened with O_CREAT|O_RDWR.
    +            # See issue #143.
    +            fi.fh = self.operations('create', path, mode, fi.flags)
    +            # END OF MODIFICATION
                 return 0

         def ftruncate(self, path, length, fip):
    

  • buildstream/_fuse/hardlinks.py
    @@ -185,12 +185,12 @@ class SafeHardlinkOps(Operations):

             return os.open(full_path, flags)

    -    def create(self, path, mode, fi=None):
    +    def create(self, path, mode, flags):
             full_path = self._full_path(path)

             # If it already exists, ensure it's a copy first
             self._ensure_copy(full_path)
    -        return os.open(full_path, os.O_WRONLY | os.O_CREAT, mode)
    +        return os.open(full_path, flags, mode)

         def read(self, path, length, offset, fh):
             os.lseek(fh, offset, os.SEEK_SET)
    
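    The motivation for honouring the caller's flags can be reproduced outside of FUSE: a descriptor opened O_WRONLY cannot be read back, so a create() handler that ignores O_RDWR breaks read-after-create. A minimal standalone sketch (plain os calls, not BuildStream code):

        import os
        import tempfile

        path = os.path.join(tempfile.mkdtemp(), "demo")

        # Old behaviour: create() always opened O_WRONLY, so reading back fails.
        fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o644)
        os.write(fd, b"hello")
        try:
            os.lseek(fd, 0, os.SEEK_SET)
            os.read(fd, 5)
        except OSError as e:
            print("read on O_WRONLY fd failed:", e)   # [Errno 9] Bad file descriptor
        os.close(fd)

        # New behaviour: passing the caller's flags (e.g. O_CREAT|O_RDWR) through
        # yields a descriptor that supports both writing and reading back.
        fd = os.open(path, os.O_RDWR | os.O_CREAT, 0o644)
        os.lseek(fd, 0, os.SEEK_SET)
        print(os.read(fd, 5))                         # b'hello'
        os.close(fd)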

  • buildstream/_loader/loadelement.py
    @@ -71,6 +71,7 @@ class LoadElement():
                 'kind', 'depends', 'sources', 'sandbox',
                 'variables', 'environment', 'environment-nocache',
                 'config', 'public', 'description',
    +            'build-depends', 'runtime-depends',
             ])

             # Extract the Dependencies

    @@ -127,28 +128,44 @@ class LoadElement():
     # Returns:
     #    (list): a list of Dependency objects
     #
    -def _extract_depends_from_node(node):
    -    depends = _yaml.node_get(node, list, Symbol.DEPENDS, default_value=[])
    +def _extract_depends_from_node(node, *, key=None):
    +    if key is None:
    +        build_depends = _extract_depends_from_node(node, key=Symbol.BUILD_DEPENDS)
    +        runtime_depends = _extract_depends_from_node(node, key=Symbol.RUNTIME_DEPENDS)
    +        depends = _extract_depends_from_node(node, key=Symbol.DEPENDS)
    +        return build_depends + runtime_depends + depends
    +    elif key == Symbol.BUILD_DEPENDS:
    +        default_dep_type = Symbol.BUILD
    +    elif key == Symbol.RUNTIME_DEPENDS:
    +        default_dep_type = Symbol.RUNTIME
    +    elif key == Symbol.DEPENDS:
    +        default_dep_type = None
    +
    +    depends = _yaml.node_get(node, list, key, default_value=[])
         output_deps = []

         for dep in depends:
    -        dep_provenance = _yaml.node_get_provenance(node, key=Symbol.DEPENDS, indices=[depends.index(dep)])
    +        dep_provenance = _yaml.node_get_provenance(node, key=key, indices=[depends.index(dep)])

             if isinstance(dep, str):
    -            dependency = Dependency(dep, provenance=dep_provenance)
    +            dependency = Dependency(dep, provenance=dep_provenance, dep_type=default_dep_type)

             elif isinstance(dep, Mapping):
    -            _yaml.node_validate(dep, ['filename', 'type', 'junction'])
    -
    -            # Make type optional, for this we set it to None
    -            dep_type = _yaml.node_get(dep, str, Symbol.TYPE, default_value=None)
    -            if dep_type is None or dep_type == Symbol.ALL:
    -                dep_type = None
    -            elif dep_type not in [Symbol.BUILD, Symbol.RUNTIME]:
    -                provenance = _yaml.node_get_provenance(dep, key=Symbol.TYPE)
    -                raise LoadError(LoadErrorReason.INVALID_DATA,
    -                                "{}: Dependency type '{}' is not 'build', 'runtime' or 'all'"
    -                                .format(provenance, dep_type))
    +            if default_dep_type:
    +                _yaml.node_validate(dep, ['filename', 'junction'])
    +                dep_type = default_dep_type
    +            else:
    +                _yaml.node_validate(dep, ['filename', 'type', 'junction'])
    +
    +                # Make type optional, for this we set it to None
    +                dep_type = _yaml.node_get(dep, str, Symbol.TYPE, default_value=None)
    +                if dep_type is None or dep_type == Symbol.ALL:
    +                    dep_type = None
    +                elif dep_type not in [Symbol.BUILD, Symbol.RUNTIME]:
    +                    provenance = _yaml.node_get_provenance(dep, key=Symbol.TYPE)
    +                    raise LoadError(LoadErrorReason.INVALID_DATA,
    +                                    "{}: Dependency type '{}' is not 'build', 'runtime' or 'all'"
    +                                    .format(provenance, dep_type))

             filename = _yaml.node_get(dep, str, Symbol.FILENAME)
             junction = _yaml.node_get(dep, str, Symbol.JUNCTION, default_value=None)

    @@ -159,13 +176,13 @@ def _extract_depends_from_node(node):

             else:
                 index = depends.index(dep)
    -            p = _yaml.node_get_provenance(node, key=Symbol.DEPENDS, indices=[index])
    +            p = _yaml.node_get_provenance(node, key=key, indices=[index])
                 raise LoadError(LoadErrorReason.INVALID_DATA,
                                 "{}: Dependency is not specified as a string or a dictionary".format(p))

             output_deps.append(dependency)

    -    # Now delete "depends", we dont want it anymore
    -    del node[Symbol.DEPENDS]
    +    # Now delete the field, we dont want it anymore
    +    del node[key]

         return output_deps

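    In plainer terms, each dependency list now carries a default type, and only plain "depends" entries may still specify an explicit "type" field. A simplified standalone sketch of that dispatch (the names and plain-dict handling here are illustrative, not the loader's real API, which works on parsed YAML nodes):

        # Illustrative only; the real loader validates nodes and reports provenance.
        DEFAULT_TYPES = {
            'build-depends': 'build',      # entries are always build-only
            'runtime-depends': 'runtime',  # entries are always runtime-only
            'depends': None,               # type is optional per entry, default "all"
        }

        def dependency_type(key, entry):
            default = DEFAULT_TYPES[key]
            if default is not None:
                # 'type' is not permitted under build-depends / runtime-depends
                return default
            if isinstance(entry, dict):
                return entry.get('type', 'all')
            return 'all'

        print(dependency_type('build-depends', 'element1.bst'))                      # build
        print(dependency_type('depends', {'filename': 'e.bst', 'type': 'runtime'}))  # runtime
        print(dependency_type('depends', 'element2.bst'))                            # all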
  • buildstream/_loader/types.py
    @@ -26,6 +26,8 @@ class Symbol():
         FILENAME = "filename"
         KIND = "kind"
         DEPENDS = "depends"
    +    BUILD_DEPENDS = "build-depends"
    +    RUNTIME_DEPENDS = "runtime-depends"
         SOURCES = "sources"
         CONFIG = "config"
         VARIABLES = "variables"
    

  • buildstream/_project.py
    @@ -19,7 +19,6 @@
     #        Tiago Gomes <tiago gomes codethink co uk>

     import os
    -import multiprocessing  # for cpu_count()
     from collections import Mapping, OrderedDict
     from pluginbase import PluginBase
     from . import utils

    @@ -572,7 +571,10 @@ class Project():

             # Extend variables with automatic variables and option exports
             # Initialize it as a string as all variables are processed as strings.
    -        output.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
    +        # Based on some testing (mainly on AWS), maximum effective
    +        # max-jobs value seems to be around 8-10 if we have enough cores
    +        # users should set values based on workload and build infrastructure
    +        output.base_variables['max-jobs'] = str(min(len(os.sched_getaffinity(0)), 8))

             # Export options into variables, if that was requested
             output.options.export_variables(output.base_variables)
    
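    The new default also respects CPU affinity, which multiprocessing.cpu_count() does not: in a container or under taskset, os.sched_getaffinity(0) reports only the CPUs the process may actually use. A quick comparison sketch (Linux-only, since sched_getaffinity is not available on every platform):

        import multiprocessing
        import os

        # cpu_count() reports all CPUs installed in the machine ...
        print("installed CPUs:", multiprocessing.cpu_count())

        # ... while sched_getaffinity(0) reports only the CPUs this process is
        # allowed to run on (e.g. when restricted by taskset or a container).
        usable = len(os.sched_getaffinity(0))
        print("usable CPUs:  ", usable)

        # The default is capped at 8, matching the observation in the commit
        # that higher max-jobs values showed little benefit in testing.
        print("max-jobs:     ", min(usable, 8))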

  • buildstream/_stream.py
    @@ -460,7 +460,7 @@ class Stream():
                                                   selection=PipelineSelection.REDIRECT,
                                                   track_selection=PipelineSelection.REDIRECT)
             target = elements[0]
    -        workdir = os.path.abspath(directory)
    +        directory = os.path.abspath(directory)

             if not list(target.sources()):
                 build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]

    @@ -476,7 +476,7 @@ class Stream():
             workspace = workspaces.get_workspace(target._get_full_name())
             if workspace and not force:
                 raise StreamError("Workspace '{}' is already defined at: {}"
    -                              .format(target.name, workspace.path))
    +                              .format(target.name, workspace.get_absolute_path()))

             # If we're going to checkout, we need at least a fetch,
             # if we were asked to track first, we're going to fetch anyway.

    @@ -502,7 +502,7 @@ class Stream():
             except OSError as e:
                 raise StreamError("Failed to create workspace directory: {}".format(e)) from e

    -        workspaces.create_workspace(target._get_full_name(), workdir)
    +        workspaces.create_workspace(target._get_full_name(), directory)

             if not no_checkout:
                 with target.timed_activity("Staging sources to {}".format(directory)):

    @@ -526,12 +526,12 @@ class Stream():
             # Remove workspace directory if prompted
             if remove_dir:
                 with self._context.timed_activity("Removing workspace directory {}"
    -                                              .format(workspace.path)):
    +                                              .format(workspace.get_absolute_path())):
                     try:
    -                    shutil.rmtree(workspace.path)
    +                    shutil.rmtree(workspace.get_absolute_path())
                     except OSError as e:
                         raise StreamError("Could not remove  '{}': {}"
    -                                      .format(workspace.path, e)) from e
    +                                      .format(workspace.get_absolute_path(), e)) from e

             # Delete the workspace and save the configuration
             workspaces.delete_workspace(element_name)

    @@ -574,28 +574,30 @@ class Stream():

             for element in elements:
                 workspace = workspaces.get_workspace(element._get_full_name())
    -
    +            workspace_path = workspace.get_absolute_path()
                 if soft:
                     workspace.prepared = False
                     self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
    -                              .format(element.name, workspace.path))
    +                              .format(element.name, workspace_path))
                     continue

                 with element.timed_activity("Removing workspace directory {}"
    -                                        .format(workspace.path)):
    +                                        .format(workspace_path)):
                     try:
    -                    shutil.rmtree(workspace.path)
    +                    shutil.rmtree(workspace_path)
                     except OSError as e:
                         raise StreamError("Could not remove  '{}': {}"
    -                                      .format(workspace.path, e)) from e
    +                                      .format(workspace_path, e)) from e

                 workspaces.delete_workspace(element._get_full_name())
    -            workspaces.create_workspace(element._get_full_name(), workspace.path)
    +            workspaces.create_workspace(element._get_full_name(), workspace_path)

    -            with element.timed_activity("Staging sources to {}".format(workspace.path)):
    +            with element.timed_activity("Staging sources to {}".format(workspace_path)):
                     element._open_workspace()

    -            self._message(MessageType.INFO, "Reset workspace for {} at: {}".format(element.name, workspace.path))
    +            self._message(MessageType.INFO,
    +                          "Reset workspace for {} at: {}".format(element.name,
    +                                                                 workspace_path))

             workspaces.save_config()

    @@ -632,7 +634,7 @@ class Stream():
             for element_name, workspace_ in self._context.get_workspaces().list():
                 workspace_detail = {
                     'element': element_name,
    -                'directory': workspace_.path,
    +                'directory': workspace_.get_absolute_path(),
                 }
                 workspaces.append(workspace_detail)
    

  • buildstream/_versions.py
    @@ -23,7 +23,7 @@
     # This version is bumped whenever enhancements are made
     # to the `project.conf` format or the core element format.
     #
    -BST_FORMAT_VERSION = 13
    +BST_FORMAT_VERSION = 14


     # The base BuildStream artifact version
    

  • buildstream/_workspaces.py
    @@ -26,14 +26,6 @@ from ._exceptions import LoadError, LoadErrorReason

     BST_WORKSPACE_FORMAT_VERSION = 3

    -# Hold on to a list of members which get serialized
    -_WORKSPACE_MEMBERS = [
    -    'prepared',
    -    'path',
    -    'last_successful',
    -    'running_files'
    -]
    -

     # Workspace()
     #

    @@ -56,7 +48,7 @@ class Workspace():
         def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
             self.prepared = prepared
             self.last_successful = last_successful
    -        self.path = path
    +        self._path = path
             self.running_files = running_files if running_files is not None else {}

             self._toplevel_project = toplevel_project

    @@ -64,14 +56,20 @@ class Workspace():

         # to_dict()
         #
    -    # Convert this object to a dict for serialization purposes
    +    # Convert a list of members which get serialized to a dict for serialization purposes
         #
         # Returns:
         #     (dict) A dict representation of the workspace
         #
         def to_dict(self):
    -        return {key: val for key, val in self.__dict__.items()
    -                if key in _WORKSPACE_MEMBERS and val is not None}
    +        ret = {
    +            'prepared': self.prepared,
    +            'path': self._path,
    +            'running_files': self.running_files
    +        }
    +        if self.last_successful is not None:
    +            ret["last_successful"] = self.last_successful
    +        return ret

         # from_dict():
         #

    @@ -103,15 +101,7 @@ class Workspace():
         #    True if the workspace differs from 'other', otherwise False
         #
         def differs(self, other):
    -
    -        for member in _WORKSPACE_MEMBERS:
    -            member_a = getattr(self, member)
    -            member_b = getattr(other, member)
    -
    -            if member_a != member_b:
    -                return True
    -
    -        return False
    +        return self.to_dict() != other.to_dict()

         # invalidate_key()
         #

    @@ -133,7 +123,7 @@ class Workspace():
             if os.path.isdir(fullpath):
                 utils.copy_files(fullpath, directory)
             else:
    -            destfile = os.path.join(directory, os.path.basename(self.path))
    +            destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
                 utils.safe_copy(fullpath, destfile)

         # add_running_files()

    @@ -189,7 +179,7 @@ class Workspace():
                     filelist = utils.list_relative_paths(fullpath)
                     filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
                 else:
    -                filelist = [(self.path, fullpath)]
    +                filelist = [(self.get_absolute_path(), fullpath)]

                 self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]

    @@ -200,7 +190,7 @@ class Workspace():
         # Returns: The absolute path of the element's workspace.
         #
         def get_absolute_path(self):
    -        return os.path.join(self._toplevel_project.directory, self.path)
    +        return os.path.join(self._toplevel_project.directory, self._path)


     # Workspaces()

    @@ -236,6 +226,9 @@ class Workspaces():
         #    path (str) - The path in which the workspace should be kept
         #
         def create_workspace(self, element_name, path):
    +        if path.startswith(self._toplevel_project.directory):
    +            path = os.path.relpath(path, self._toplevel_project.directory)
    +
             self._workspaces[element_name] = Workspace(self._toplevel_project, path=path)

             return self._workspaces[element_name]
    
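    The effect of the create_workspace() change is that workspaces located inside the project are persisted as project-relative paths (so the project directory can be moved), while external workspaces keep their absolute paths. A rough standalone sketch under an assumed project directory:

        import os

        project_dir = "/home/user/project"   # assumed example path

        def store_path(path):
            # Paths inside the project are persisted relative to it
            if path.startswith(project_dir):
                return os.path.relpath(path, project_dir)
            return path

        def resolve_path(stored):
            # os.path.join() passes absolute (external) paths through unchanged
            return os.path.join(project_dir, stored)

        print(store_path("/home/user/project/workspace"))    # workspace
        print(resolve_path("workspace"))                     # /home/user/project/workspace
        print(resolve_path("/mnt/external/workspace"))       # /mnt/external/workspace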

  • buildstream/element.py
    @@ -1403,7 +1403,8 @@ class Element(Plugin):
                         # If mount_workspaces is set and we're doing incremental builds,
                         # the workspace is already mounted into the sandbox.
                         if not (mount_workspaces and self.__can_build_incrementally()):
    -                        with self.timed_activity("Staging local files at {}".format(workspace.path)):
    +                        with self.timed_activity("Staging local files at {}"
    +                                                 .format(workspace.get_absolute_path())):
                                 workspace.stage(temp_staging_directory)
                     else:
                         # No workspace, stage directly

    @@ -1566,7 +1567,7 @@ class Element(Plugin):
                             path_components = self.__staged_sources_directory.lstrip(os.sep).split(os.sep)
                             sandbox_vpath = sandbox_vroot.descend(path_components)
                             try:
    -                            sandbox_vpath.import_files(workspace.path)
    +                            sandbox_vpath.import_files(workspace.get_absolute_path())
                             except UtilError as e:
                                 self.warn("Failed to preserve workspace state for failed build sysroot: {}"
                                           .format(e))

    @@ -1893,7 +1894,7 @@ class Element(Plugin):
                     source._init_workspace(temp)

                 # Now hardlink the files into the workspace target.
    -            utils.link_files(temp, workspace.path)
    +            utils.link_files(temp, workspace.get_absolute_path())

         # _get_workspace():
         #
    

  • buildstream/sandbox/_sandboxbwrap.py
    @@ -89,6 +89,11 @@ class SandboxBwrap(Sandbox):
             # Grab the full path of the bwrap binary
             bwrap_command = [utils.get_host_tool('bwrap')]

    +        for k, v in env.items():
    +            bwrap_command += ['--setenv', k, v]
    +        for k in os.environ.keys() - env.keys():
    +            bwrap_command += ['--unsetenv', k]
    +
             # Create a new pid namespace, this also ensures that any subprocesses
             # are cleaned up when the bwrap process exits.
             bwrap_command += ['--unshare-pid']

    @@ -194,7 +199,7 @@ class SandboxBwrap(Sandbox):
                     stdin = stack.enter_context(open(os.devnull, "r"))

                 # Run bubblewrap !
    -            exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr, env,
    +            exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr,
                                            (flags & SandboxFlags.INTERACTIVE))

                 # Cleanup things which bwrap might have left behind, while

    @@ -245,7 +250,7 @@ class SandboxBwrap(Sandbox):

             return exit_code

    -    def run_bwrap(self, argv, stdin, stdout, stderr, env, interactive):
    +    def run_bwrap(self, argv, stdin, stdout, stderr, interactive):
             # Wrapper around subprocess.Popen() with common settings.
             #
             # This function blocks until the subprocess has terminated.

    @@ -321,7 +326,6 @@ class SandboxBwrap(Sandbox):
                     # The default is to share file descriptors from the parent process
                     # to the subprocess, which is rarely good for sandboxing.
                     close_fds=True,
    -                env=env,
                     stdin=stdin,
                     stdout=stdout,
                     stderr=stderr,
    
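    Rather than passing env= to subprocess.Popen(), the environment is now handed to bubblewrap itself via its --setenv/--unsetenv options, so bwrap runs with the host environment while only the sandboxed command sees the curated one. A minimal sketch of how that command line is assembled (example values, not BuildStream's actual environment):

        import os

        # Example sandbox environment; the real one comes from the element config.
        env = {'PATH': '/usr/bin:/bin', 'TERM': 'dumb'}

        bwrap_command = ['bwrap']

        # Set every variable the sandboxed command should see ...
        for k, v in env.items():
            bwrap_command += ['--setenv', k, v]

        # ... and explicitly unset host variables that are not part of it.
        for k in os.environ.keys() - env.keys():
            bwrap_command += ['--unsetenv', k]

        print(bwrap_command[:9])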

  • doc/source/format_declaring.rst
    @@ -98,6 +98,68 @@ relative filename to the elements they depend on here.
     See :ref:`format_dependencies` for more information on the dependency model.


    +.. _format_build_depends:
    +
    +Build-Depends
    +~~~~~~~~~~~~~
    +
    +.. code:: yaml
    +
    +   # Specify some build-dependencies
    +   build-depends:
    +   - element1.bst
    +   - element2.bst
    +
    +Build dependencies between elements can be specified with the ``build-depends`` attribute.
    +The above code snippet is equivalent to:
    +
    +.. code:: yaml
    +
    +   # Specify some build-dependencies
    +   depends:
    +   - filename: element1.bst
    +     type: build
    +   - filename: element2.bst
    +     type: build
    +
    +See :ref:`format_dependencies` for more information on the dependency model.
    +
    +.. note::
    +
    +   The ``build-depends`` configuration is available since :ref:`format version 14 <project_format_version>`
    +
    +
    +.. _format_runtime_depends:
    +
    +Runtime-Depends
    +~~~~~~~~~~~~~~~
    +
    +.. code:: yaml
    +
    +   # Specify some runtime-dependencies
    +   runtime-depends:
    +   - element1.bst
    +   - element2.bst
    +
    +Runtime dependencies between elements can be specified with the ``runtime-depends`` attribute.
    +The above code snippet is equivalent to:
    +
    +.. code:: yaml
    +
    +   # Specify some runtime-dependencies
    +   depends:
    +   - filename: element1.bst
    +     type: runtime
    +   - filename: element2.bst
    +     type: runtime
    +
    +See :ref:`format_dependencies` for more information on the dependency model.
    +
    +.. note::
    +
    +   The ``runtime-depends`` configuration is available since :ref:`format version 14 <project_format_version>`
    +
    +
     .. _format_sources:

     Sources

    @@ -276,8 +338,8 @@ attributes are suitable.

     .. note::

    -   Note the order in which element dependencies are declared in the ``depends``
    -   list is not meaningful.
    +   Note the order in which element dependencies are declared in the ``depends``,
    +   ``build-depends`` and ``runtime-depends`` lists are not meaningful.

     Dependency dictionary:

    @@ -299,6 +361,8 @@ Attributes:
     * ``type``

       This attribute is used to express the :ref:`dependency type <format_dependencies_types>`.
    +  This field is not permitted in :ref:`Build-Depends <format_build_depends>` or
    +  :ref:`Runtime-Depends <format_runtime_depends>`.

     * ``junction``
    

  • setup.py
    @@ -251,7 +251,7 @@ setup(name='BuildStream',
           install_requires=[
               'setuptools',
               'psutil',
    -          'ruamel.yaml',
    +          'ruamel.yaml <= 0.15',
               'pluginbase',
               'Click',
               'blessings',
    

  • tests/frontend/workspace.py
    @@ -18,12 +18,13 @@ DATA_DIR = os.path.join(
     )


    -def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
    -    project = os.path.join(datafiles.dirname, datafiles.basename)
    -    bin_files_path = os.path.join(project, 'files', 'bin-files')
    -    element_path = os.path.join(project, 'elements')
    +def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None):
    +    if not workspace_dir:
    +        workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
    +    project_path = os.path.join(datafiles.dirname, datafiles.basename)
    +    bin_files_path = os.path.join(project_path, 'files', 'bin-files')
    +    element_path = os.path.join(project_path, 'elements')
         element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
    -    workspace = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))

         # Create our repo object of the given source type with
         # the bin files, and then collect the initial ref.

    @@ -45,7 +46,7 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
                             element_name))

         # Assert that there is no reference, a track & fetch is needed
    -    state = cli.get_element_state(project, element_name)
    +    state = cli.get_element_state(project_path, element_name)
         if track:
             assert state == 'no reference'
         else:

    @@ -56,20 +57,20 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
         args = ['workspace', 'open']
         if track:
             args.append('--track')
    -    args.extend([element_name, workspace])
    +    args.extend([element_name, workspace_dir])
    +    result = cli.run(project=project_path, args=args)

    -    result = cli.run(project=project, args=args)
         result.assert_success()

         # Assert that we are now buildable because the source is
         # now cached.
    -    assert cli.get_element_state(project, element_name) == 'buildable'
    +    assert cli.get_element_state(project_path, element_name) == 'buildable'

         # Check that the executable hello file is found in the workspace
    -    filename = os.path.join(workspace, 'usr', 'bin', 'hello')
    +    filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
         assert os.path.exists(filename)

    -    return (element_name, project, workspace)
    +    return (element_name, project_path, workspace_dir)


     @pytest.mark.datafiles(DATA_DIR)

    @@ -190,6 +191,46 @@ def test_close(cli, tmpdir, datafiles, kind):
         assert not os.path.exists(workspace)


    +@pytest.mark.datafiles(DATA_DIR)
    +def test_close_external_after_move_project(cli, tmpdir, datafiles):
    +    tmp_parent = os.path.dirname(str(tmpdir))
    +    workspace_dir = os.path.join(tmp_parent, "workspace")
    +    element_name, project_path, _ = open_workspace(cli, tmpdir, datafiles, 'git', False, "", workspace_dir)
    +    assert os.path.exists(workspace_dir)
    +    tmp_dir = os.path.join(tmp_parent, 'external_project')
    +    shutil.move(project_path, tmp_dir)
    +    assert os.path.exists(tmp_dir)
    +
    +    # Close the workspace
    +    result = cli.run(configure=False, project=tmp_dir, args=[
    +        'workspace', 'close', '--remove-dir', element_name
    +    ])
    +    result.assert_success()
    +
    +    # Assert the workspace dir has been deleted
    +    assert not os.path.exists(workspace_dir)
    +    # Move directory back inside tmp directory so it can be recognised
    +    shutil.move(tmp_dir, project_path)
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_close_internal_after_move_project(cli, tmpdir, datafiles):
    +    element_name, project, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
    +    tmp_dir = os.path.join(os.path.dirname(str(tmpdir)), 'external_project')
    +    shutil.move(str(tmpdir), tmp_dir)
    +    assert os.path.exists(tmp_dir)
    +
    +    # Close the workspace
    +    result = cli.run(configure=False, project=tmp_dir, args=[
    +        'workspace', 'close', '--remove-dir', element_name
    +    ])
    +    result.assert_success()
    +
    +    # Assert the workspace dir has been deleted
    +    workspace = os.path.join(tmp_dir, 'workspace')
    +    assert not os.path.exists(workspace)
    +
    +
     @pytest.mark.datafiles(DATA_DIR)
     def test_close_removed(cli, tmpdir, datafiles):
         element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
    

  • tests/loader/dependencies.py
    @@ -3,6 +3,7 @@ import pytest

     from buildstream._exceptions import LoadError, LoadErrorReason
     from buildstream._loader import Loader, MetaElement
    +from tests.testutils import cli
     from . import make_loader

     DATA_DIR = os.path.join(

    @@ -27,7 +28,7 @@ def test_two_files(datafiles):
         assert(len(element.dependencies) == 1)
         firstdep = element.dependencies[0]
         assert(isinstance(firstdep, MetaElement))
    -    assert(firstdep.kind == 'thefirstdep')
    +    assert(firstdep.kind == 'manual')


     @pytest.mark.datafiles(DATA_DIR)

    @@ -47,7 +48,7 @@ def test_shared_dependency(datafiles):
         #
         firstdep = element.dependencies[0]
         assert(isinstance(firstdep, MetaElement))
    -    assert(firstdep.kind == 'thefirstdep')
    +    assert(firstdep.kind == 'manual')
         assert(len(firstdep.dependencies) == 0)

         # The second specified dependency is 'shareddep'

    @@ -86,7 +87,7 @@ def test_dependency_dict(datafiles):
         assert(len(element.dependencies) == 1)
         firstdep = element.dependencies[0]
         assert(isinstance(firstdep, MetaElement))
    -    assert(firstdep.kind == 'thefirstdep')
    +    assert(firstdep.kind == 'manual')


     @pytest.mark.datafiles(DATA_DIR)

    @@ -186,3 +187,49 @@ def test_all_dependency(datafiles):
         assert(isinstance(firstdep, MetaElement))
         firstbuilddep = element.build_dependencies[0]
         assert(firstdep == firstbuilddep)
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_list_build_dependency(cli, datafiles):
    +    project = str(datafiles)
    +
    +    # Check that the pipeline includes the build dependency
    +    deps = cli.get_pipeline(project, ['elements/builddep-list.bst'], scope="build")
    +    assert "elements/firstdep.bst" in deps
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_list_runtime_dependency(cli, datafiles):
    +    project = str(datafiles)
    +
    +    # Check that the pipeline includes the runtime dependency
    +    deps = cli.get_pipeline(project, ['elements/runtimedep-list.bst'], scope="run")
    +    assert "elements/firstdep.bst" in deps
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_list_dependencies_combined(cli, datafiles):
    +    project = str(datafiles)
    +
    +    # Check that runtime deps get combined
    +    rundeps = cli.get_pipeline(project, ['elements/list-combine.bst'], scope="run")
    +    assert "elements/firstdep.bst" not in rundeps
    +    assert "elements/seconddep.bst" in rundeps
    +    assert "elements/thirddep.bst" in rundeps
    +
    +    # Check that build deps get combined
    +    builddeps = cli.get_pipeline(project, ['elements/list-combine.bst'], scope="build")
    +    assert "elements/firstdep.bst" in builddeps
    +    assert "elements/seconddep.bst" not in builddeps
    +    assert "elements/thirddep.bst" in builddeps
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_list_overlap(cli, datafiles):
    +    project = str(datafiles)
    +
    +    # Check that dependencies get merged
    +    rundeps = cli.get_pipeline(project, ['elements/list-overlap.bst'], scope="run")
    +    assert "elements/firstdep.bst" in rundeps
    +    builddeps = cli.get_pipeline(project, ['elements/list-overlap.bst'], scope="build")
    +    assert "elements/firstdep.bst" in builddeps

  • tests/loader/dependencies/elements/builddep-list.bst
    +kind: stack
    +description: This element has a build-only dependency specified via build-depends
    +build-depends:
    +  - elements/firstdep.bst

  • tests/loader/dependencies/elements/firstdep.bst
    -kind: thefirstdep
    +kind: manual
     description: This is the first dependency

  • tests/loader/dependencies/elements/list-combine.bst
    +kind: stack
    +description: This element depends on three elements in different ways
    +build-depends:
    +- elements/firstdep.bst
    +runtime-depends:
    +- elements/seconddep.bst
    +depends:
    +- elements/thirddep.bst

  • tests/loader/dependencies/elements/list-overlap.bst
    +kind: stack
    +description: This element depends on two elements in different ways
    +build-depends:
    +- elements/firstdep.bst
    +depends:
    +- filename: elements/firstdep.bst
    +  type: runtime

  • tests/loader/dependencies/elements/runtimedep-list.bst
    +kind: stack
    +description: This element has a runtime-only dependency
    +runtime-depends:
    +  - elements/firstdep.bst

  • tests/loader/dependencies/elements/seconddep.bst
    +kind: manual
    +description: This is the second dependency

  • tests/loader/dependencies/elements/thirddep.bst
    +kind: manual
    +description: This is the third dependency


