[Notes] [Git][BuildStream/buildstream][jmac/cache_artifacts_with_vdir] 21 commits: contrib/bst-graph: Add script to print graph in DOT format




Jürg Billeter pushed to branch jmac/cache_artifacts_with_vdir at BuildStream / buildstream

Commits:

18 changed files:

Changes:

  • README.rst
    @@ -100,3 +100,9 @@ We also recommend exploring some existing BuildStream projects:
     * https://gitlab.com/baserock/definitions
     
     If you have any questions please ask on our `#buildstream <irc://irc.gnome.org/buildstream>`_ channel in `irc.gnome.org <irc://irc.gnome.org>`_
    +
    +
    +Availability in distros
    +=======================
    +.. image:: https://repology.org/badge/vertical-allrepos/buildstream.svg
    +   :target: https://repology.org/metapackage/buildstream/versions

  • buildstream/_artifactcache.py
    @@ -588,13 +588,16 @@ class ArtifactCache():
         #
         # Args:
         #     element (Element): The Element to commit an artifact for
    -    #     content (str): The element's content directory
    +    #     content (Directory): The element's content directory
         #     keys (list): The cache keys to use
         #
         def commit(self, element, content, keys):
             refs = [element.get_artifact_name(key) for key in keys]
     
    -        self.cas.commit(refs, content)
    +        tree = content._get_digest()
    +
    +        for ref in refs:
    +            self.cas.set_ref(ref, tree)
     
         # diff():
         #
    
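    Note: with this change, committing an artifact no longer exports or copies any
    files; each cache-key ref is simply pointed at an already-stored directory
    digest. A minimal sketch of the idea, using a toy in-memory store rather than
    the real CASCache API:

        import hashlib

        # Toy content-addressed store: blobs live under their digest, and
        # refs are just names pointing at a digest.
        objects = {}
        refs = {}

        def add_object(blob):
            digest = hashlib.sha256(blob).hexdigest()
            objects[digest] = blob
            return digest

        def commit(ref_names, tree_digest):
            # Same shape as the new ArtifactCache.commit(): one digest,
            # N refs, no directory export or file copying.
            for ref in ref_names:
                refs[ref] = tree_digest

        tree = add_object(b"serialized Directory protobuf")
        commit(["element/strong-key", "element/weak-key"], tree)
        assert refs["element/strong-key"] == refs["element/weak-key"]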

  • buildstream/_context.py
    @@ -124,10 +124,6 @@ class Context():
             # Whether or not to cache build trees on artifact creation
             self.cache_buildtrees = None
     
    -        # Boolean, whether we double-check with the user that they meant to
    -        # close the workspace when they're using it to access the project.
    -        self.prompt_workspace_close_project_inaccessible = None
    -
             # Whether elements must be rebuilt when their dependencies have changed
             self._strict_build_plan = None
     
    @@ -248,22 +244,6 @@ class Context():
             self.sched_pushers = _yaml.node_get(scheduler, int, 'pushers')
             self.sched_network_retries = _yaml.node_get(scheduler, int, 'network-retries')
     
    -        # Load prompt preferences
    -        #
    -        # We convert string options to booleans here, so we can be both user
    -        # and coder-friendly. The string options are worded to match the
    -        # responses the user would give at the cli, for least surprise. The
    -        # booleans are converted here because it's easiest to eyeball that the
    -        # strings are right.
    -        #
    -        prompt = _yaml.node_get(
    -            defaults, Mapping, 'prompt')
    -        _yaml.node_validate(prompt, [
    -            'really-workspace-close-project-inaccessible',
    -        ])
    -        self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
    -            prompt, 'really-workspace-close-project-inaccessible', ['ask', 'yes']) == 'ask'
    -
             # Load per-projects overrides
             self._project_overrides = _yaml.node_get(defaults, Mapping, 'projects', default_value={})
     
    

  • buildstream/_frontend/cli.py
    @@ -814,6 +814,8 @@ def workspace_open(app, no_checkout, force, track_, directory, elements):
     def workspace_close(app, remove_dir, all_, elements):
         """Close a workspace"""
     
    +    removed_required_element = False
    +
         with app.initialized():
             if not (all_ or elements):
                 # NOTE: I may need to revisit this when implementing multiple projects
    @@ -840,18 +842,20 @@ def workspace_close(app, remove_dir, all_, elements):
             for element_name in elements:
                 if not app.stream.workspace_exists(element_name):
                     nonexisting.append(element_name)
    -            if (app.stream.workspace_is_required(element_name) and app.interactive and
    -                    app.context.prompt_workspace_close_project_inaccessible):
    -                click.echo("Removing '{}' will prevent you from running "
    -                           "BuildStream commands from the current directory".format(element_name))
    -                if not click.confirm('Are you sure you want to close this workspace?'):
    -                    click.echo('Aborting', err=True)
    -                    sys.exit(-1)
             if nonexisting:
                 raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
     
             for element_name in elements:
                 app.stream.workspace_close(element_name, remove_dir=remove_dir)
    +            if app.stream.workspace_is_required(element_name):
    +                removed_required_element = True
    +
    +    # This message is echo'd last, as it's most relevant to the next
    +    # thing the user will type.
    +    if removed_required_element:
    +        click.echo(
    +            "Removed '{}', therefore you can no longer run BuildStream "
    +            "commands from the current directory.".format(element_name), err=True)
     
     
     ##################################################################
    
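    Note: the interactive 'are you sure?' prompt is gone; the command now records
    whether any required workspace was closed and prints one warning after the
    work is done. A rough sketch of the pattern (the close/is_required callables
    are stand-ins here, not the real Stream API):

        import click

        def close_workspaces(element_names, close, is_required):
            removed_required_element = False
            for name in element_names:
                close(name)
                if is_required(name):
                    removed_required_element = True

            # Echo the warning last so it sits right above the user's next
            # prompt instead of being buried in the per-element output.
            if removed_required_element:
                click.echo("You can no longer run BuildStream commands "
                           "from the current directory.", err=True)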

  • buildstream/_frontend/widget.py
    @@ -398,7 +398,7 @@ class LogLine(Widget):
     
                 # Variables
                 if "%{vars" in format_:
    -                variables = _yaml.node_sanitize(element._Element__variables.variables)
    +                variables = _yaml.node_sanitize(element._Element__variables.flat)
                     line = p.fmt_subst(
                         line, 'vars',
                         yaml.round_trip_dump(variables, default_flow_style=False, allow_unicode=True))
    

  • buildstream/_loader/loader.py
    @@ -284,17 +284,17 @@ class Loader():
         def _check_circular_deps(self, element, check_elements=None, validated=None, sequence=None):
     
             if check_elements is None:
    -            check_elements = {}
    +            check_elements = set()
             if validated is None:
    -            validated = {}
    +            validated = set()
             if sequence is None:
                 sequence = []
     
             # Skip already validated branches
    -        if validated.get(element) is not None:
    +        if element in validated:
                 return
     
    -        if check_elements.get(element) is not None:
    +        if element in check_elements:
                 # Create `chain`, the loop of element dependencies from this
                 # element back to itself, by trimming everything before this
                 # element from the sequence under consideration.
    @@ -306,15 +306,15 @@ class Loader():
                                 .format(element.full_name, " -> ".join(chain)))
     
             # Push / Check each dependency / Pop
    -        check_elements[element] = True
    +        check_elements.add(element)
             sequence.append(element.full_name)
             for dep in element.dependencies:
                 dep.element._loader._check_circular_deps(dep.element, check_elements, validated, sequence)
    -        del check_elements[element]
    +        check_elements.remove(element)
             sequence.pop()
     
             # Eliminate duplicate paths
    -        validated[element] = True
    +        validated.add(element)
     
         # _sort_dependencies():
         #
    
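    Note: the dicts-used-as-sets become real sets, since only membership ever
    mattered. A condensed, self-contained version of the same DFS cycle check
    (toy dependency mapping, not the real Loader):

        def check_cycles(deps, element, check_elements=None, validated=None):
            # deps maps an element name to the list of its dependency names.
            if check_elements is None:
                check_elements = set()   # elements on the current DFS path
            if validated is None:
                validated = set()        # subtrees already proven acyclic

            if element in validated:
                return
            if element in check_elements:
                raise ValueError("Circular dependency through {}".format(element))

            check_elements.add(element)
            for dep in deps.get(element, []):
                check_cycles(deps, dep, check_elements, validated)
            check_elements.remove(element)
            validated.add(element)

        check_cycles({'a': ['b'], 'b': ['c'], 'c': []}, 'a')   # fine
        # check_cycles({'a': ['b'], 'b': ['a']}, 'a')          # raises ValueError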

  • buildstream/_profile.py
    @@ -26,7 +26,7 @@ import datetime
     import time
     
     # Track what profile topics are active
    -active_topics = {}
    +active_topics = set()
     active_profiles = {}
     initialized = False
     
    @@ -144,14 +144,10 @@ def profile_init():
             if setting:
                 topics = setting.split(':')
                 for topic in topics:
    -                active_topics[topic] = True
    +                active_topics.add(topic)
             initialized = True
     
     
     def profile_enabled(topic):
         profile_init()
    -    if active_topics.get(topic):
    -        return True
    -    if active_topics.get(Topics.ALL):
    -        return True
    -    return False
    +    return topic in active_topics or Topics.ALL in active_topics

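    Note: same dict-to-set conversion here. A minimal sketch of how a
    colon-separated BST_PROFILE-style setting selects topics (the topic names
    are made up, and a plain 'all' string stands in for Topics.ALL):

        active_topics = set()

        def profile_init(setting):
            # BST_PROFILE-style value: colon-separated topic names
            for topic in setting.split(':'):
                active_topics.add(topic)

        def profile_enabled(topic):
            return topic in active_topics or 'all' in active_topics

        profile_init('load-pipeline:show-loaded')
        assert profile_enabled('load-pipeline')
        assert not profile_enabled('cache-size')
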
  • buildstream/_variables.py
    @@ -1,5 +1,6 @@
     #
     #  Copyright (C) 2016 Codethink Limited
    +#  Copyright (C) 2019 Bloomberg L.P.
     #
     #  This program is free software; you can redistribute it and/or
     #  modify it under the terms of the GNU Lesser General Public
    @@ -16,15 +17,37 @@
     #
     #  Authors:
     #        Tristan Van Berkom <tristan vanberkom codethink co uk>
    +#        Daniel Silverstone <daniel silverstone codethink co uk>
     
     import re
    +import sys
     
     from ._exceptions import LoadError, LoadErrorReason
     from . import _yaml
     
     # Variables are allowed to have dashes here
     #
    -_VARIABLE_MATCH = r'\%\{([a-zA-Z][a-zA-Z0-9_-]*)\}'
    +PARSE_EXPANSION = re.compile(r"\%\{([a-zA-Z][a-zA-Z0-9_-]*)\}")
    +
    +
    +# Throughout this code you will see variables named things like `expstr`.
    +# These hold data structures called "expansion strings" and are the parsed
    +# form of the strings which are the input to this subsystem.  Strings
    +# such as "Hello %{name}, how are you?" are parsed into the form:
    +# (3, ["Hello ", "name", ", how are you?"])
    +# i.e. a tuple of an integer and a list, where the integer is the cached
    +# length of the list, and the list consists of one or more strings.
    +# Strings in even indices of the list (0, 2, 4, etc) are constants which
    +# are copied into the output of the expansion algorithm.  Strings in the
    +# odd indices (1, 3, 5, etc) are the names of further expansions to make.
    +# In the example above, first "Hello " is copied, then "name" is expanded
    +# and so must be another named expansion string passed in to the constructor
    +# of the Variables class, and whatever is yielded from the expansion of "name"
    +# is added to the concatenation for the result.  Finally ", how are you?" is
    +# copied in and the whole lot concatenated for return.
    +#
    +# To see how strings are parsed, see `_parse_expstr()` after the class, and
    +# to see how expansion strings are expanded, see `_expand_expstr()` after that.
     
     
     # The Variables helper object will resolve the variable references in
    @@ -38,14 +61,15 @@ _VARIABLE_MATCH = r'\%\{([a-zA-Z][a-zA-Z0-9_-]*)\}'
     #     node (dict): A node loaded and composited with yaml tools
     #
     # Raises:
    -#     LoadError, if unresolved variables occur.
    +#     LoadError, if unresolved variables, or cycles in resolution, occur.
     #
     class Variables():
     
         def __init__(self, node):
     
             self.original = node
    -        self.variables = self._resolve(node)
    +        self._expstr_map = self._resolve(node)
    +        self.flat = self._flatten()
     
         # subst():
         #
    @@ -61,139 +85,167 @@ class Variables():
         #    LoadError, if the string contains unresolved variable references.
         #
         def subst(self, string):
    -        substitute, unmatched, _ = self._subst(string, self.variables)
    -        unmatched = list(set(unmatched))
    -        if unmatched:
    -            if len(unmatched) == 1:
    -                message = "Unresolved variable '{var}'".format(var=unmatched[0])
    -            else:
    -                message = "Unresolved variables: "
    -                for unmatch in unmatched:
    -                    if unmatched.index(unmatch) > 0:
    -                        message += ', '
    -                    message += unmatch
    -
    -            raise LoadError(LoadErrorReason.UNRESOLVED_VARIABLE, message)
    -
    -        return substitute
    -
    -    def _subst(self, string, variables):
    -
    -        def subst_callback(match):
    -            nonlocal variables
    -            nonlocal unmatched
    -            nonlocal matched
    -
    -            token = match.group(0)
    -            varname = match.group(1)
    -
    -            value = _yaml.node_get(variables, str, varname, default_value=None)
    -            if value is not None:
    -                # We have to check if the inner string has variables
    -                # and return unmatches for those
    -                unmatched += re.findall(_VARIABLE_MATCH, value)
    -                matched += [varname]
    -            else:
    -                # Return unmodified token
    -                unmatched += [varname]
    -                value = token
    -
    -            return value
    -
    -        matched = []
    -        unmatched = []
    -        replacement = re.sub(_VARIABLE_MATCH, subst_callback, string)
    -
    -        return (replacement, unmatched, matched)
    +        expstr = _parse_expstr(string)
    +
    +        try:
    +            return _expand_expstr(self._expstr_map, expstr)
    +        except KeyError:
    +            unmatched = []
    +
    +            # Look for any unmatched variable names in the expansion string
    +            for var in expstr[1][1::2]:
    +                if var not in self._expstr_map:
    +                    unmatched.append(var)
    +
    +            if unmatched:
    +                message = "Unresolved variable{}: {}".format(
    +                    "s" if len(unmatched) > 1 else "",
    +                    ", ".join(unmatched)
    +                )
    +
    +                raise LoadError(LoadErrorReason.UNRESOLVED_VARIABLE, message)
    +            # Otherwise, re-raise the KeyError since it clearly came from some
    +            # other unknowable cause.
    +            raise
     
         # Variable resolving code
         #
    -    # Here we substitute variables for values (resolve variables) repeatedly
    -    # in a dictionary, each time creating a new dictionary until there is no
    -    # more unresolved variables to resolve, or, until resolving further no
    -    # longer resolves anything, in which case we throw an exception.
    +    # Here we resolve all of our inputs into a dictionary, ready for use
    +    # in subst()
         def _resolve(self, node):
    -        variables = node
    -
             # Special case, if notparallel is specified in the variables for this
             # element, then override max-jobs to be 1.
             # Initialize it as a string as all variables are processed as strings.
             #
    -        if _yaml.node_get(variables, bool, 'notparallel', default_value=False):
    -            variables['max-jobs'] = str(1)
    -
    -        # Resolve the dictionary once, reporting the new dictionary with things
    -        # substituted in it, and reporting unmatched tokens.
    -        #
    -        def resolve_one(variables):
    -            unmatched = []
    -            resolved = {}
    -
    -            for key, value in _yaml.node_items(variables):
    -
    -                # Ensure stringness of the value before substitution
    -                value = _yaml.node_get(variables, str, key)
    -
    -                resolved_var, item_unmatched, matched = self._subst(value, variables)
    -
    -                if _wrap_variable(key) in resolved_var:
    -                    referenced_through = find_recursive_variable(key, matched, variables)
    +        if _yaml.node_get(node, bool, 'notparallel', default_value=False):
    +            node['max-jobs'] = str(1)
    +
    +        ret = {}
    +        for key, value in _yaml.node_items(node):
    +            value = _yaml.node_get(node, str, key)
    +            ret[sys.intern(key)] = _parse_expstr(value)
    +        return ret
    +
    +    def _check_for_missing(self):
    +        # First the check for anything unresolvable
    +        summary = []
    +        for key, expstr in self._expstr_map.items():
    +            for var in expstr[1][1::2]:
    +                if var not in self._expstr_map:
    +                    line = "  unresolved variable '{unmatched}' in declaration of '{variable}' at: {provenance}"
    +                    provenance = _yaml.node_get_provenance(self.original, key)
    +                    summary.append(line.format(unmatched=var, variable=key, provenance=provenance))
    +        if summary:
    +            raise LoadError(LoadErrorReason.UNRESOLVED_VARIABLE,
    +                            "Failed to resolve one or more variable:\n{}\n".format("\n".join(summary)))
    +
    +    def _check_for_cycles(self):
    +        # And now the cycle checks
    +        def cycle_check(expstr, visited, cleared):
    +            for var in expstr[1][1::2]:
    +                if var in cleared:
    +                    continue
    +                if var in visited:
                         raise LoadError(LoadErrorReason.RECURSIVE_VARIABLE,
    -                                    "{}: ".format(_yaml.node_get_provenance(variables, key)) +
    +                                    "{}: ".format(_yaml.node_get_provenance(self.original, var)) +
                                         ("Variable '{}' expands to contain a reference to itself. " +
    -                                     "Perhaps '{}' contains '{}").format(key, referenced_through, _wrap_variable(key)))
    -
    -                resolved[key] = resolved_var
    -                unmatched += item_unmatched
    -
    -            # Carry over provenance
    -            resolved[_yaml.PROVENANCE_KEY] = variables[_yaml.PROVENANCE_KEY]
    -            return (resolved, unmatched)
    -
    -        # Resolve it until it's resolved or broken
    -        #
    -        resolved = variables
    -        unmatched = ['dummy']
    -        last_unmatched = ['dummy']
    -        while unmatched:
    -            resolved, unmatched = resolve_one(resolved)
    -
    -            # Lists of strings can be compared like this
    -            if unmatched == last_unmatched:
    -                # We've got the same result twice without matching everything,
    -                # something is undeclared or cyclic, compose a summary.
    -                #
    -                summary = ''
    -                for unmatch in set(unmatched):
    -                    for var, provenance in self._find_references(unmatch):
    -                        line = "  unresolved variable '{unmatched}' in declaration of '{variable}' at: {provenance}\n"
    -                        summary += line.format(unmatched=unmatch, variable=var, provenance=provenance)
    -
    -                raise LoadError(LoadErrorReason.UNRESOLVED_VARIABLE,
    -                                "Failed to resolve one or more variable:\n{}".format(summary))
    -
    -            last_unmatched = unmatched
    -
    -        return resolved
    -
    -    # Helper function to fetch information about the node referring to a variable
    +                                     "Perhaps '{}' contains '%{{{}}}").format(var, visited[-1], var))
    +                visited.append(var)
    +                cycle_check(self._expstr_map[var], visited, cleared)
    +                visited.pop()
    +                cleared.add(var)
    +
    +        cleared = set()
    +        for key, expstr in self._expstr_map.items():
    +            if key not in cleared:
    +                cycle_check(expstr, [key], cleared)
    +
    +    # _flatten():
         #
    -    def _find_references(self, varname):
    -        fullname = _wrap_variable(varname)
    -        for key, value in _yaml.node_items(self.original):
    -            if fullname in value:
    -                provenance = _yaml.node_get_provenance(self.original, key)
    -                yield (key, provenance)
    -
    -
    -def find_recursive_variable(variable, matched_variables, all_vars):
    -    matched_values = (_yaml.node_get(all_vars, str, key) for key in matched_variables)
    -    for key, value in zip(matched_variables, matched_values):
    -        if _wrap_variable(variable) in value:
    -            return key
    -    # We failed to find a recursive variable
    -    return None
    -
    -
    -def _wrap_variable(var):
    -    return "%{" + var + "}"
    +    # Turn our dictionary of expansion strings into a flattened dict
    +    # so that we can run expansions faster in the future
    +    #
    +    # Raises:
    +    #    LoadError, if the string contains unresolved variable references or
    +    #               if cycles are detected in the variable references
    +    #
    +    def _flatten(self):
    +        flat = {}
    +        try:
    +            for key, expstr in self._expstr_map.items():
    +                if expstr[0] > 1:
    +                    expstr = (1, [sys.intern(_expand_expstr(self._expstr_map, expstr))])
    +                    self._expstr_map[key] = expstr
    +                flat[key] = expstr[1][0]
    +        except KeyError:
    +            self._check_for_missing()
    +            raise
    +        except RecursionError:
    +            self._check_for_cycles()
    +            raise
    +        return flat
    +
    +
    +# Cache for the parsed expansion strings.  While this is nominally
    +# something which might "waste" memory, in reality each of these
    +# will live as long as the element which uses it, which is the
    +# vast majority of the memory usage across the execution of BuildStream.
    +PARSE_CACHE = {
    +    # Prime the cache with the empty string since otherwise that can
    +    # cause issues with the parser, complications to which cause slowdown
    +    "": (1, [""]),
    +}
    +
    +
    +# Helper to parse a string into an expansion string tuple, caching
    +# the results so that future parse requests don't need to think about
    +# the string
    +def _parse_expstr(instr):
    +    try:
    +        return PARSE_CACHE[instr]
    +    except KeyError:
    +        # This use of the regex turns a string like "foo %{bar} baz" into
    +        # a list ["foo ", "bar", " baz"]
    +        splits = PARSE_EXPANSION.split(instr)
    +        # If an expansion ends the string, we get an empty string on the end
    +        # which we can optimise away, making the expansion routines not need
    +        # a test for this.
    +        if splits[-1] == '':
    +            splits = splits[:-1]
    +        # Cache an interned copy of this.  We intern it to try and reduce the
    +        # memory impact of the cache.  It seems odd to cache the list length
    +        # but this is measurably cheaper than calculating it each time during
    +        # string expansion.
    +        PARSE_CACHE[instr] = (len(splits), [sys.intern(s) for s in splits])
    +        return PARSE_CACHE[instr]
    +
    +
    +# Helper to expand a given top level expansion string tuple in the context
    +# of the given dictionary of expansion strings.
    +#
    +# Note: Will raise KeyError if any expansion is missing
    +def _expand_expstr(content, topvalue):
    +    # Short-circuit constant strings
    +    if topvalue[0] == 1:
    +        return topvalue[1][0]
    +
    +    # Short-circuit strings which are entirely an expansion of another variable
    +    # e.g. "%{another}"
    +    if topvalue[0] == 2 and topvalue[1][0] == "":
    +        return _expand_expstr(content, content[topvalue[1][1]])
    +
    +    # Otherwise process fully...
    +    def internal_expand(value):
    +        (expansion_len, expansion_bits) = value
    +        idx = 0
    +        while idx < expansion_len:
    +            # First yield any constant string content
    +            yield expansion_bits[idx]
    +            idx += 1
    +            # Now, if there is an expansion variable left to expand, yield
    +            # the expansion of that variable too
    +            if idx < expansion_len:
    +                yield from internal_expand(content[expansion_bits[idx]])
    +            idx += 1
    +
    +    return "".join(internal_expand(topvalue))

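    Note: the comment block in the diff describes the "expansion string" form.
    A standalone demonstration of the same parse/expand scheme (same regex, but
    simplified helpers without the interning and the PARSE_CACHE):

        import re

        PARSE_EXPANSION = re.compile(r"\%\{([a-zA-Z][a-zA-Z0-9_-]*)\}")

        def parse_expstr(instr):
            # "foo %{bar} baz" -> (3, ["foo ", "bar", " baz"]); even indices
            # are literal text, odd indices are variable names to expand.
            splits = PARSE_EXPANSION.split(instr)
            if splits and splits[-1] == '':
                splits = splits[:-1]
            return (len(splits), splits)

        def expand_expstr(content, value):
            _, bits = value
            out = []
            for idx, bit in enumerate(bits):
                if idx % 2 == 0:
                    out.append(bit)                                   # literal
                else:
                    out.append(expand_expstr(content, content[bit]))  # nested
            return "".join(out)

        variables = {name: parse_expstr(s) for name, s in {
            'name': 'world',
            'greeting': 'Hello %{name}, how are you?',
        }.items()}
        assert expand_expstr(variables, variables['greeting']) == \
            'Hello world, how are you?'
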
  • buildstream/data/userconfig.yaml
    @@ -111,20 +111,3 @@ logging:
       message-format: |
     
         [%{elapsed}][%{key}][%{element}] %{action} %{message}
    -
    -#
    -#    Prompt overrides
    -#
    -# Here you can suppress 'are you sure?' and other kinds of prompts by supplying
    -# override values. Note that e.g. 'yes' and 'no' have the same meaning here as
    -# they do in the actual cli prompt.
    -#
    -prompt:
    -
    -  # Whether to really proceed with 'bst workspace close' when doing so would
    -  # stop them from running bst commands in this workspace.
    -  #
    -  #  ask - Ask the user if they are sure.
    -  #  yes - Always close, without asking.
    -  #
    -  really-workspace-close-project-inaccessible: ask

  • buildstream/element.py
    @@ -103,6 +103,7 @@ from .types import _KeyStrength, CoreWarnings
     
     from .storage.directory import Directory
     from .storage._filebaseddirectory import FileBasedDirectory
    +from .storage._casbaseddirectory import CasBasedDirectory
     from .storage.directory import VirtualDirectoryError
     
     
    @@ -894,10 +895,7 @@ class Element(Plugin):
               (str): The resolved value for *varname*, or None if no
               variable was declared with the given name.
            """
    -        if varname in self.__variables.variables:
    -            return self.__variables.variables[varname]
    -
    -        return None
    +        return self.__variables.flat.get(varname)
     
         def batch_prepare_assemble(self, flags, *, collect=None):
             """ Configure command batching across prepare() and assemble()
    @@ -1673,106 +1671,109 @@ class Element(Plugin):
                         cleanup_rootdir()
     
         def _cache_artifact(self, rootdir, sandbox, collect):
    -        if collect is not None:
    -            try:
    -                sandbox_vroot = sandbox.get_virtual_directory()
    -                collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
    -            except VirtualDirectoryError:
    -                # No collect directory existed
    -                collectvdir = None
    +        with self.timed_activity("Caching artifact"):
    +            if collect is not None:
    +                try:
    +                    sandbox_vroot = sandbox.get_virtual_directory()
    +                    collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
    +                except VirtualDirectoryError:
    +                    # No collect directory existed
    +                    collectvdir = None
     
    -        context = self._get_context()
    +            context = self._get_context()
     
    -        # Create artifact directory structure
    -        assembledir = os.path.join(rootdir, 'artifact')
    -        filesdir = os.path.join(assembledir, 'files')
    -        logsdir = os.path.join(assembledir, 'logs')
    -        metadir = os.path.join(assembledir, 'meta')
    -        buildtreedir = os.path.join(assembledir, 'buildtree')
    -        os.mkdir(assembledir)
    -        if collect is not None and collectvdir is not None:
    -            os.mkdir(filesdir)
    -        os.mkdir(logsdir)
    -        os.mkdir(metadir)
    -        os.mkdir(buildtreedir)
    -
    -        # Hard link files from collect dir to files directory
    -        if collect is not None and collectvdir is not None:
    -            collectvdir.export_files(filesdir, can_link=True)
    -
    -        cache_buildtrees = context.cache_buildtrees
    -        build_success = self.__build_result[0]
    -
    -        # cache_buildtrees defaults to 'always', as such the
    -        # default behaviour is to attempt to cache them. If only
    -        # caching failed artifact buildtrees, then query the build
    -        # result. Element types without a build-root dir will be cached
    -        # with an empty buildtreedir regardless of this configuration.
    -
    -        if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
    -            try:
    +            assemblevdir = CasBasedDirectory(cas_cache=context.artifactcache.cas, ref=None)
    +            logsvdir = assemblevdir.descend("logs", create=True)
    +            metavdir = assemblevdir.descend("meta", create=True)
    +            buildtreevdir = assemblevdir.descend("buildtree", create=True)
    +
    +            # Create artifact directory structure
    +            assembledir = os.path.join(rootdir, 'artifact')
    +            logsdir = os.path.join(assembledir, 'logs')
    +            metadir = os.path.join(assembledir, 'meta')
    +            os.mkdir(assembledir)
    +            os.mkdir(logsdir)
    +            os.mkdir(metadir)
    +
    +            if collect is not None and collectvdir is not None:
    +                filesvdir = assemblevdir.descend("files", create=True)
    +                filesvdir.import_files(collectvdir)
    +
    +            cache_buildtrees = context.cache_buildtrees
    +            build_success = self.__build_result[0]
    +
    +            # cache_buildtrees defaults to 'always', as such the
    +            # default behaviour is to attempt to cache them. If only
    +            # caching failed artifact buildtrees, then query the build
    +            # result. Element types without a build-root dir will be cached
    +            # with an empty buildtreedir regardless of this configuration.
    +
    +            if cache_buildtrees == 'always' or (cache_buildtrees == 'failure' and not build_success):
                     sandbox_vroot = sandbox.get_virtual_directory()
    -                sandbox_build_dir = sandbox_vroot.descend(
    -                    self.get_variable('build-root').lstrip(os.sep).split(os.sep))
    -                # Hard link files from build-root dir to buildtreedir directory
    -                sandbox_build_dir.export_files(buildtreedir)
    -            except VirtualDirectoryError:
    -                # Directory could not be found. Pre-virtual
    -                # directory behaviour was to continue silently
    -                # if the directory could not be found.
    -                pass
    +                try:
    +                    sandbox_build_dir = sandbox_vroot.descend(
    +                        self.get_variable('build-root').lstrip(os.sep).split(os.sep))
    +                    buildtreevdir.import_files(sandbox_build_dir)
    +                except VirtualDirectoryError:
    +                    # Directory could not be found. Pre-virtual
    +                    # directory behaviour was to continue silently
    +                    # if the directory could not be found.
    +                    pass
    +
    +            # Write some logs out to normal directories: logsdir and metadir
    +            # Copy build log
    +            log_filename = context.get_log_filename()
    +            self._build_log_path = os.path.join(logsdir, 'build.log')
    +            if log_filename:
    +                shutil.copyfile(log_filename, self._build_log_path)
    +
    +            # Store public data
    +            _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
    +
    +            # Store result
    +            build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
    +            if self.__build_result[2] is not None:
    +                build_result_dict["detail"] = self.__build_result[2]
    +            _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
    +
    +            # ensure we have cache keys
    +            self._assemble_done()
    +
    +            # Store keys.yaml
    +            _yaml.dump(_yaml.node_sanitize({
    +                'strong': self._get_cache_key(),
    +                'weak': self._get_cache_key(_KeyStrength.WEAK),
    +            }), os.path.join(metadir, 'keys.yaml'))
    +
    +            # Store dependencies.yaml
    +            _yaml.dump(_yaml.node_sanitize({
    +                e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
    +            }), os.path.join(metadir, 'dependencies.yaml'))
    +
    +            # Store workspaced.yaml
    +            _yaml.dump(_yaml.node_sanitize({
    +                'workspaced': bool(self._get_workspace())
    +            }), os.path.join(metadir, 'workspaced.yaml'))
    +
    +            # Store workspaced-dependencies.yaml
    +            _yaml.dump(_yaml.node_sanitize({
    +                'workspaced-dependencies': [
    +                    e.name for e in self.dependencies(Scope.BUILD)
    +                    if e._get_workspace()
    +                ]
    +            }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
     
    -        # Copy build log
    -        log_filename = context.get_log_filename()
    -        self._build_log_path = os.path.join(logsdir, 'build.log')
    -        if log_filename:
    -            shutil.copyfile(log_filename, self._build_log_path)
    -
    -        # Store public data
    -        _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
    -
    -        # Store result
    -        build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
    -        if self.__build_result[2] is not None:
    -            build_result_dict["detail"] = self.__build_result[2]
    -        _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
    -
    -        # ensure we have cache keys
    -        self._assemble_done()
    -
    -        # Store keys.yaml
    -        _yaml.dump(_yaml.node_sanitize({
    -            'strong': self._get_cache_key(),
    -            'weak': self._get_cache_key(_KeyStrength.WEAK),
    -        }), os.path.join(metadir, 'keys.yaml'))
    -
    -        # Store dependencies.yaml
    -        _yaml.dump(_yaml.node_sanitize({
    -            e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
    -        }), os.path.join(metadir, 'dependencies.yaml'))
    -
    -        # Store workspaced.yaml
    -        _yaml.dump(_yaml.node_sanitize({
    -            'workspaced': bool(self._get_workspace())
    -        }), os.path.join(metadir, 'workspaced.yaml'))
    -
    -        # Store workspaced-dependencies.yaml
    -        _yaml.dump(_yaml.node_sanitize({
    -            'workspaced-dependencies': [
    -                e.name for e in self.dependencies(Scope.BUILD)
    -                if e._get_workspace()
    -            ]
    -        }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
    +            metavdir.import_files(metadir)
    +            logsvdir.import_files(logsdir)
     
    -        with self.timed_activity("Caching artifact"):
    -            artifact_size = utils._get_dir_size(assembledir)
    -            self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
    -
    -        if collect is not None and collectvdir is None:
    -            raise ElementError(
    -                "Directory '{}' was not found inside the sandbox, "
    -                "unable to collect artifact contents"
    -                .format(collect))
    +            artifact_size = assemblevdir.get_size()
    +            self.__artifacts.commit(self, assemblevdir, self.__get_cache_keys_for_commit())
    +
    +            if collect is not None and collectvdir is None:
    +                raise ElementError(
    +                    "Directory '{}' was not found inside the sandbox, "
    +                    "unable to collect artifact contents"
    +                    .format(collect))
     
             return artifact_size
     
    

  • buildstream/plugins/sources/tar.py
    @@ -154,7 +154,7 @@ class TarSource(DownloadableFileSource):
         # directory paths for the archived files.
         def _list_tar_paths(self, tar):
     
    -        visited = {}
    +        visited = set()
             for member in tar.getmembers():
     
                 # Remove any possible leading './', offer more consistent behavior
    @@ -170,7 +170,7 @@ class TarSource(DownloadableFileSource):
                     for i in range(len(components) - 1):
                         dir_component = '/'.join([components[j] for j in range(i + 1)])
                         if dir_component not in visited:
    -                        visited[dir_component] = True
    +                        visited.add(dir_component)
                             try:
                                 # Dont yield directory members which actually do
                                 # exist in the archive
    

  • buildstream/storage/_casbaseddirectory.py
    @@ -36,7 +36,7 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
     from .._exceptions import BstError
     from .directory import Directory, VirtualDirectoryError
     from ._filebaseddirectory import FileBasedDirectory
    -from ..utils import FileListResult, safe_copy, list_relative_paths
    +from ..utils import FileListResult, safe_copy, list_relative_paths, _magic_timestamp
     
     
     class IndexEntry():
    @@ -136,10 +136,10 @@ class CasBasedDirectory(Directory):
             the parent).
     
             """
    -        self.ref = self.cas_cache.add_object(buffer=self.pb2_directory.SerializeToString())
             if caller:
                 old_dir = self._find_pb2_entry(caller.filename)
                 self.cas_cache.add_object(digest=old_dir.digest, buffer=caller.pb2_directory.SerializeToString())
    +        self.ref = self.cas_cache.add_object(buffer=self.pb2_directory.SerializeToString())
             if self.parent:
                 self.parent._recalculate_recursing_up(self)
     
    @@ -277,14 +277,6 @@ class CasBasedDirectory(Directory):
                                                              directory_list))
             return None
     
    -    def find_root(self):
    -        """ Finds the root of this directory tree by following 'parent' until there is
    -        no parent. """
    -        if self.parent:
    -            return self.parent.find_root()
    -        else:
    -            return self
    -
         def _check_replacement(self, name, path_prefix, fileListResult):
             """ Checks whether 'name' exists, and if so, whether we can overwrite it.
             If we can, add the name to 'overwritten_files' and delete the existing entry.
    @@ -451,7 +443,7 @@ class CasBasedDirectory(Directory):
                     files = external_pathspec.list_relative_paths()
     
             if isinstance(external_pathspec, FileBasedDirectory):
    -            source_directory = external_pathspec.get_underlying_directory()
    +            source_directory = external_pathspec._get_underlying_directory()
                 result = self._import_files_from_directory(source_directory, files=files)
             elif isinstance(external_pathspec, str):
                 source_directory = external_pathspec
    @@ -535,7 +527,7 @@ class CasBasedDirectory(Directory):
                                     " The original error was: {}").
                                    format(src_name, entry.target, e))
     
    -    def export_to_tar(self, tarfile, destination_dir, mtime=0):
    +    def export_to_tar(self, tarfile, destination_dir, mtime=_magic_timestamp):
             raise NotImplementedError()
     
         def mark_changed(self):
    @@ -635,6 +627,18 @@ class CasBasedDirectory(Directory):
             self._recalculate_recursing_up()
             self._recalculate_recursing_down()
     
    +    def get_size(self):
    +        total = len(self.pb2_directory.SerializeToString())
    +        for i in self.index.values():
    +            if isinstance(i.buildstream_object, CasBasedDirectory):
    +                total += i.buildstream_object.get_size()
    +            elif isinstance(i.pb_object, remote_execution_pb2.FileNode):
    +                src_name = self.cas_cache.objpath(i.pb_object.digest)
    +                filesize = os.stat(src_name).st_size
    +                total += filesize
    +            # Symlink nodes are encoded as part of the directory serialization.
    +        return total
    +
         def _get_identifier(self):
             path = ""
             if self.parent:
    @@ -653,3 +657,15 @@ class CasBasedDirectory(Directory):
             throw an exception. """
             raise VirtualDirectoryError("_get_underlying_directory was called on a CAS-backed directory," +
                                         " which has no underlying directory.")
    +
    +    # _get_digest():
    +    #
    +    # Return the Digest for this directory.
    +    #
    +    # Returns:
    +    #   (Digest): The Digest protobuf object for the Directory protobuf
    +    #
    +    def _get_digest(self):
    +        if not self.ref:
    +            self.ref = self.cas_cache.add_object(buffer=self.pb2_directory.SerializeToString())
    +        return self.ref

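    Note: _get_digest() is what makes the assembled artifact directory
    content-addressable; a directory's digest covers its entries, so any change
    below a directory changes the digest of every directory above it. A toy
    illustration of that Merkle-style property (hashlib only, not the real
    protobuf encoding):

        import hashlib

        def directory_digest(tree):
            # A directory's digest covers its file contents and the digests
            # of its subdirectories, so changes propagate up to the root.
            parts = []
            for name in sorted(tree):
                entry = tree[name]
                if isinstance(entry, dict):
                    parts.append(name + ':' + directory_digest(entry))
                else:
                    parts.append(name + ':' + hashlib.sha256(entry).hexdigest())
            return hashlib.sha256('\n'.join(parts).encode()).hexdigest()

        a = directory_digest({'files': {'hello.txt': b'hi'}, 'meta': {}})
        b = directory_digest({'files': {'hello.txt': b'hi!'}, 'meta': {}})
        assert a != b
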
  • buildstream/storage/_filebaseddirectory.py
    @@ -30,6 +30,7 @@ See also: :ref:`sandboxing`.
     import os
     import time
     from .directory import Directory, VirtualDirectoryError
    +from .. import utils
     from ..utils import link_files, copy_files, list_relative_paths, _get_link_mtime, _magic_timestamp
     from ..utils import _set_deterministic_user, _set_deterministic_mtime
     
    @@ -157,7 +158,7 @@ class FileBasedDirectory(Directory):
         # First, it sorts the results of os.listdir() to ensure the ordering of
         # the files in the archive is the same.  Second, it sets a fixed
         # timestamp for each entry. See also https://bugs.python.org/issue24465.
    -    def export_to_tar(self, tf, dir_arcname, mtime=0):
    +    def export_to_tar(self, tf, dir_arcname, mtime=_magic_timestamp):
             # We need directories here, including non-empty ones,
             # so list_relative_paths is not used.
             for filename in sorted(os.listdir(self.external_directory)):
    @@ -201,6 +202,9 @@ class FileBasedDirectory(Directory):
     
             return list_relative_paths(self.external_directory)
     
    +    def get_size(self):
    +        return utils._get_dir_size(self.external_directory)
    +
         def __str__(self):
             # This returns the whole path (since we don't know where the directory started)
             # which exposes the sandbox directory; we will have to assume for the time being
    

  • buildstream/storage/directory.py
    @@ -32,6 +32,7 @@ See also: :ref:`sandboxing`.
     """
     
     from .._exceptions import BstError, ErrorDomain
    +from ..utils import _magic_timestamp
     
     
     class VirtualDirectoryError(BstError):
    @@ -114,7 +115,7 @@ class Directory():
     
             raise NotImplementedError()
     
    -    def export_to_tar(self, tarfile, destination_dir, mtime=0):
    +    def export_to_tar(self, tarfile, destination_dir, mtime=_magic_timestamp):
             """ Exports this directory into the given tar file.
     
             Args:
    @@ -176,3 +177,9 @@ class Directory():
     
             """
             raise NotImplementedError()
    +
    +    def get_size(self):
    +        """ Get an approximation of the storage space in bytes used by this directory
    +        and all files and subdirectories in it. Storage space varies by implementation
    +        and effective space used may be lower than this number due to deduplication. """
    +        raise NotImplementedError()

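FileBasedDirectory implements the new get_size() API via utils._get_dir_size above. A rough sketch of such a du(1)-style walk, with hypothetical names of my own (the real helper is internal to BuildStream):

    import os

    def get_dir_size(path):
        '''Approximate bytes used by path and everything under it.'''
        total = 0
        for root, _, files in os.walk(path):
            for name in files:
                fullpath = os.path.join(root, name)
                # Skip symlinks so a link target isn't counted twice.
                if not os.path.islink(fullpath):
                    total += os.path.getsize(fullpath)
        return total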
  • contrib/bst-graph
    1
    +#!/usr/bin/env python3
    
    2
    +'''Print dependency graph of given element(s) in DOT format.
    
    3
    +
    
    4
    +This script must be run from the same directory where you would normally
    
    5
    +run `bst` commands.
    
    6
    +
    
    7
    +When `--format` option is specified, the output will also be rendered in the
    
    8
    +given format. A file with name `bst-graph.{format}` will be created in the same
    
    9
    +directory. To use this option, you must have the `graphviz` command line tool
    
    10
    +installed.
    
    11
    +'''
    
    12
    +
    
    13
    +import argparse
    
    14
    +import subprocess
    
    15
    +
    
    16
    +from graphviz import Digraph
    
    17
    +
    
    18
    +
    
    19
    +def parse_args():
    
    20
    +    '''Handle parsing of command line arguments.
    
    21
    +
    
    22
    +    Returns:
    
    23
+       An argparse.Namespace object
    
    24
    +    '''
    
    25
    +    parser = argparse.ArgumentParser(description=__doc__)
    
    26
    +    parser.add_argument(
    
    27
    +        'ELEMENT', nargs='*',
    
    28
    +        help='Name of the element'
    
    29
    +    )
    
    30
    +    parser.add_argument(
    
    31
    +        '--format',
    
    32
+        help='Render the graph in the given format (`pdf`, `png`, `svg` etc)'
    
    33
    +    )
    
    34
    +    parser.add_argument(
    
    35
    +        '--view', action='store_true',
    
    36
    +        help='Open the rendered graph with the default application'
    
    37
    +    )
    
    38
    +    return parser.parse_args()
    
    39
    +
    
    40
    +
    
    41
    +def parse_graph(lines):
    
    42
+    '''Return nodes and edges of the parsed graph.
    
    43
    +
    
    44
    +    Args:
    
    45
    +       lines: List of lines in format 'NAME|BUILD-DEPS|RUNTIME-DEPS'
    
    46
    +
    
    47
    +    Returns:
    
    48
    +       Tuple of format (nodes,build_deps,runtime_deps)
    
    49
    +       Each member of build_deps and runtime_deps is also a tuple.
    
    50
    +    '''
    
    51
    +    nodes = set()
    
    52
    +    build_deps = set()
    
    53
    +    runtime_deps = set()
    
    54
    +    for line in lines:
    
    55
    +        # It is safe to split on '|' as it is not a valid character for
    
    56
    +        # element names.
    
    57
    +        name, build_dep, runtime_dep = line.split('|')
    
    58
    +        build_dep = build_dep.lstrip('[').rstrip(']').split(',')
    
    59
    +        runtime_dep = runtime_dep.lstrip('[').rstrip(']').split(',')
    
    60
    +        nodes.add(name)
    
    61
+        build_deps.update((name, dep) for dep in build_dep if dep)

62
+        runtime_deps.update((name, dep) for dep in runtime_dep if dep)
    
    63
    +
    
    64
    +    return nodes, build_deps, runtime_deps
    
    65
    +
    
    66
    +
    
    67
    +def generate_graph(nodes, build_deps, runtime_deps):
    
    68
    +    '''Generate graph from given nodes and edges.
    
    69
    +
    
    70
    +    Args:
    
    71
    +       nodes: set of nodes
    
    72
+       build_deps: set of tuples of build dependencies
    
    73
+       runtime_deps: set of tuples of runtime dependencies
    
    74
    +
    
    75
    +    Returns:
    
    76
    +       A graphviz.Digraph object
    
    77
    +    '''
    
    78
    +    graph = Digraph()
    
    79
    +    for node in nodes:
    
    80
    +        graph.node(node)
    
    81
    +    for source, target in build_deps:
    
    82
    +        graph.edge(source, target, label='build-dep')
    
    83
    +    for source, target in runtime_deps:
    
    84
    +        graph.edge(source, target, label='runtime-dep')
    
    85
    +    return graph
    
    86
    +
    
    87
    +
    
    88
    +def main():
    
    89
    +    args = parse_args()
    
    90
    +    cmd = ['bst', 'show', '--format', '%{name}|%{build-deps}|%{runtime-deps}']
    
    91
+    if args.ELEMENT:

92
+        cmd += args.ELEMENT
    
    93
    +    graph_lines = subprocess.check_output(cmd, universal_newlines=True)
    
    94
    +    # NOTE: We generate nodes and edges before giving them to graphviz as
    
    95
+    # the library does not de-duplicate them.
    
    96
    +    nodes, build_deps, runtime_deps = parse_graph(graph_lines.splitlines())
    
    97
    +    graph = generate_graph(nodes, build_deps, runtime_deps)
    
    98
    +    print(graph.source)
    
    99
    +    if args.format:
    
    100
    +        graph.render(cleanup=True,
    
    101
    +                     filename='bst-graph',
    
    102
    +                     format=args.format,
    
    103
    +                     view=args.view)
    
    104
    +
    
    105
    +
    
    106
    +if __name__ == '__main__':
    
    107
    +    main()

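To illustrate the parsing stage of bst-graph, here is what parse_graph() produces for some hypothetical `bst show` output in the '%{name}|%{build-deps}|%{runtime-deps}' format (element names invented for the example):

    lines = [
        'app.bst|[base.bst,compiler.bst]|[base.bst]',
        'compiler.bst|[base.bst]|[]',
        'base.bst|[]|[]',
    ]
    nodes, build_deps, runtime_deps = parse_graph(lines)
    assert nodes == {'app.bst', 'compiler.bst', 'base.bst'}
    assert ('app.bst', 'compiler.bst') in build_deps
    assert ('app.bst', 'base.bst') in runtime_deps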
  • contrib/bst-here
    ... ... @@ -25,16 +25,22 @@
    25 25
     usage() {
    
    26 26
         cat <<EOF
    
    27 27
     
    
    28
    -USAGE: $(basename "$0") [-i BST_HERE_IMAGE] [-p] [-t] [-T] [-v VOLUME ...] [-h] [COMMAND [ARG..]]
    
    28
    +USAGE: $(basename "$0") [-i BST_HERE_IMAGE] [-j TAG] [-p] [-t] [-T] [-v VOLUME ...] [-h] [COMMAND [ARG..]]
    
    29 29
     
    
    30 30
     Run a bst command in a new BuildStream container.
    
    31 31
     
    
    32 32
     If no command is specified, an interactive shell is launched
    
    33 33
     using "/bin/bash -i".
    
    34 34
     
    
    35
    +See https://hub.docker.com/r/buildstream/buildstream for details on image
    
    36
    +variants.
    
    37
    +
    
    35 38
     OPTIONS:
    
    36 39
         -i IMAGE      Specify Docker image to use; can also be specified by setting
    
    37 40
                       BST_HERE_IMAGE environment variable.
    
    41
    +                  (default: buildstream/buildstream)
    
    42
    +    -j TAG        Specify the tag of the Docker image to use.
    
    43
    +                  (default: latest)
    
    38 44
         -p            Pull the latest buildstream image before running.
    
    39 45
         -t            Force pseudo-terminal allocation.
    
    40 46
         -T            Disable pseudo-terminal allocation.
    
    ... ... @@ -46,7 +52,8 @@ EOF
    46 52
         exit "$1"
    
    47 53
     }
    
    48 54
     
    
    49
    -bst_here_image="${BST_HERE_IMAGE:-buildstream/buildstream-fedora:latest}"
    
    55
    +bst_here_image="${BST_HERE_IMAGE:-buildstream/buildstream}"
    
    56
    +bst_here_tag=
    
    50 57
     
    
    51 58
     is_tty=
    
    52 59
     update=false
    
    ... ... @@ -57,12 +64,15 @@ then
    57 64
         is_tty=y
    
    58 65
     fi
    
    59 66
     
    
    60
    -while getopts i:ptTv:h arg
    
    67
    +while getopts i:j:ptTv:h arg
    
    61 68
     do
    
    62 69
         case $arg in
    
    63 70
         i)
    
    64 71
             bst_here_image="$OPTARG"
    
    65 72
             ;;
    
    73
    +    j)
    
    74
    +        bst_here_tag="$OPTARG"
    
    75
    +        ;;
    
    66 76
         p)
    
    67 77
             update=true
    
    68 78
             ;;
    
    ... ... @@ -83,6 +93,10 @@ do
    83 93
         esac
    
    84 94
     done
    
    85 95
     
    
    96
    +if [ -n "$bst_here_tag" ]; then
    
    97
    +    bst_here_image="$bst_here_image:$bst_here_tag"
    
    98
    +fi
    
    99
    +
    
    86 100
     test "$OPTIND" -gt 1 &&
    
    87 101
         shift $(( OPTIND - 1 ))
    
    88 102
     
    

  • tests/frontend/buildcheckout.py
    ... ... @@ -252,6 +252,26 @@ def test_build_checkout_tarball_stdout(datafiles, cli):
    252 252
         assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
    
    253 253
     
    
    254 254
     
    
    255
    +@pytest.mark.datafiles(DATA_DIR)
    
    256
    +def test_build_checkout_tarball_mtime_nonzero(datafiles, cli):
    
    257
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    258
    +    tarpath = os.path.join(cli.directory, 'mtime_tar.tar')
    
    259
    +
    
    260
    +    result = cli.run(project=project, args=['build', 'target.bst'])
    
    261
    +    result.assert_success()
    
    262
    +
    
    263
    +    checkout_args = ['artifact', 'checkout', '--tar', tarpath, 'target.bst']
    
    264
    +    result = cli.run(project=project, args=checkout_args)
    
    265
    +    result.assert_success()
    
    266
    +
    
    267
    +    tar = tarfile.TarFile(tarpath)
    
    268
    +    for tarinfo in tar.getmembers():
    
    269
    +        # An mtime of zero can be confusing to other software,
    
    270
+        # e.g. Ninja and Template Toolkit have both treated a zero mtime as

271
+        # meaning 'file does not exist'.
    
    272
    +        assert tarinfo.mtime > 0
    
    273
    +
    
    274
    +
    
    255 275
     @pytest.mark.datafiles(DATA_DIR)
    
    256 276
     def test_build_checkout_tarball_is_deterministic(datafiles, cli):
    
    257 277
         project = os.path.join(datafiles.dirname, datafiles.basename)
    

  • tests/frontend/workspace.py
    ... ... @@ -1184,6 +1184,7 @@ def test_external_close_other(cli, datafiles, tmpdir_factory):
    1184 1184
     
    
    1185 1185
         result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close', beta_element])
    
    1186 1186
         result.assert_success()
    
    1187
    +    assert 'you can no longer run BuildStream' not in result.stderr
    
    1187 1188
     
    
    1188 1189
     
    
    1189 1190
     @pytest.mark.datafiles(DATA_DIR)
    
    ... ... @@ -1199,6 +1200,7 @@ def test_external_close_self(cli, datafiles, tmpdir_factory, guess_element):
    1199 1200
     
    
    1200 1201
         result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close'] + arg_elm)
    
    1201 1202
         result.assert_success()
    
    1203
    +    assert 'you can no longer run BuildStream' in result.stderr
    
    1202 1204
     
    
    1203 1205
     
    
    1204 1206
     @pytest.mark.datafiles(DATA_DIR)
    


