[Notes] [Git][BuildStream/buildstream][jennis/refactor_artifact_log] 13 commits: element.py: Make calculate_cache_key() API private




James Ennis pushed to branch jennis/refactor_artifact_log at BuildStream / buildstream

Commits:

9 changed files:

  • buildstream/_artifactcache.py
  • buildstream/_artifactelement.py
  • buildstream/_cas/cascache.py
  • buildstream/_exceptions.py
  • buildstream/_frontend/cli.py
  • buildstream/_loader/metaelement.py
  • buildstream/_project.py
  • buildstream/_stream.py
  • buildstream/element.py

Changes:

  • buildstream/_artifactcache.py

    @@ -513,11 +513,14 @@ class ArtifactCache():
         #
         # List artifacts in this cache in LRU order.
         #
    +    # Args:
    +    #     glob (str): An optional glob expression to be used to list artifacts satisfying the glob
    +    #
         # Returns:
         #     ([str]) - A list of artifact names as generated in LRU order
         #
    -    def list_artifacts(self):
    -        return self.cas.list_refs()
    +    def list_artifacts(self, *, glob=None):
    +        return self.cas.list_refs(glob=glob)

         # remove():
         #

    @@ -821,6 +824,20 @@ class ArtifactCache():

             self.cas.link_ref(oldref, newref)

    +    # get_artifact_logs():
    +    #
    +    # Get the logs of an existing artifact
    +    #
    +    # Args:
    +    #     ref (str): The ref of the artifact
    +    #
    +    # Returns:
    +    #     logsdir (CasBasedDirectory): A CasBasedDirectory containing the artifact's logs
    +    #
    +    def get_artifact_logs(self, ref):
    +        descend = ["logs"]
    +        return self.cas.get_toplevel_dir(ref, descend)
    +
         ################################################
         #               Local Private Methods          #
         ################################################
    

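    A minimal usage sketch of the new ArtifactCache API above, assuming an already-initialised
    ArtifactCache instance named `cache` (the instance name and the "gnome/*" glob are
    illustrative only):

        import tempfile

        # List cached artifact refs matching a glob, in LRU order
        refs = cache.list_artifacts(glob="gnome/*")

        # Fetch the logs of the first match as a CasBasedDirectory and
        # export them somewhere readable
        if refs:
            logsdir = cache.get_artifact_logs(refs[0])
            with tempfile.TemporaryDirectory() as td:
                logsdir.export_files(td, can_link=True)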
  • buildstream/_artifactelement.py

    @@ -0,0 +1,66 @@
    +#
    +#  Copyright (C) 2019 Bloomberg Finance LP
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +#
    +#  Authors:
    +#        James Ennis <james.ennis@codethink.co.uk>
    +from . import Element
    +from ._exceptions import ArtifactElementError
    +from ._loader.metaelement import MetaElement
    +
    +
    +# ArtifactElement()
    +#
    +# Object to be used for directly processing an artifact
    +#
    +# Args:
    +#    context (Context): The Context object
    +#    ref (str): The artifact ref
    +#
    +class ArtifactElement(Element):
    +    def __init__(self, context, ref):
    +        self._ref = ref
    +
    +        # Ensure that the provided ref matches the form of an artifact
    +        try:
    +            _, element, key = ref.split('/', 2)
    +        except ValueError:
    +            raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
    +        if len(key) != 64:
    +            raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
    +
    +        self._key = key
    +
    +        project = context.get_toplevel_project()
    +        meta = MetaElement(project, element)  # NOTE element has no .bst suffix
    +        plugin_conf = None
    +
    +        super().__init__(context, project, meta, plugin_conf)
    +
    +    # Override Element.get_artifact_name()
    +    def get_artifact_name(self, key=None):
    +        return self._ref
    +
    +    # Dummy configure method
    +    def configure(self, node):
    +        pass
    +
    +    # Dummy preflight method
    +    def preflight(self):
    +        pass
    +
    +    # Override Element._calculate_cache_key
    +    def _calculate_cache_key(self, dependencies=None):
    +        return self._key

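    For reference, the ref validation above expects the form "<project>/<element>/<cache key>",
    where the key is a 64-character cache key digest. A standalone sketch of the same check
    (hypothetical helper name, for illustration only):

        def parse_artifact_ref(ref):
            # Mirrors the validation in ArtifactElement.__init__()
            try:
                project, element, key = ref.split('/', 2)
            except ValueError:
                raise ValueError("Artifact: {} is not of the expected format".format(ref))
            if len(key) != 64:
                raise ValueError("Artifact: {} is not of the expected format".format(ref))
            return project, element, key

        parse_artifact_ref("test/target/" + "0" * 64)   # OK
        parse_artifact_ref("test/target")                # raises ValueError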
  • buildstream/_cas/cascache.py

    @@ -24,6 +24,7 @@ import stat
     import errno
     import uuid
     import contextlib
    +from fnmatch import fnmatch

     import grpc

    @@ -32,6 +33,7 @@ from .._protos.buildstream.v2 import buildstream_pb2

     from .. import utils
     from .._exceptions import CASCacheError
    +from ..storage._casbaseddirectory import CasBasedDirectory

     from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate

    @@ -472,22 +474,35 @@ class CASCache():
         #
         # List refs in Least Recently Modified (LRM) order.
         #
    +    # Args:
    +    #     glob (str) - An optional glob expression to be used to list refs satisfying the glob
    +    #
         # Returns:
         #     (list) - A list of refs in LRM order
         #
    -    def list_refs(self):
    +    def list_refs(self, *, glob=None):
             # string of: /path/to/repo/refs/heads
             ref_heads = os.path.join(self.casdir, 'refs', 'heads')
    +        path = ref_heads
    +
    +        if glob is not None:
    +            globdir = os.path.dirname(glob)
    +            if not any(c in "*?[" for c in globdir):
    +                # path prefix contains no globbing characters so
    +                # append the glob to optimise the os.walk()
    +                path = os.path.join(ref_heads, globdir)

             refs = []
             mtimes = []

    -        for root, _, files in os.walk(ref_heads):
    +        for root, _, files in os.walk(path):
                 for filename in files:
                     ref_path = os.path.join(root, filename)
    -                refs.append(os.path.relpath(ref_path, ref_heads))
    -                # Obtain the mtime (the time a file was last modified)
    -                mtimes.append(os.path.getmtime(ref_path))
    +                relative_path = os.path.relpath(ref_path, ref_heads)  # Relative to refs head
    +                if not glob or fnmatch(relative_path, glob):
    +                    refs.append(relative_path)
    +                    # Obtain the mtime (the time a file was last modified)
    +                    mtimes.append(os.path.getmtime(ref_path))

             # NOTE: Sorted will sort from earliest to latest, thus the
             # first ref of this list will be the file modified earliest.

    @@ -587,6 +602,22 @@ class CASCache():
             reachable = set()
             self._reachable_refs_dir(reachable, tree, update_mtime=True)

    +    # get_toplevel_dir()
    +    #
    +    # Return a CasBasedDirectory object of the specified sub_directories
    +    #
    +    # Args:
    +    #     ref (str): The artifact ref
    +    #     descend (list): A list of strings of artifact subdirectories
    +    #
    +    # Returns:
    +    #     (CasBasedDirectory): The CasBasedDirectory object
    +    #
    +    def get_toplevel_dir(self, ref, descend):
    +        cache_id = self.resolve_ref(ref, update_mtime=True)
    +        vdir = CasBasedDirectory(self, cache_id).descend(descend)
    +        return vdir
    +
         ################################################
         #             Local Private Methods            #
         ################################################
    

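    The glob support added to list_refs() above walks the refs directory and filters with
    fnmatch(); when the directory part of the glob is literal, the walk starts below that
    prefix instead of scanning the whole tree. A standalone sketch of that behaviour,
    assuming `ref_heads` points at a local <casdir>/refs/heads directory:

        import os
        from fnmatch import fnmatch

        def list_refs_sketch(ref_heads, glob=None):
            path = ref_heads
            if glob is not None:
                globdir = os.path.dirname(glob)
                if not any(c in "*?[" for c in globdir):
                    # Literal directory prefix: start walking below it
                    path = os.path.join(ref_heads, globdir)

            refs = []
            for root, _, files in os.walk(path):
                for filename in files:
                    relative_path = os.path.relpath(os.path.join(root, filename), ref_heads)
                    if not glob or fnmatch(relative_path, glob):
                        refs.append(relative_path)
            return refs

        # e.g. list_refs_sketch("/path/to/cas/refs/heads", glob="project/target/*")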
  • buildstream/_exceptions.py

    @@ -344,3 +344,12 @@ class AppError(BstError):
     #
     class SkipJob(Exception):
         pass
    +
    +
    +# ArtifactElementError
    +#
    +# Raised when errors are encountered by artifact elements
    +#
    +class ArtifactElementError(BstError):
    +    def __init__(self, message, *, detail=None, reason=None, temporary=False):
    +        super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=True)

  • buildstream/_frontend/cli.py

    @@ -1,7 +1,6 @@
     import os
     import sys
     from contextlib import ExitStack
    -from fnmatch import fnmatch
     from functools import partial
     from tempfile import TemporaryDirectory

    @@ -895,38 +894,6 @@ def workspace_list(app)
     #############################################################
     #                     Artifact Commands                     #
     #############################################################
    -def _classify_artifacts(names, cas, project_directory):
    -    element_targets = []
    -    artifact_refs = []
    -    element_globs = []
    -    artifact_globs = []
    -
    -    for name in names:
    -        if name.endswith('.bst'):
    -            if any(c in "*?[" for c in name):
    -                element_globs.append(name)
    -            else:
    -                element_targets.append(name)
    -        else:
    -            if any(c in "*?[" for c in name):
    -                artifact_globs.append(name)
    -            else:
    -                artifact_refs.append(name)
    -
    -    if element_globs:
    -        for dirpath, _, filenames in os.walk(project_directory):
    -            for filename in filenames:
    -                element_path = os.path.join(dirpath, filename).lstrip(project_directory).lstrip('/')
    -                if any(fnmatch(element_path, glob) for glob in element_globs):
    -                    element_targets.append(element_path)
    -
    -    if artifact_globs:
    -        artifact_refs.extend(ref for ref in cas.list_refs()
    -                             if any(fnmatch(ref, glob) for glob in artifact_globs))
    -
    -    return element_targets, artifact_refs
    -
    -
     @cli.group(short_help="Manipulate cached artifacts")
     def artifact():
         """Manipulate cached artifacts"""

    @@ -1105,53 +1072,30 @@ def artifact_push(app, elements, deps, remote):
     @click.pass_obj
     def artifact_log(app, artifacts):
         """Show logs of all artifacts"""
    -    from .._exceptions import CASError
    -    from .._message import MessageType
    -    from .._pipeline import PipelineSelection
    -    from ..storage._casbaseddirectory import CasBasedDirectory
    -
    -    with ExitStack() as stack:
    -        stack.enter_context(app.initialized())
    -        cache = app.context.artifactcache
    +    # Guess the element if we're in a workspace
    +    if not artifacts:
    +        guessed_target = app.context.guess_element()
    +        if guessed_target:
    +            artifacts = [guessed_target]

    -        elements, artifacts = _classify_artifacts(artifacts, cache.cas,
    -                                                  app.project.directory)
    -
    -        vdirs = []
    -        extractdirs = []
    -        if artifacts:
    -            for ref in artifacts:
    -                try:
    -                    cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
    -                    vdir = CasBasedDirectory(cache.cas, cache_id)
    -                    vdirs.append(vdir)
    -                except CASError as e:
    -                    app._message(MessageType.WARN, "Artifact {} is not cached".format(ref), detail=str(e))
    -                    continue
    -        if elements:
    -            elements = app.stream.load_selection(elements, selection=PipelineSelection.NONE)
    -            for element in elements:
    -                if not element._cached():
    -                    app._message(MessageType.WARN, "Element {} is not cached".format(element))
    -                    continue
    -                ref = cache.get_artifact_fullname(element, element._get_cache_key())
    -                cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
    -                vdir = CasBasedDirectory(cache.cas, cache_id)
    -                vdirs.append(vdir)
    -
    -        for vdir in vdirs:
    -            # NOTE: If reading the logs feels unresponsive, here would be a good place to provide progress information.
    -            logsdir = vdir.descend(["logs"])
    -            td = stack.enter_context(TemporaryDirectory())
    -            logsdir.export_files(td, can_link=True)
    -            extractdirs.append(td)
    -
    -        for extractdir in extractdirs:
    -            for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
    -                # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
    -                with open(log) as f:
    -                    data = f.read()
    -                    click.echo_via_pager(data)
    +    with app.initialized():
    +        logsdirs = app.stream.artifact_log(artifacts)
    +
    +        with ExitStack() as stack:
    +            extractdirs = []
    +            for logsdir in logsdirs:
    +                # NOTE: If reading the logs feels unresponsive, here would be a good place
    +                # to provide progress information.
    +                td = stack.enter_context(TemporaryDirectory())
    +                logsdir.export_files(td, can_link=True)
    +                extractdirs.append(td)
    +
    +            for extractdir in extractdirs:
    +                for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
    +                    # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
    +                    with open(log) as f:
    +                        data = f.read()
    +                        click.echo_via_pager(data)


     ##################################################################

  • buildstream/_loader/metaelement.py

    @@ -38,20 +38,20 @@ class MetaElement():
         #    sandbox: Configuration specific to the sandbox environment
         #    first_pass: The element is to be loaded with first pass configuration (junction)
         #
    -    def __init__(self, project, name, kind, provenance, sources, config,
    -                 variables, environment, env_nocache, public, sandbox,
    -                 first_pass):
    +    def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
    +                 variables=None, environment=None, env_nocache=None, public=None,
    +                 sandbox=None, first_pass=False):
             self.project = project
             self.name = name
             self.kind = kind
             self.provenance = provenance
             self.sources = sources
    -        self.config = config
    -        self.variables = variables
    -        self.environment = environment
    -        self.env_nocache = env_nocache
    -        self.public = public
    -        self.sandbox = sandbox
    +        self.config = config or {}
    +        self.variables = variables or {}
    +        self.environment = environment or {}
    +        self.env_nocache = env_nocache or []
    +        self.public = public or {}
    +        self.sandbox = sandbox or {}
             self.build_dependencies = []
             self.dependencies = []
             self.first_pass = first_pass

  • buildstream/_project.py

    @@ -26,6 +26,7 @@ from . import utils
     from . import _cachekey
     from . import _site
     from . import _yaml
    +from ._artifactelement import ArtifactElement
     from ._profile import Topics, profile_start, profile_end
     from ._exceptions import LoadError, LoadErrorReason
     from ._options import OptionPool

    @@ -255,6 +256,19 @@ class Project():
             else:
                 return self.config.element_factory.create(self._context, self, meta)

    +    # create_artifact_element()
    +    #
    +    # Instantiate and return an ArtifactElement
    +    #
    +    # Args:
    +    #    ref (str): A string of the artifact ref
    +    #
    +    # Returns:
    +    #    (ArtifactElement): A newly created ArtifactElement object of the appropriate kind
    +    #
    +    def create_artifact_element(self, ref):
    +        return ArtifactElement(self._context, ref)
    +
         # create_source()
         #
         # Instantiate and return a Source
    

  • buildstream/_stream.py

    @@ -27,8 +27,9 @@ import shutil
     import tarfile
     import tempfile
     from contextlib import contextmanager, suppress
    +from fnmatch import fnmatch

    -from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
    +from ._exceptions import StreamError, ImplError, BstError, ArtifactElementError, set_last_task_error
     from ._message import Message, MessageType
     from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
     from ._pipeline import Pipeline, PipelineSelection

    @@ -108,19 +109,21 @@ class Stream():
         def load_selection(self, targets, *,
                            selection=PipelineSelection.NONE,
                            except_targets=(),
    -                       use_artifact_config=False):
    +                       use_artifact_config=False,
    +                       load_refs=False):

             profile_start(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))

    -        elements, _ = self._load(targets, (),
    -                                 selection=selection,
    -                                 except_targets=except_targets,
    -                                 fetch_subprojects=False,
    -                                 use_artifact_config=use_artifact_config)
    +        target_objects, _ = self._load(targets, (),
    +                                       selection=selection,
    +                                       except_targets=except_targets,
    +                                       fetch_subprojects=False,
    +                                       use_artifact_config=use_artifact_config,
    +                                       load_refs=load_refs)

             profile_end(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))

    -        return elements
    +        return target_objects

         # shell()
         #

    @@ -481,6 +484,31 @@ class Stream():
                 raise StreamError("Error while staging dependencies into a sandbox"
                                   ": '{}'".format(e), detail=e.detail, reason=e.reason) from e

    +    # artifact_log()
    +    #
    +    # Show the full log of an artifact
    +    #
    +    # Args:
    +    #    targets (str): Targets to view the logs of
    +    #
    +    # Returns:
    +    #    logsdir (list): A list of CasBasedDirectory objects containing artifact logs
    +    #
    +    def artifact_log(self, targets):
    +        # Return list of Element and/or ArtifactElement objects
    +        target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
    +
    +        logsdirs = []
    +        for obj in target_objects:
    +            ref = obj.get_artifact_name()
    +            if not obj._cached():
    +                self._message(MessageType.WARN, "{} is not cached".format(ref))
    +                continue
    +
    +            logsdirs.append(self._artifacts.get_artifact_logs(ref))
    +
    +        return logsdirs
    +
         # source_checkout()
         #
         # Checkout sources of the target element to the specified location

    @@ -912,25 +940,35 @@ class Stream():
                   use_artifact_config=False,
                   artifact_remote_url=None,
                   fetch_subprojects=False,
    -              dynamic_plan=False):
    +              dynamic_plan=False,
    +              load_refs=False):
    +
    +        # Classify element and artifact strings
    +        target_elements, target_artifacts = self._classify_artifacts(targets)
    +
    +        if target_artifacts and not load_refs:
    +            detail = ''.join(target_artifacts)
    +            raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)

             # Load rewritable if we have any tracking selection to make
             rewritable = False
             if track_targets:
                 rewritable = True

    -        # Load all targets
    +        # Load all target elements
             elements, except_elements, track_elements, track_except_elements = \
    -            self._pipeline.load([targets, except_targets, track_targets, track_except_targets],
    +            self._pipeline.load([target_elements, except_targets, track_targets, track_except_targets],
                                     rewritable=rewritable,
                                     fetch_subprojects=fetch_subprojects)

    +        artifacts = self._load_refs(target_artifacts)
    +
             # Optionally filter out junction elements
             if ignore_junction_targets:
                 elements = [e for e in elements if e.get_kind() != 'junction']

             # Hold on to the targets
    -        self.targets = elements
    +        self.targets = elements + artifacts

             # Here we should raise an error if the track_elements targets
             # are not dependencies of the primary targets, this is not

    @@ -987,9 +1025,9 @@ class Stream():

             # Now move on to loading primary selection.
             #
    -        self._pipeline.resolve_elements(elements)
    -        selected = self._pipeline.get_selection(elements, selection, silent=False)
    -        selected = self._pipeline.except_elements(elements,
    +        self._pipeline.resolve_elements(self.targets)
    +        selected = self._pipeline.get_selection(self.targets, selection, silent=False)
    +        selected = self._pipeline.except_elements(self.targets,
                                                       selected,
                                                       except_elements)

    @@ -1013,6 +1051,20 @@ class Stream():

             return selected, track_selected

    +    # _load_refs()
    +    #
    +    #  Create and resolve ArtifactElement objects
    +    #
    +    def _load_refs(self, refs):
    +        artifact_elements = []
    +        for ref in refs:
    +            artifact_element = self._project.create_artifact_element(ref)
    +            artifact_elements.append(artifact_element)
    +
    +        self._pipeline.resolve_elements(artifact_elements)
    +
    +        return artifact_elements
    +
         # _message()
         #
         # Local message propagator

    @@ -1321,3 +1373,59 @@ class Stream():
                     required_list.append(element)

             return required_list
    +
    +    # _classify_artifacts()
    +    #
    +    # Split up a list of targets into element names and artifact refs
    +    #
    +    # Args:
    +    #    targets (list): A list of targets
    +    #
    +    # Returns:
    +    #    (list): element names present in the targets
    +    #    (list): artifact refs present in the targets
    +    #
    +    def _classify_artifacts(self, targets):
    +        element_targets = []
    +        artifact_refs = []
    +        element_globs = []
    +        artifact_globs = []
    +
    +        for target in targets:
    +            if target.endswith('.bst'):
    +                if any(c in "*?[" for c in target):
    +                    element_globs.append(target)
    +                else:
    +                    element_targets.append(target)
    +            else:
    +                if any(c in "*?[" for c in target):
    +                    artifact_globs.append(target)
    +                else:
    +                    try:
    +                        ref = target.split('/', 2)
    +                        key = ref[2]
    +                    except IndexError:
    +                        element_targets.append(target)
    +                        continue
    +                    if not len(key) == 64:
    +                        element_targets.append(target)
    +                        continue
    +                    artifact_refs.append(target)
    +
    +        if element_globs:
    +            for dirpath, _, filenames in os.walk(self._project.element_path):
    +                for filename in filenames:
    +                    element_path = os.path.join(dirpath, filename)
    +                    length = len(self._project.element_path) + 1
    +                    element_path = element_path[length:]  # Strip out the element_path
    +
    +                    if any(fnmatch(element_path, glob) for glob in element_globs):
    +                        element_targets.append(element_path)
    +
    +        if artifact_globs:
    +            for glob in artifact_globs:
    +                artifact_refs = self._artifacts.list_artifacts(glob=glob)
    +            if not artifact_refs:
    +                self._message(MessageType.WARN, "No artifacts found for globs: {}".format(', '.join(artifact_globs)))
    +
    +        return element_targets, artifact_refs

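    The classification rule used by Stream._classify_artifacts() above is: names ending in
    ".bst" are element targets, anything containing a glob character ("*", "?" or "[") is
    treated as a glob, and everything else must look like "<project>/<element>/<64-character key>"
    to be taken as an artifact ref. A condensed sketch of the same rule (hypothetical helper,
    for illustration only):

        def classify(target):
            if target.endswith('.bst'):
                return 'element-glob' if any(c in "*?[" for c in target) else 'element'
            if any(c in "*?[" for c in target):
                return 'artifact-glob'
            parts = target.split('/', 2)
            if len(parts) == 3 and len(parts[2]) == 64:
                return 'artifact-ref'
            return 'element'

        assert classify("hello.bst") == 'element'
        assert classify("core/*.bst") == 'element-glob'
        assert classify("test/hello/*") == 'artifact-glob'
        assert classify("test/hello/" + "a" * 64) == 'artifact-ref'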
  • buildstream/element.py

    @@ -1150,7 +1150,7 @@ class Element(Plugin):
                         e.name for e in self.dependencies(Scope.BUILD, recurse=False)
                     ]

    -            self.__weak_cache_key = self.__calculate_cache_key(dependencies)
    +            self.__weak_cache_key = self._calculate_cache_key(dependencies)

                 if self.__weak_cache_key is None:
                     # Weak cache key could not be calculated yet

    @@ -1179,8 +1179,7 @@
                 dependencies = [
                     e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
                 ]
    -            self.__strict_cache_key = self.__calculate_cache_key(dependencies)
    -
    +            self.__strict_cache_key = self._calculate_cache_key(dependencies)
                 if self.__strict_cache_key is None:
                     # Strict cache key could not be calculated yet
                     return

    @@ -1222,7 +1221,7 @@
                     dependencies = [
                         e._get_cache_key() for e in self.dependencies(Scope.BUILD)
                     ]
    -                self.__cache_key = self.__calculate_cache_key(dependencies)
    +                self.__cache_key = self._calculate_cache_key(dependencies)

                 if self.__cache_key is None:
                     # Strong cache key could not be calculated yet

    @@ -2064,41 +2063,7 @@ class Element(Plugin):
                     source._fetch(previous_sources)
                 previous_sources.append(source)

    -    #############################################################
    -    #                   Private Local Methods                   #
    -    #############################################################
    -
    -    # __update_source_state()
    -    #
    -    # Updates source consistency state
    -    #
    -    def __update_source_state(self):
    -
    -        # Cannot resolve source state until tracked
    -        if self.__tracking_scheduled:
    -            return
    -
    -        self.__consistency = Consistency.CACHED
    -        workspace = self._get_workspace()
    -
    -        # Special case for workspaces
    -        if workspace:
    -
    -            # A workspace is considered inconsistent in the case
    -            # that its directory went missing
    -            #
    -            fullpath = workspace.get_absolute_path()
    -            if not os.path.exists(fullpath):
    -                self.__consistency = Consistency.INCONSISTENT
    -        else:
    -
    -            # Determine overall consistency of the element
    -            for source in self.__sources:
    -                source._update_state()
    -                source_consistency = source._get_consistency()
    -                self.__consistency = min(self.__consistency, source_consistency)
    -
    -    # __calculate_cache_key():
    +    # _calculate_cache_key():
         #
         # Calculates the cache key
         #

    @@ -2107,7 +2072,7 @@
         #
         # None is returned if information for the cache key is missing.
         #
    -    def __calculate_cache_key(self, dependencies):
    +    def _calculate_cache_key(self, dependencies):
             # No cache keys for dependencies which have no cache keys
             if None in dependencies:
                 return None

    @@ -2146,6 +2111,40 @@

             return _cachekey.generate_key(cache_key_dict)

    +    #############################################################
    +    #                   Private Local Methods                   #
    +    #############################################################
    +
    +    # __update_source_state()
    +    #
    +    # Updates source consistency state
    +    #
    +    def __update_source_state(self):
    +
    +        # Cannot resolve source state until tracked
    +        if self.__tracking_scheduled:
    +            return
    +
    +        self.__consistency = Consistency.CACHED
    +        workspace = self._get_workspace()
    +
    +        # Special case for workspaces
    +        if workspace:
    +
    +            # A workspace is considered inconsistent in the case
    +            # that its directory went missing
    +            #
    +            fullpath = workspace.get_absolute_path()
    +            if not os.path.exists(fullpath):
    +                self.__consistency = Consistency.INCONSISTENT
    +        else:
    +
    +            # Determine overall consistency of the element
    +            for source in self.__sources:
    +                source._update_state()
    +                source_consistency = source._get_consistency()
    +                self.__consistency = min(self.__consistency, source_consistency)
    +
         # __can_build_incrementally()
         #
         # Check if the element can be built incrementally, this

    @@ -2329,6 +2328,8 @@ class Element(Plugin):
             defaults['public'] = element_public

         def __init_defaults(self, plugin_conf):
    +        if plugin_conf is None:
    +            return

             # Defaults are loaded once per class and then reused
             #
    
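    The calculate_cache_key() rename in element.py is what makes the ArtifactElement override
    possible: a name-mangled double-underscore method cannot be overridden from a subclass,
    whereas a single leading underscore keeps it API-private while remaining overridable.
    Illustrative example of that Python behaviour (not BuildStream code):

        class Base:
            def __key(self):               # name-mangled to _Base__key
                return "base"

            def _key(self):                # API-private, but overridable
                return "base"

            def mangled(self):
                return self.__key()        # always resolves to _Base__key

            def private(self):
                return self._key()         # resolves to a subclass override


        class Child(Base):
            def __key(self):               # mangled to _Child__key, never used by Base
                return "child"

            def _key(self):
                return "child"


        c = Child()
        assert c.mangled() == "base"       # the double-underscore "override" is ignored
        assert c.private() == "child"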


