[Notes] [Git][BuildStream/buildstream][mablanch/447-stack-trace-checkout] 17 commits: HACKING.rst: Add note about asking for dev permissions



Martin Blanchard pushed to branch mablanch/447-stack-trace-checkout at BuildStream / buildstream

Commits:

13 changed files:

Changes:

  • HACKING.rst
    @@ -23,6 +23,11 @@ a reasonable timeframe for identifying these.
     
     Patch submissions
     -----------------
    +If you want to submit a patch, do ask for developer permissions on our
    +IRC channel first (GitLab's button also works, but you may need to
    +shout about it - we often overlook this) - for CI reasons, it's much
    +easier if patches are in branches of the main repository.
    +
     Branches must be submitted as merge requests in gitlab. If the branch
     fixes an issue or is related to any issues, these issues must be mentioned
     in the merge request or preferably the commit messages themselves.
    

  • NEWS
    @@ -11,6 +11,13 @@ buildstream 1.1.5
     
       o Added new `remote` source plugin for downloading file blobs
     
    +  o Failed builds are included in the cache as well.
    +    `bst checkout` will provide anything in `%{install-root}`.
    +    A build including cached fails will cause any dependant elements
    +    to not be scheduled and fail during artifact assembly,
    +    and display the retry prompt during an interactive session.
    +
    +
     =================
     buildstream 1.1.4
     =================
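
    In practice the workflow described in that NEWS entry reads as below. This is a minimal
    sketch condensed from the new tests/integration/cachedfail.py added further down in this
    push; the `cli` integration fixture, the `base.bst` dependency, the `element.bst` name and
    the `project`/`checkout` paths are all taken from that test rather than being a separate
    public API, so treat it as an illustration only.

        # Illustration only: a script element that produces partial output and then fails.
        element = {
            'kind': 'script',
            'depends': [{'filename': 'base.bst', 'type': 'build'}],
            'config': {
                'commands': [
                    'touch %{install-root}/foo',   # partial output lands in the install root
                    'false',                       # then the build fails
                ],
            },
        }
        _yaml.dump(element, os.path.join(project, 'elements', 'element.bst'))

        # The build fails, but the failed build is still committed to the artifact cache ...
        result = cli.run(project=project, args=['build', 'element.bst'])
        result.assert_main_error(ErrorDomain.STREAM, None)
        assert cli.get_element_state(project, 'element.bst') == 'failed'

        # ... so `bst checkout` can still deliver whatever reached %{install-root}.
        result = cli.run(project=project, args=['checkout', 'element.bst', checkout])
        result.assert_success()
        assert os.path.exists(os.path.join(checkout, 'foo'))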
    

  • buildstream/_frontend/widget.py
    @@ -368,7 +368,9 @@ class LogLine(Widget):
                 if consistency == Consistency.INCONSISTENT:
                     line = p.fmt_subst(line, 'state', "no reference", fg='red')
                 else:
    -                if element._cached():
    +                if element._cached_failure():
    +                    line = p.fmt_subst(line, 'state', "failed", fg='red')
    +                elif element._cached_success():
                         line = p.fmt_subst(line, 'state', "cached", fg='magenta')
                     elif consistency == Consistency.RESOLVED:
                         line = p.fmt_subst(line, 'state', "fetch needed", fg='red')

  • buildstream/_pipeline.py
    @@ -489,7 +489,7 @@ class _Planner():
                 self.plan_element(dep, depth)
     
             # Dont try to plan builds of elements that are cached already
    -        if not element._cached():
    +        if not element._cached_success():
             for dep in element.dependencies(Scope.BUILD, recurse=False):
                 self.plan_element(dep, depth + 1)
     
    @@ -501,4 +501,4 @@
                 self.plan_element(root, 0)
     
             depth_sorted = sorted(self.depth_map.items(), key=itemgetter(1), reverse=True)
    -        return [item[0] for item in depth_sorted if plan_cached or not item[0]._cached()]
    +        return [item[0] for item in depth_sorted if plan_cached or not item[0]._cached_success()]

  • buildstream/_scheduler/queues/buildqueue.py
    @@ -18,8 +18,12 @@
     #        Tristan Van Berkom <tristan vanberkom codethink co uk>
     #        Jürg Billeter <juerg billeter codethink co uk>
     
    +from datetime import timedelta
    +
     from . import Queue, QueueStatus
    +from ..jobs import ElementJob
     from ..resources import ResourceType
    +from ..._message import MessageType
     
     
     # A queue which assembles elements

    @@ -30,6 +34,38 @@ class BuildQueue(Queue):
         complete_name = "Built"
         resources = [ResourceType.PROCESS]
     
    +    def __init__(self, *args, **kwargs):
    +        super().__init__(*args, **kwargs)
    +        self._tried = set()
    +
    +    def enqueue(self, elts):
    +        to_queue = []
    +
    +        for element in elts:
    +            if not element._cached_failure() or element in self._tried:
    +                to_queue.append(element)
    +                continue
    +
    +            # Bypass queue processing entirely the first time it's tried.
    +            self._tried.add(element)
    +            _, description, detail = element._get_build_result()
    +            logfile = element._get_build_log()
    +            self._message(element, MessageType.FAIL, description,
    +                          detail=detail, action_name=self.action_name,
    +                          elapsed=timedelta(seconds=0),
    +                          logfile=logfile)
    +            job = ElementJob(self._scheduler, self.action_name,
    +                             logfile, element=element, queue=self,
    +                             resources=self.resources,
    +                             action_cb=self.process,
    +                             complete_cb=self._job_done,
    +                             max_retries=self._max_retries)
    +            self._done_queue.append(job)
    +            self.failed_elements.append(element)
    +            self._scheduler._job_complete_callback(job, False)
    +
    +        return super().enqueue(to_queue)
    +
         def process(self, element):
             element._assemble()
             return element._get_unique_id()

    @@ -43,7 +79,7 @@ class BuildQueue(Queue):
                 # Keep it in the queue.
                 return QueueStatus.WAIT
     
    -        if element._cached():
    +        if element._cached_success():
                 return QueueStatus.SKIP
     
             if not element._buildable():
    

  • buildstream/_scheduler/queues/queue.py
    @@ -296,6 +296,7 @@ class Queue():
         # See the Job object for an explanation of the call signature
         #
         def _job_done(self, job, element, success, result):
    +        element._update_state()
     
             # Update values that need to be synchronized in the main task
             # before calling any queue implementation

    @@ -335,8 +336,9 @@
     
                 # No exception occured, handle the success/failure state in the normal way
                 #
    +            self._done_queue.append(job)
    +
                 if success:
    -                self._done_queue.append(job)
                     if processed:
                         self.processed_elements.append(element)
                     else:
    

  • buildstream/_stream.py
    @@ -435,7 +435,7 @@ class Stream():
     
             except BstError as e:
                 raise StreamError("Error while staging dependencies into a sandbox"
    -                              ": '{}'".format(e), reason=e.reason) from e
    +                              ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
     
         # workspace_open
         #
    

  • buildstream/buildelement.py
    @@ -233,12 +233,14 @@ class BuildElement(Element):
             return commands
     
         def __run_command(self, sandbox, cmd, cmd_name):
    -        self.status("Running {}".format(cmd_name), detail=cmd)
    -
    -        # Note the -e switch to 'sh' means to exit with an error
    -        # if any untested command fails.
    -        #
    -        exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
    -                               SandboxFlags.ROOT_READ_ONLY)
    -        if exitcode != 0:
    -            raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode))
    +        with self.timed_activity("Running {}".format(cmd_name)):
    +            self.status("Running {}".format(cmd_name), detail=cmd)
    +
    +            # Note the -e switch to 'sh' means to exit with an error
    +            # if any untested command fails.
    +            #
    +            exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
    +                                   SandboxFlags.ROOT_READ_ONLY)
    +            if exitcode != 0:
    +                raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode),
    +                                   collect=self.get_variable('install-root'))

  • buildstream/element.py
    @@ -140,11 +140,14 @@ class ElementError(BstError):
            message (str): The error message to report to the user
            detail (str): A possibly multiline, more detailed error message
            reason (str): An optional machine readable reason string, used for test cases
    +       collect (str): An optional directory containing partial install contents
            temporary (bool): An indicator to whether the error may occur if the operation was run again. (*Since: 1.2*)
         """
    -    def __init__(self, message, *, detail=None, reason=None, temporary=False):
    +    def __init__(self, message, *, detail=None, reason=None, collect=None, temporary=False):
             super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary)
     
    +        self.collect = collect
    +
     
     class Element(Plugin):
         """Element()

    @@ -216,6 +219,7 @@ class Element(Plugin):
             self.__consistency = Consistency.INCONSISTENT  # Cached overall consistency state
             self.__cached = None                    # Whether we have a cached artifact
             self.__strong_cached = None             # Whether we have a cached artifact
    +        self.__weak_cached = None               # Whether we have a cached artifact
             self.__assemble_scheduled = False       # Element is scheduled to be assembled
             self.__assemble_done = False            # Element is assembled
             self.__tracking_scheduled = False       # Sources are scheduled to be tracked

    @@ -227,6 +231,8 @@ class Element(Plugin):
             self.__tainted = None                   # Whether the artifact is tainted and should not be shared
             self.__required = False                 # Whether the artifact is required in the current session
             self.__artifact_size = None             # The size of data committed to the artifact cache
    +        self.__build_result = None              # The result of assembling this Element
    +        self._build_log_path = None            # The path of the build log for this Element
     
             # hash tables of loaded artifact metadata, hashed by key
             self.__metadata_keys = {}                     # Strong and weak keys for this key

    @@ -608,6 +614,12 @@ class Element(Plugin):
                   dep.stage_artifact(sandbox)
             """
     
    +        if not self._cached():
    +            detail = "No artifacts have been cached yet for that element\n" + \
    +                     "Try building the element first with `bst build`\n"
    +            raise ElementError("No artifacts to stage",
    +                               detail=detail, reason="uncached-checkout-attempt")
    +
             if update_mtimes is None:
                 update_mtimes = []
     

    @@ -951,7 +963,51 @@
         #            the artifact cache
         #
         def _cached(self):
    -        return self.__cached
    +        return self.__is_cached(keystrength=None)
    +
    +    # _get_build_result():
    +    #
    +    # Returns:
    +    #    (bool): Whether the artifact of this element present in the artifact cache is of a success
    +    #    (str): Short description of the result
    +    #    (str): Detailed description of the result
    +    #
    +    def _get_build_result(self):
    +        return self.__get_build_result(keystrength=None)
    +
    +    # __set_build_result():
    +    #
    +    # Sets the assembly result
    +    #
    +    # Args:
    +    #    success (bool): Whether the result is a success
    +    #    description (str): Short description of the result
    +    #    detail (str): Detailed description of the result
    +    #
    +    def __set_build_result(self, success, description, detail=None):
    +        self.__build_result = (success, description, detail)
    +
    +    # _cached_success():
    +    #
    +    # Returns:
    +    #    (bool): Whether this element is already present in
    +    #            the artifact cache and the element assembled successfully
    +    #
    +    def _cached_success(self):
    +        return self.__cached_success(keystrength=None)
    +
    +    # _cached_failure():
    +    #
    +    # Returns:
    +    #    (bool): Whether this element is already present in
    +    #            the artifact cache and the element did not assemble successfully
    +    #
    +    def _cached_failure(self):
    +        if not self._cached():
    +            return False
    +
    +        success, _, _ = self._get_build_result()
    +        return not success
     
         # _buildable():
         #

    @@ -968,7 +1024,7 @@
                 # if the pull job is still pending as the remote cache may have an artifact
                 # that matches the strict cache key, which is preferred over a locally
                 # cached artifact with a weak cache key match.
    -            if not dependency._cached() or not dependency._get_cache_key(strength=_KeyStrength.STRONG):
    +            if not dependency._cached_success() or not dependency._get_cache_key(strength=_KeyStrength.STRONG):
                     return False
     
             if not self.__assemble_scheduled:

    @@ -1039,6 +1095,8 @@
                 self.__weak_cache_key = None
                 self.__strict_cache_key = None
                 self.__strong_cached = None
    +            self.__weak_cached = None
    +            self.__build_result = None
                 return
     
             if self.__weak_cache_key is None:

    @@ -1061,6 +1119,9 @@
                     # Weak cache key could not be calculated yet
                     return
     
    +            if not self.__weak_cached:
    +                self.__weak_cached = self.__artifacts.contains(self, self.__weak_cache_key)
    +
             if not context.get_strict():
                 # Full cache query in non-strict mode requires both the weak and
                 # strict cache keys. However, we need to determine as early as

    @@ -1068,9 +1129,9 @@
                 # for workspaced elements. For this cache check the weak cache keys
                 # are sufficient. However, don't update the `cached` attributes
                 # until the full cache query below.
    -            cached = self.__artifacts.contains(self, self.__weak_cache_key)
                 if (not self.__assemble_scheduled and not self.__assemble_done and
    -                    not cached and not self._pull_pending() and self._is_required()):
    +                    not self.__cached_success(keystrength=_KeyStrength.WEAK) and
    +                    not self._pull_pending() and self._is_required()):
                     self._schedule_assemble()
                     return
     

    @@ -1090,9 +1151,12 @@
                 self.__cached = self.__artifacts.contains(self, key_for_cache_lookup)
             if not self.__strong_cached:
                 self.__strong_cached = self.__artifacts.contains(self, self.__strict_cache_key)
    +        if key_for_cache_lookup == self.__weak_cache_key:
    +            if not self.__weak_cached:
    +                self.__weak_cached = self.__artifacts.contains(self, self.__weak_cache_key)
     
             if (not self.__assemble_scheduled and not self.__assemble_done and
    -                not self.__cached and not self._pull_pending() and self._is_required()):
    +                not self._cached_success() and not self._pull_pending() and self._is_required()):
                 # Workspaced sources are considered unstable if a build is pending
                 # as the build will modify the contents of the workspace.
                 # Determine as early as possible if a build is pending to discard

    @@ -1434,7 +1498,7 @@
         def _assemble(self):
     
             # Assert call ordering
    -        assert not self._cached()
    +        assert not self._cached_success()
     
             context = self._get_context()
             with self._output_file() as output_file:

    @@ -1457,6 +1521,7 @@
                     self.__dynamic_public = _yaml.node_copy(self.__public)
     
                     # Call the abstract plugin methods
    +                collect = None
                     try:
                         # Step 1 - Configure
                         self.configure_sandbox(sandbox)

    @@ -1466,6 +1531,7 @@
                         self.__prepare(sandbox)
                         # Step 4 - Assemble
                         collect = self.assemble(sandbox)
    +                    self.__set_build_result(success=True, description="succeeded")
                     except BstError as e:
                         # If an error occurred assembling an element in a sandbox,
                         # then tack on the sandbox directory to the error

    @@ -1489,80 +1555,95 @@
                                 self.warn("Failed to preserve workspace state for failed build sysroot: {}"
                                           .format(e))
     
    -                    raise
    +                    if isinstance(e, ElementError):
    +                        collect = e.collect  # pylint: disable=no-member
     
    -                collectdir = os.path.join(sandbox_root, collect.lstrip(os.sep))
    -                if not os.path.exists(collectdir):
    -                    raise ElementError(
    -                        "Directory '{}' was not found inside the sandbox, "
    -                        "unable to collect artifact contents"
    -                        .format(collect))
    -
    -                # At this point, we expect an exception was raised leading to
    -                # an error message, or we have good output to collect.
    -
    -                # Create artifact directory structure
    -                assembledir = os.path.join(rootdir, 'artifact')
    -                filesdir = os.path.join(assembledir, 'files')
    -                logsdir = os.path.join(assembledir, 'logs')
    -                metadir = os.path.join(assembledir, 'meta')
    -                buildtreedir = os.path.join(assembledir, 'buildtree')
    -                os.mkdir(assembledir)
    -                os.mkdir(filesdir)
    -                os.mkdir(logsdir)
    -                os.mkdir(metadir)
    -                os.mkdir(buildtreedir)
    -
    -                # Hard link files from collect dir to files directory
    -                utils.link_files(collectdir, filesdir)
    -
    -                sandbox_build_dir = os.path.join(sandbox_root, self.get_variable('build-root').lstrip(os.sep))
    -                # Hard link files from build-root dir to buildtreedir directory
    -                if os.path.isdir(sandbox_build_dir):
    -                    utils.link_files(sandbox_build_dir, buildtreedir)
    -
    -                # Copy build log
    -                log_filename = context.get_log_filename()
    -                if log_filename:
    -                    shutil.copyfile(log_filename, os.path.join(logsdir, 'build.log'))
    -
    -                # Store public data
    -                _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
    -
    -                # ensure we have cache keys
    -                self._assemble_done()
    -
    -                # Store keys.yaml
    -                _yaml.dump(_yaml.node_sanitize({
    -                    'strong': self._get_cache_key(),
    -                    'weak': self._get_cache_key(_KeyStrength.WEAK),
    -                }), os.path.join(metadir, 'keys.yaml'))
    -
    -                # Store dependencies.yaml
    -                _yaml.dump(_yaml.node_sanitize({
    -                    e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
    -                }), os.path.join(metadir, 'dependencies.yaml'))
    -
    -                # Store workspaced.yaml
    -                _yaml.dump(_yaml.node_sanitize({
    -                    'workspaced': True if self._get_workspace() else False
    -                }), os.path.join(metadir, 'workspaced.yaml'))
    -
    -                # Store workspaced-dependencies.yaml
    -                _yaml.dump(_yaml.node_sanitize({
    -                    'workspaced-dependencies': [
    -                        e.name for e in self.dependencies(Scope.BUILD)
    -                        if e._get_workspace()
    -                    ]
    -                }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
    -
    -                with self.timed_activity("Caching artifact"):
    -                    self.__artifact_size = utils._get_dir_size(assembledir)
    -                    self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
    +                    self.__set_build_result(success=False, description=str(e), detail=e.detail)
    +                    raise
    +                finally:
    +                    if collect is not None:
    +                        collectdir = os.path.join(sandbox_root, collect.lstrip(os.sep))
    +
    +                    # Create artifact directory structure
    +                    assembledir = os.path.join(rootdir, 'artifact')
    +                    filesdir = os.path.join(assembledir, 'files')
    +                    logsdir = os.path.join(assembledir, 'logs')
    +                    metadir = os.path.join(assembledir, 'meta')
    +                    buildtreedir = os.path.join(assembledir, 'buildtree')
    +                    os.mkdir(assembledir)
    +                    if collect is not None and os.path.exists(collectdir):
    +                        os.mkdir(filesdir)
    +                    os.mkdir(logsdir)
    +                    os.mkdir(metadir)
    +                    os.mkdir(buildtreedir)
    +
    +                    # Hard link files from collect dir to files directory
    +                    if collect is not None and os.path.exists(collectdir):
    +                        utils.link_files(collectdir, filesdir)
    +
    +                    sandbox_build_dir = os.path.join(sandbox_root, self.get_variable('build-root').lstrip(os.sep))
    +                    # Hard link files from build-root dir to buildtreedir directory
    +                    if os.path.isdir(sandbox_build_dir):
    +                        utils.link_files(sandbox_build_dir, buildtreedir)
    +
    +                    # Copy build log
    +                    log_filename = context.get_log_filename()
    +                    self._build_log_path = os.path.join(logsdir, 'build.log')
    +                    if log_filename:
    +                        shutil.copyfile(log_filename, self._build_log_path)
    +
    +                    # Store public data
    +                    _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
    +
    +                    # Store result
    +                    build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
    +                    if self.__build_result[2] is not None:
    +                        build_result_dict["detail"] = self.__build_result[2]
    +                    _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
    +
    +                    # ensure we have cache keys
    +                    self._assemble_done()
    +
    +                    # Store keys.yaml
    +                    _yaml.dump(_yaml.node_sanitize({
    +                        'strong': self._get_cache_key(),
    +                        'weak': self._get_cache_key(_KeyStrength.WEAK),
    +                    }), os.path.join(metadir, 'keys.yaml'))
    +
    +                    # Store dependencies.yaml
    +                    _yaml.dump(_yaml.node_sanitize({
    +                        e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
    +                    }), os.path.join(metadir, 'dependencies.yaml'))
    +
    +                    # Store workspaced.yaml
    +                    _yaml.dump(_yaml.node_sanitize({
    +                        'workspaced': True if self._get_workspace() else False
    +                    }), os.path.join(metadir, 'workspaced.yaml'))
    +
    +                    # Store workspaced-dependencies.yaml
    +                    _yaml.dump(_yaml.node_sanitize({
    +                        'workspaced-dependencies': [
    +                            e.name for e in self.dependencies(Scope.BUILD)
    +                            if e._get_workspace()
    +                        ]
    +                    }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
    +
    +                    with self.timed_activity("Caching artifact"):
    +                        self.__artifact_size = utils._get_dir_size(assembledir)
    +                        self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
    +
    +                    if collect is not None and not os.path.exists(collectdir):
    +                        raise ElementError(
    +                            "Directory '{}' was not found inside the sandbox, "
    +                            "unable to collect artifact contents"
    +                            .format(collect))
     
                 # Finally cleanup the build dir
                 cleanup_rootdir()
     
    +    def _get_build_log(self):
    +        return self._build_log_path
    +
         # _pull_pending()
         #
         # Check whether the artifact will be pulled.

    @@ -1983,12 +2064,19 @@
                 if workspace:
                     workspace.prepared = True
     
    +    def __is_cached(self, keystrength):
    +        if keystrength is None:
    +            return self.__cached
    +
    +        return self.__strong_cached if keystrength == _KeyStrength.STRONG else self.__weak_cached
    +
         # __assert_cached()
         #
         # Raises an error if the artifact is not cached.
         #
    -    def __assert_cached(self):
    -        assert self._cached(), "{}: Missing artifact {}".format(self, self._get_brief_display_key())
    +    def __assert_cached(self, keystrength=_KeyStrength.STRONG):
    +        assert self.__is_cached(keystrength=keystrength), "{}: Missing artifact {}".format(
    +            self, self._get_brief_display_key())
     
         # __get_tainted():
         #

    @@ -2448,6 +2536,38 @@
             metadir = os.path.join(artifact_base, 'meta')
             self.__dynamic_public = _yaml.load(os.path.join(metadir, 'public.yaml'))
     
    +    def __load_build_result(self, keystrength):
    +        self.__assert_cached(keystrength=keystrength)
    +        assert self.__build_result is None
    +
    +        artifact_base, _ = self.__extract(key=self.__weak_cache_key if keystrength is _KeyStrength.WEAK
    +                                          else self.__strict_cache_key)
    +
    +        metadir = os.path.join(artifact_base, 'meta')
    +        result_path = os.path.join(metadir, 'build-result.yaml')
    +        if not os.path.exists(result_path):
    +            self.__build_result = (True, "succeeded", None)
    +            return
    +
    +        data = _yaml.load(result_path)
    +        self.__build_result = (data["success"], data.get("description"), data.get("detail"))
    +
    +    def __get_build_result(self, keystrength):
    +        if keystrength is None:
    +            keystrength = _KeyStrength.STRONG if self._get_context().get_strict() else _KeyStrength.WEAK
    +
    +        if self.__build_result is None:
    +            self.__load_build_result(keystrength)
    +
    +        return self.__build_result
    +
    +    def __cached_success(self, keystrength):
    +        if not self.__is_cached(keystrength=keystrength):
    +            return False
    +
    +        success, _, _ = self.__get_build_result(keystrength=keystrength)
    +        return success
    +
         def __get_cache_keys_for_commit(self):
             keys = []
     
     
    

  • buildstream/scriptelement.py
    @@ -277,7 +277,8 @@ class ScriptElement(Element):
                         exitcode = sandbox.run(['sh', '-c', '-e', cmd + '\n'],
                                                SandboxFlags.ROOT_READ_ONLY if self.__root_read_only else 0)
                         if exitcode != 0:
    -                        raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode))
    +                        raise ElementError("Command '{}' failed with exitcode {}".format(cmd, exitcode),
    +                                           collect=self.__install_root)
     
             # Return where the result can be collected from
             return self.__install_root

  • doc/source/advanced-features/junction-elements.rst
    @@ -34,6 +34,7 @@ The below bst file describes an element which depends on the hello.bst element
     from the autotools example:
     
     .. literalinclude:: ../../examples/junctions/elements/callHello.bst
    +    :language: yaml
     
     This element consists of a script which calls hello.bst's hello command.
     
    

  • tests/frontend/buildcheckout.py
    @@ -96,6 +96,16 @@ def test_build_checkout_deps(datafiles, cli, deps):
             assert not os.path.exists(filename)
     
     
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_build_checkout_unbuilt(datafiles, cli):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    checkout = os.path.join(cli.directory, 'checkout')
    +
    +    # Check that checking out an unbuilt element fails nicely
    +    result = cli.run(project=project, args=['checkout', 'target.bst', checkout])
    +    result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
    +
    +
     @pytest.mark.datafiles(DATA_DIR)
     def test_build_checkout_tarball(datafiles, cli):
         project = os.path.join(datafiles.dirname, datafiles.basename)
    

  • tests/integration/cachedfail.py
    +import os
    +import pytest
    +
    +from buildstream import _yaml
    +from buildstream._exceptions import ErrorDomain
    +
    +from tests.testutils import cli_integration as cli, create_artifact_share
    +from tests.testutils.site import IS_LINUX
    +
    +
    +pytestmark = pytest.mark.integration
    +
    +
    +DATA_DIR = os.path.join(
    +    os.path.dirname(os.path.realpath(__file__)),
    +    "project"
    +)
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_build_checkout_cached_fail(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    element_path = os.path.join(project, 'elements', 'element.bst')
    +    workspace = os.path.join(cli.directory, 'workspace')
    +    checkout = os.path.join(cli.directory, 'checkout')
    +
    +    # Write out our test target
    +    element = {
    +        'kind': 'script',
    +        'depends': [
    +            {
    +                'filename': 'base.bst',
    +                'type': 'build',
    +            },
    +        ],
    +        'config': {
    +            'commands': [
    +                'touch %{install-root}/foo',
    +                'false',
    +            ],
    +        },
    +    }
    +    _yaml.dump(element, element_path)
    +
    +    # Try to build it, this should result in a failure that contains the content
    +    result = cli.run(project=project, args=['build', 'element.bst'])
    +    result.assert_main_error(ErrorDomain.STREAM, None)
    +
    +    # Assert that it's cached in a failed artifact
    +    assert cli.get_element_state(project, 'element.bst') == 'failed'
    +
    +    # Now check it out
    +    result = cli.run(project=project, args=[
    +        'checkout', 'element.bst', checkout
    +    ])
    +    result.assert_success()
    +
    +    # Check that the checkout contains the file created before failure
    +    filename = os.path.join(checkout, 'foo')
    +    assert os.path.exists(filename)
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_build_depend_on_cached_fail(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    dep_path = os.path.join(project, 'elements', 'dep.bst')
    +    target_path = os.path.join(project, 'elements', 'target.bst')
    +    workspace = os.path.join(cli.directory, 'workspace')
    +    checkout = os.path.join(cli.directory, 'checkout')
    +
    +    dep = {
    +        'kind': 'script',
    +        'depends': [
    +            {
    +                'filename': 'base.bst',
    +                'type': 'build',
    +            },
    +        ],
    +        'config': {
    +            'commands': [
    +                'touch %{install-root}/foo',
    +                'false',
    +            ],
    +        },
    +    }
    +    _yaml.dump(dep, dep_path)
    +    target = {
    +        'kind': 'script',
    +        'depends': [
    +            {
    +                'filename': 'base.bst',
    +                'type': 'build',
    +            },
    +            {
    +                'filename': 'dep.bst',
    +                'type': 'build',
    +            },
    +        ],
    +        'config': {
    +            'commands': [
    +                'test -e /foo',
    +            ],
    +        },
    +    }
    +    _yaml.dump(target, target_path)
    +
    +    # Try to build it, this should result in caching a failure to build dep
    +    result = cli.run(project=project, args=['build', 'dep.bst'])
    +    result.assert_main_error(ErrorDomain.STREAM, None)
    +
    +    # Assert that it's cached in a failed artifact
    +    assert cli.get_element_state(project, 'dep.bst') == 'failed'
    +
    +    # Now we should fail because we've a cached fail of dep
    +    result = cli.run(project=project, args=['build', 'target.bst'])
    +    result.assert_main_error(ErrorDomain.STREAM, None)
    +
    +    # Assert that it's not yet built, since one of its dependencies isn't ready.
    +    assert cli.get_element_state(project, 'target.bst') == 'waiting'
    +
    +
    +@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
    +@pytest.mark.datafiles(DATA_DIR)
    +@pytest.mark.parametrize("on_error", ("continue",))
    +def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    element_path = os.path.join(project, 'elements', 'element.bst')
    +    workspace = os.path.join(cli.directory, 'workspace')
    +    checkout = os.path.join(cli.directory, 'checkout')
    +
    +    # Write out our test target
    +    element = {
    +        'kind': 'script',
    +        'depends': [
    +            {
    +                'filename': 'base.bst',
    +                'type': 'build',
    +            },
    +        ],
    +        'config': {
    +            'commands': [
    +                'false',
    +            ],
    +        },
    +    }
    +    _yaml.dump(element, element_path)
    +
    +    with create_artifact_share(os.path.join(str(tmpdir), 'remote')) as share:
    +        cli.configure({
    +            'artifacts': {'url': share.repo, 'push': True},
    +        })
    +
    +        # Build the element, continuing to finish active jobs on error.
    +        result = cli.run(project=project, args=['--on-error={}'.format(on_error), 'build', 'element.bst'])
    +        result.assert_main_error(ErrorDomain.STREAM, None)
    +
    +        # This element should have failed
    +        assert cli.get_element_state(project, 'element.bst') == 'failed'
    +        # This element should have been pushed to the remote
    +        assert share.has_artifact('test', 'element.bst', cli.get_element_key(project, 'element.bst'))

