[Notes] [Git][BuildStream/buildstream][jonathan/debug-remote-failed-builds] 17 commits: _artifactcache: Rename update_atime() to update_mtime()



Title: GitLab

Jonathan Maw pushed to branch jonathan/debug-remote-failed-builds at BuildStream / buildstream

Commits:

14 changed files:

Changes:

  • NEWS
    ... ... @@ -31,6 +31,15 @@ buildstream 1.3.1
    31 31
     the new `conf-root` variable to make the process easier. And there has been
    
    32 32
     a bug fix to workspaces so they can be built in workspaces too.
    
    33 33
     
    
    34
    +  o Creating a build shell through the interactive mode or `bst shell --build`
    
    35
    +    will now use the cached build tree. It is now easier to debug local build
    
    36
    +    failures.
    
    37
    +
    
    38
    +  o `bst shell --sysroot` now takes any directory that contains a sysroot,
    
    39
    +    instead of just a specially-formatted build-root with a `root` and `scratch`
    
    40
    +    subdirectory.
    
    41
    +
    
    42
    +
    
    34 43
     =================
    
    35 44
     buildstream 1.1.5
    
    36 45
     =================
    

  • buildstream/_artifactcache/artifactcache.py
    ... ... @@ -228,7 +228,7 @@ class ArtifactCache():
    228 228
             self._required_elements.update(elements)
    
    229 229
     
    
    230 230
             # For the cache keys which were resolved so far, we bump
    
    231
    -        # the atime of them.
    
    231
    +        # the mtime of them.
    
    232 232
             #
    
    233 233
             # This is just in case we have concurrent instances of
    
    234 234
             # BuildStream running with the same artifact cache, it will
    
    ... ... @@ -240,7 +240,7 @@ class ArtifactCache():
    240 240
                 for key in (strong_key, weak_key):
    
    241 241
                     if key:
    
    242 242
                         try:
    
    243
    -                        self.update_atime(key)
    
    243
    +                        self.update_mtime(element, key)
    
    244 244
                         except ArtifactError:
    
    245 245
                             pass
    
    246 246
     
    
    ... ... @@ -391,15 +391,16 @@ class ArtifactCache():
    391 391
         def preflight(self):
    
    392 392
             pass
    
    393 393
     
    
    394
    -    # update_atime()
    
    394
    +    # update_mtime()
    
    395 395
         #
    
    396
    -    # Update the atime of an artifact.
    
    396
    +    # Update the mtime of an artifact.
    
    397 397
         #
    
    398 398
         # Args:
    
    399
    +    #     element (Element): The Element to update
    
    399 400
         #     key (str): The key of the artifact.
    
    400 401
         #
    
    401
    -    def update_atime(self, key):
    
    402
    -        raise ImplError("Cache '{kind}' does not implement contains()"
    
    402
    +    def update_mtime(self, element, key):
    
    403
    +        raise ImplError("Cache '{kind}' does not implement update_mtime()"
    
    403 404
                             .format(kind=type(self).__name__))
    
    404 405
     
    
    405 406
         # initialize_remotes():
    

  • buildstream/_artifactcache/cascache.py
    ... ... @@ -538,8 +538,9 @@ class CASCache(ArtifactCache):
    538 538
             except FileNotFoundError as e:
    
    539 539
                 raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
    
    540 540
     
    
    541
    -    def update_atime(self, ref):
    
    541
    +    def update_mtime(self, element, key):
    
    542 542
             try:
    
    543
    +            ref = self.get_artifact_fullname(element, key)
    
    543 544
                 os.utime(self._refpath(ref))
    
    544 545
             except FileNotFoundError as e:
    
    545 546
                 raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
    

  • buildstream/_frontend/app.py
    ... ... @@ -564,18 +564,15 @@ class App():
    564 564
                            "  (c)ontinue  - Continue queueing jobs as much as possible\n" +
    
    565 565
                            "  (q)uit      - Exit after all ongoing jobs complete\n" +
    
    566 566
                            "  (t)erminate - Terminate any ongoing jobs and exit\n" +
    
    567
    -                       "  (r)etry     - Retry this job\n")
    
    567
    +                       "  (r)etry     - Retry this job\n" +
    
    568
    +                       "  (s)hell     - Drop into a shell in the failed build sandbox\n")
    
    568 569
                 if failure.logfile:
    
    569 570
                     summary += "  (l)og       - View the full log file\n"
    
    570
    -            if failure.sandbox:
    
    571
    -                summary += "  (s)hell     - Drop into a shell in the failed build sandbox\n"
    
    572 571
                 summary += "\nPressing ^C will terminate jobs and exit\n"
    
    573 572
     
    
    574
    -            choices = ['continue', 'quit', 'terminate', 'retry']
    
    573
    +            choices = ['continue', 'quit', 'terminate', 'retry', 'shell']
    
    575 574
                 if failure.logfile:
    
    576 575
                     choices += ['log']
    
    577
    -            if failure.sandbox:
    
    578
    -                choices += ['shell']
    
    579 576
     
    
    580 577
                 choice = ''
    
    581 578
                 while choice not in ['continue', 'quit', 'terminate', 'retry']:
    
    ... ... @@ -598,7 +595,7 @@ class App():
    598 595
                         click.echo("\nDropping into an interactive shell in the failed build sandbox\n", err=True)
    
    599 596
                         try:
    
    600 597
                             prompt = self.shell_prompt(element)
    
    601
    -                        self.stream.shell(element, Scope.BUILD, prompt, directory=failure.sandbox, isolate=True)
    
    598
    +                        self.stream.shell(element, Scope.BUILD, prompt, isolate=True)
    
    602 599
                         except BstError as e:
    
    603 600
                             click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
    
    604 601
                     elif choice == 'log':
    

  • buildstream/element.py
    ... ... @@ -1318,7 +1318,9 @@ class Element(Plugin):
    1318 1318
         @contextmanager
    
    1319 1319
         def _prepare_sandbox(self, scope, directory, deps='run', integrate=True):
    
    1320 1320
             # bst shell and bst checkout require a local sandbox.
    
    1321
    -        with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False) as sandbox:
    
    1321
    +        bare_directory = True if directory else False
    
    1322
    +        with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
    
    1323
    +                            bare_directory=True if directory else False) as sandbox:
    
    1322 1324
     
    
    1323 1325
                 # Configure always comes first, and we need it.
    
    1324 1326
                 self.configure_sandbox(sandbox)
    
    ... ... @@ -1385,6 +1387,7 @@ class Element(Plugin):
    1385 1387
                 # the same filing system as the rest of our cache.
    
    1386 1388
                 temp_staging_location = os.path.join(self._get_context().artifactdir, "staging_temp")
    
    1387 1389
                 temp_staging_directory = tempfile.mkdtemp(prefix=temp_staging_location)
    
    1390
    +            import_dir = temp_staging_directory
    
    1388 1391
     
    
    1389 1392
                 try:
    
    1390 1393
                     workspace = self._get_workspace()
    
    ... ... @@ -1395,12 +1398,16 @@ class Element(Plugin):
    1395 1398
                             with self.timed_activity("Staging local files at {}"
    
    1396 1399
                                                      .format(workspace.get_absolute_path())):
    
    1397 1400
                                 workspace.stage(temp_staging_directory)
    
    1401
    +                elif self._cached():
    
    1402
    +                    # We have a cached buildtree to use, instead
    
    1403
    +                    artifact_base, _ = self.__extract()
    
    1404
    +                    import_dir = os.path.join(artifact_base, 'buildtree')
    
    1398 1405
                     else:
    
    1399 1406
                         # No workspace, stage directly
    
    1400 1407
                         for source in self.sources():
    
    1401 1408
                             source._stage(temp_staging_directory)
    
    1402 1409
     
    
    1403
    -                vdirectory.import_files(temp_staging_directory)
    
    1410
    +                vdirectory.import_files(import_dir)
    
    1404 1411
     
    
    1405 1412
                 finally:
    
    1406 1413
                     # Staging may produce directories with less than 'rwx' permissions
    
    ... ... @@ -1566,10 +1573,6 @@ class Element(Plugin):
    1566 1573
                         collect = self.assemble(sandbox)
    
    1567 1574
                         self.__set_build_result(success=True, description="succeeded")
    
    1568 1575
                     except BstError as e:
    
    1569
    -                    # If an error occurred assembling an element in a sandbox,
    
    1570
    -                    # then tack on the sandbox directory to the error
    
    1571
    -                    e.sandbox = rootdir
    
    1572
    -
    
    1573 1576
                         # If there is a workspace open on this element, it will have
    
    1574 1577
                         # been mounted for sandbox invocations instead of being staged.
    
    1575 1578
                         #
    
    ... ... @@ -1683,8 +1686,8 @@ class Element(Plugin):
    1683 1686
                                 "unable to collect artifact contents"
    
    1684 1687
                                 .format(collect))
    
    1685 1688
     
    
    1686
    -            # Finally cleanup the build dir
    
    1687
    -            cleanup_rootdir()
    
    1689
    +                    # Finally cleanup the build dir
    
    1690
    +                    cleanup_rootdir()
    
    1688 1691
     
    
    1689 1692
             return artifact_size
    
    1690 1693
     
    
    ... ... @@ -2152,12 +2155,14 @@ class Element(Plugin):
    2152 2155
         #    stderr (fileobject): The stream for stderr for the sandbox
    
    2153 2156
         #    config (SandboxConfig): The SandboxConfig object
    
    2154 2157
         #    allow_remote (bool): Whether the sandbox is allowed to be remote
    
    2158
    +    #    bare_directory (bool): Whether the directory is bare i.e. doesn't have
    
    2159
    +    #                           a separate 'root' subdir
    
    2155 2160
         #
    
    2156 2161
         # Yields:
    
    2157 2162
         #    (Sandbox): A usable sandbox
    
    2158 2163
         #
    
    2159 2164
         @contextmanager
    
    2160
    -    def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True):
    
    2165
    +    def __sandbox(self, directory, stdout=None, stderr=None, config=None, allow_remote=True, bare_directory=False):
    
    2161 2166
             context = self._get_context()
    
    2162 2167
             project = self._get_project()
    
    2163 2168
             platform = Platform.get_platform()
    
    ... ... @@ -2188,6 +2193,7 @@ class Element(Plugin):
    2188 2193
                                                   stdout=stdout,
    
    2189 2194
                                                   stderr=stderr,
    
    2190 2195
                                                   config=config,
    
    2196
    +                                              bare_directory=bare_directory,
    
    2191 2197
                                                   allow_real_directory=not self.BST_VIRTUAL_DIRECTORY)
    
    2192 2198
                 yield sandbox
    
    2193 2199
     
    
    ... ... @@ -2197,7 +2203,7 @@ class Element(Plugin):
    2197 2203
     
    
    2198 2204
                 # Recursive contextmanager...
    
    2199 2205
                 with self.__sandbox(rootdir, stdout=stdout, stderr=stderr, config=config,
    
    2200
    -                                allow_remote=allow_remote) as sandbox:
    
    2206
    +                                allow_remote=allow_remote, bare_directory=False) as sandbox:
    
    2201 2207
                     yield sandbox
    
    2202 2208
     
    
    2203 2209
                 # Cleanup the build dir
    

  • buildstream/plugins/sources/deb.py
    ... ... @@ -50,7 +50,7 @@ deb - stage files from .deb packages
    50 50
     """
    
    51 51
     
    
    52 52
     import tarfile
    
    53
    -from contextlib import contextmanager, ExitStack
    
    53
    +from contextlib import contextmanager
    
    54 54
     import arpy                                       # pylint: disable=import-error
    
    55 55
     
    
    56 56
     from .tar import TarSource
    
    ... ... @@ -69,8 +69,7 @@ class DebSource(TarSource):
    69 69
     
    
    70 70
         @contextmanager
    
    71 71
         def _get_tar(self):
    
    72
    -        with ExitStack() as context:
    
    73
    -            deb_file = context.enter_context(open(self._get_mirror_file(), 'rb'))
    
    72
    +        with open(self._get_mirror_file(), 'rb') as deb_file:
    
    74 73
                 arpy_archive = arpy.Archive(fileobj=deb_file)
    
    75 74
                 arpy_archive.read_all_headers()
    
    76 75
                 data_tar_arpy = [v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k][0]
    

  • buildstream/plugins/sources/tar.py
    ... ... @@ -57,7 +57,7 @@ tar - stage files from tar archives
    57 57
     
    
    58 58
     import os
    
    59 59
     import tarfile
    
    60
    -from contextlib import contextmanager, ExitStack
    
    60
    +from contextlib import contextmanager
    
    61 61
     from tempfile import TemporaryFile
    
    62 62
     
    
    63 63
     from buildstream import SourceError
    
    ... ... @@ -88,8 +88,7 @@ class TarSource(DownloadableFileSource):
    88 88
         def _run_lzip(self):
    
    89 89
             assert self.host_lzip
    
    90 90
             with TemporaryFile() as lzip_stdout:
    
    91
    -            with ExitStack() as context:
    
    92
    -                lzip_file = context.enter_context(open(self._get_mirror_file(), 'r'))
    
    91
    +            with open(self._get_mirror_file(), 'r') as lzip_file:
    
    93 92
                     self.call([self.host_lzip, '-d'],
    
    94 93
                               stdin=lzip_file,
    
    95 94
                               stdout=lzip_stdout)
    

  • buildstream/sandbox/_mount.py
    ... ... @@ -31,7 +31,6 @@ from .._fuse import SafeHardlinks
    31 31
     #
    
    32 32
     class Mount():
    
    33 33
         def __init__(self, sandbox, mount_point, safe_hardlinks, fuse_mount_options={}):
    
    34
    -        scratch_directory = sandbox._get_scratch_directory()
    
    35 34
             # Getting _get_underlying_directory() here is acceptable as
    
    36 35
             # we're part of the sandbox code. This will fail if our
    
    37 36
             # directory is CAS-based.
    
    ... ... @@ -51,6 +50,7 @@ class Mount():
    51 50
             #        a regular mount point within the parent's redirected mount.
    
    52 51
             #
    
    53 52
             if self.safe_hardlinks:
    
    53
    +            scratch_directory = sandbox._get_scratch_directory()
    
    54 54
                 # Redirected mount
    
    55 55
                 self.mount_origin = os.path.join(root_directory, mount_point.lstrip(os.sep))
    
    56 56
                 self.mount_base = os.path.join(scratch_directory, utils.url_directory_name(mount_point))
    

  • buildstream/sandbox/_sandboxremote.py
    ... ... @@ -76,8 +76,7 @@ class SandboxRemote(Sandbox):
    76 76
             # Upload the Command message to the remote CAS server
    
    77 77
             command_digest = cascache.push_message(self._get_project(), remote_command)
    
    78 78
             if not command_digest or not cascache.verify_digest_pushed(self._get_project(), command_digest):
    
    79
    -            # Command push failed
    
    80
    -            return None
    
    79
    +            raise SandboxError("Failed pushing build command to remote CAS.")
    
    81 80
     
    
    82 81
             # Create and send the action.
    
    83 82
             action = remote_execution_pb2.Action(command_digest=command_digest,
    
    ... ... @@ -88,27 +87,57 @@ class SandboxRemote(Sandbox):
    88 87
             # Upload the Action message to the remote CAS server
    
    89 88
             action_digest = cascache.push_message(self._get_project(), action)
    
    90 89
             if not action_digest or not cascache.verify_digest_pushed(self._get_project(), action_digest):
    
    91
    -            # Action push failed
    
    92
    -            return None
    
    90
    +            raise SandboxError("Failed pushing build action to remote CAS.")
    
    93 91
     
    
    94 92
             # Next, try to create a communication channel to the BuildGrid server.
    
    95 93
             channel = grpc.insecure_channel(self.server_url)
    
    96 94
             stub = remote_execution_pb2_grpc.ExecutionStub(channel)
    
    97 95
             request = remote_execution_pb2.ExecuteRequest(action_digest=action_digest,
    
    98 96
                                                           skip_cache_lookup=False)
    
    99
    -        try:
    
    100
    -            operation_iterator = stub.Execute(request)
    
    101
    -        except grpc.RpcError:
    
    102
    -            return None
    
    97
    +
    
    98
    +        def __run_remote_command(stub, execute_request=None, running_operation=None):
    
    99
    +            try:
    
    100
    +                last_operation = None
    
    101
    +                if execute_request is not None:
    
    102
    +                    operation_iterator = stub.Execute(execute_request)
    
    103
    +                else:
    
    104
    +                    request = remote_execution_pb2.WaitExecutionRequest(name=running_operation.name)
    
    105
    +                    operation_iterator = stub.WaitExecution(request)
    
    106
    +
    
    107
    +                for operation in operation_iterator:
    
    108
    +                    if operation.done:
    
    109
    +                        return operation
    
    110
    +                    else:
    
    111
    +                        last_operation = operation
    
    112
    +            except grpc.RpcError as e:
    
    113
    +                status_code = e.code()
    
    114
    +                if status_code == grpc.StatusCode.UNAVAILABLE:
    
    115
    +                    raise SandboxError("Failed contacting remote execution server at {}."
    
    116
    +                                       .format(self.server_url))
    
    117
    +
    
    118
    +                elif status_code in (grpc.StatusCode.INVALID_ARGUMENT,
    
    119
    +                                     grpc.StatusCode.FAILED_PRECONDITION,
    
    120
    +                                     grpc.StatusCode.RESOURCE_EXHAUSTED,
    
    121
    +                                     grpc.StatusCode.INTERNAL,
    
    122
    +                                     grpc.StatusCode.DEADLINE_EXCEEDED):
    
    123
    +                    raise SandboxError("{} ({}).".format(e.details(), status_code.name))
    
    124
    +
    
    125
    +                elif running_operation and status_code == grpc.StatusCode.UNIMPLEMENTED:
    
    126
    +                    raise SandboxError("Failed trying to recover from connection loss: "
    
    127
    +                                       "server does not support operation status polling recovery.")
    
    128
    +
    
    129
    +            return last_operation
    
    103 130
     
    
    104 131
             operation = None
    
    105 132
             with self._get_context().timed_activity("Waiting for the remote build to complete"):
    
    106
    -            # It is advantageous to check operation_iterator.code() is grpc.StatusCode.OK here,
    
    107
    -            # which will check the server is actually contactable. However, calling it when the
    
    108
    -            # server is available seems to cause .code() to hang forever.
    
    109
    -            for operation in operation_iterator:
    
    110
    -                if operation.done:
    
    111
    -                    break
    
    133
    +            operation = __run_remote_command(stub, execute_request=request)
    
    134
    +            if operation is None:
    
    135
    +                return None
    
    136
    +            elif operation.done:
    
    137
    +                return operation
    
    138
    +
    
    139
    +            while operation is not None and not operation.done:
    
    140
    +                operation = __run_remote_command(stub, running_operation=operation)
    
    112 141
     
    
    113 142
             return operation
    
    114 143
     
    
    ... ... @@ -192,7 +221,6 @@ class SandboxRemote(Sandbox):
    192 221
     
    
    193 222
             if operation is None:
    
    194 223
                 # Failure of remote execution, usually due to an error in BuildStream
    
    195
    -            # NB This error could be raised in __run_remote_command
    
    196 224
                 raise SandboxError("No response returned from server")
    
    197 225
     
    
    198 226
             assert not operation.HasField('error') and operation.HasField('response')
    

  • buildstream/sandbox/sandbox.py
    ... ... @@ -98,16 +98,23 @@ class Sandbox():
    98 98
             self.__config = kwargs['config']
    
    99 99
             self.__stdout = kwargs['stdout']
    
    100 100
             self.__stderr = kwargs['stderr']
    
    101
    +        self.__bare_directory = kwargs['bare_directory']
    
    101 102
     
    
    102 103
             # Setup the directories. Root and output_directory should be
    
    103 104
             # available to subclasses, hence being single-underscore. The
    
    104 105
             # others are private to this class.
    
    105
    -        self._root = os.path.join(directory, 'root')
    
    106
    +        # If the directory is bare, it probably doesn't need scratch
    
    107
    +        if self.__bare_directory:
    
    108
    +            self._root = directory
    
    109
    +            self.__scratch = None
    
    110
    +            os.makedirs(self._root, exist_ok=True)
    
    111
    +        else:
    
    112
    +            self._root = os.path.join(directory, 'root')
    
    113
    +            self.__scratch = os.path.join(directory, 'scratch')
    
    114
    +            for directory_ in [self._root, self.__scratch]:
    
    115
    +                os.makedirs(directory_, exist_ok=True)
    
    116
    +
    
    106 117
             self._output_directory = None
    
    107
    -        self.__directory = directory
    
    108
    -        self.__scratch = os.path.join(self.__directory, 'scratch')
    
    109
    -        for directory_ in [self._root, self.__scratch]:
    
    110
    -            os.makedirs(directory_, exist_ok=True)
    
    111 118
             self._vdir = None
    
    112 119
     
    
    113 120
             # This is set if anyone requests access to the underlying
    
    ... ... @@ -334,6 +341,7 @@ class Sandbox():
    334 341
         # Returns:
    
    335 342
         #    (str): The sandbox scratch directory
    
    336 343
         def _get_scratch_directory(self):
    
    344
    +        assert not self.__bare_directory, "Scratch is not going to work with bare directories"
    
    337 345
             return self.__scratch
    
    338 346
     
    
    339 347
         # _get_output()
    

  • tests/integration/build-tree.py
    1
    +import os
    
    2
    +import pytest
    
    3
    +import shutil
    
    4
    +
    
    5
    +from tests.testutils import cli, cli_integration, create_artifact_share
    
    6
    +
    
    7
    +
    
    8
    +pytestmark = pytest.mark.integration
    
    9
    +
    
    10
    +
    
    11
    +DATA_DIR = os.path.join(
    
    12
    +    os.path.dirname(os.path.realpath(__file__)),
    
    13
    +    "project"
    
    14
    +)
    
    15
    +
    
    16
    +
    
    17
    +@pytest.mark.datafiles(DATA_DIR)
    
    18
    +def test_buildtree_staged(cli_integration, tmpdir, datafiles):
    
    19
    +    # i.e. tests that cached build trees are staged by `bst shell --build`
    
    20
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    21
    +    element_name = 'build-shell/buildtree.bst'
    
    22
    +
    
    23
    +    res = cli_integration.run(project=project, args=['build', element_name])
    
    24
    +    res.assert_success()
    
    25
    +
    
    26
    +    res = cli_integration.run(project=project, args=[
    
    27
    +        'shell', '--build', element_name, '--', 'grep', '-q', 'Hi', 'test'
    
    28
    +    ])
    
    29
    +    res.assert_success()
    
    30
    +
    
    31
    +
    
    32
    +# Check that build shells work when pulled from a remote cache
    
    33
    +# This is to roughly simulate remote execution
    
    34
    +@pytest.mark.datafiles(DATA_DIR)
    
    35
    +def test_buildtree_pulled(cli, tmpdir, datafiles):
    
    36
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    37
    +    element_name = 'build-shell/buildtree.bst'
    
    38
    +
    
    39
    +    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
    
    40
    +        # Build the element to push it to cache
    
    41
    +        cli.configure({
    
    42
    +            'artifacts': {'url': share.repo, 'push': True}
    
    43
    +        })
    
    44
    +        result = cli.run(project=project, args=['build', element_name])
    
    45
    +        result.assert_success()
    
    46
    +        assert cli.get_element_state(project, element_name) == 'cached'
    
    47
    +
    
    48
    +        # Discard the cache
    
    49
    +        cli.configure({
    
    50
    +            'artifacts': {'url': share.repo, 'push': True},
    
    51
    +            'artifactdir': os.path.join(cli.directory, 'artifacts2')
    
    52
    +        })
    
    53
    +        assert cli.get_element_state(project, element_name) != 'cached'
    
    54
    +
    
    55
    +        # Pull from cache
    
    56
    +        result = cli.run(project=project, args=['pull', '--deps', 'all', element_name])
    
    57
    +        result.assert_success()
    
    58
    +
    
    59
    +        # Check it's using the cached build tree
    
    60
    +        res = cli.run(project=project, args=[
    
    61
    +            'shell', '--build', element_name, '--', 'grep', '-q', 'Hi', 'test'
    
    62
    +        ])
    
    63
    +        res.assert_success()

  • tests/integration/project/elements/build-shell/buildtree.bst
    1
    +kind: manual
    
    2
    +description: |
    
    3
    +  Puts a file in the build tree so that build tree caching and staging can be tested.
    
    4
    +
    
    5
    +depends:
    
    6
    +  - filename: base.bst
    
    7
    +    type: build
    
    8
    +
    
    9
    +config:
    
    10
    +  build-commands:
    
    11
    +    - "echo 'Hi' > %{build-root}/test"

  • tests/integration/shell.py
    ... ... @@ -302,46 +302,33 @@ def test_workspace_visible(cli, tmpdir, datafiles):
    302 302
         assert result.output == workspace_hello
    
    303 303
     
    
    304 304
     
    
    305
    -# Test that we can see the workspace files in a shell
    
    306
    -@pytest.mark.integration
    
    305
    +# Test that '--sysroot' works
    
    307 306
     @pytest.mark.datafiles(DATA_DIR)
    
    308
    -def test_sysroot_workspace_visible(cli, tmpdir, datafiles):
    
    307
    +def test_sysroot(cli, tmpdir, datafiles):
    
    309 308
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    310
    -    workspace = os.path.join(cli.directory, 'workspace')
    
    311
    -    element_name = 'workspace/workspace-mount-fail.bst'
    
    312
    -
    
    313
    -    # Open a workspace on our build failing element
    
    314
    -    #
    
    315
    -    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
    
    316
    -    assert res.exit_code == 0
    
    317
    -
    
    318
    -    # Ensure the dependencies of our build failing element are built
    
    319
    -    result = cli.run(project=project, args=['build', element_name])
    
    320
    -    result.assert_main_error(ErrorDomain.STREAM, None)
    
    321
    -
    
    322
    -    # Discover the sysroot of the failed build directory, after one
    
    323
    -    # failed build, there should be only one directory there.
    
    324
    -    #
    
    325
    -    build_base = os.path.join(cli.directory, 'build')
    
    326
    -    build_dirs = os.listdir(path=build_base)
    
    327
    -    assert len(build_dirs) == 1
    
    328
    -    build_dir = os.path.join(build_base, build_dirs[0])
    
    329
    -
    
    330
    -    # Obtain a copy of the hello.c content from the workspace
    
    331
    -    #
    
    332
    -    workspace_hello_path = os.path.join(cli.directory, 'workspace', 'hello.c')
    
    333
    -    assert os.path.exists(workspace_hello_path)
    
    334
    -    with open(workspace_hello_path, 'r') as f:
    
    335
    -        workspace_hello = f.read()
    
    336
    -
    
    337
    -    # Cat the hello.c file from a bst shell command, and assert
    
    338
    -    # that we got the same content here
    
    339
    -    #
    
    340
    -    result = cli.run(project=project, args=[
    
    341
    -        'shell', '--build', '--sysroot', build_dir, element_name, '--', 'cat', 'hello.c'
    
    309
    +    base_element = "base/base-alpine.bst"
    
    310
    +    # test element only needs to be something lightweight for this test
    
    311
    +    test_element = "script/script.bst"
    
    312
    +    checkout_dir = os.path.join(str(tmpdir), 'alpine-sysroot')
    
    313
    +    test_file = 'hello'
    
    314
    +
    
    315
    +    # Build and check out a sysroot
    
    316
    +    res = cli.run(project=project, args=['build', base_element])
    
    317
    +    res.assert_success()
    
    318
    +    res = cli.run(project=project, args=['checkout', base_element, checkout_dir])
    
    319
    +    res.assert_success()
    
    320
    +
    
    321
    +    # Mutate the sysroot
    
    322
    +    test_path = os.path.join(checkout_dir, test_file)
    
    323
    +    with open(test_path, 'w') as f:
    
    324
    +        f.write('hello\n')
    
    325
    +
    
    326
    +    # Shell into the sysroot and check the test file exists
    
    327
    +    res = cli.run(project=project, args=[
    
    328
    +        'shell', '--build', '--sysroot', checkout_dir, test_element, '--',
    
    329
    +        'grep', '-q', 'hello', '/' + test_file
    
    342 330
         ])
    
    343
    -    assert result.exit_code == 0
    
    344
    -    assert result.output == workspace_hello
    
    331
    +    res.assert_success()
    
    345 332
     
    
    346 333
     
    
    347 334
     # Test system integration commands can access devices in /dev
    

  • tests/testutils/artifactshare.py
    ... ... @@ -122,9 +122,8 @@ class ArtifactShare():
    122 122
             #       same algo for creating an artifact reference
    
    123 123
             #
    
    124 124
     
    
    125
    -        # Chop off the .bst suffix first
    
    126
    -        assert element_name.endswith('.bst')
    
    127
    -        element_name = element_name[:-4]
    
    125
    +        # Replace path separator and chop off the .bst suffix
    
    126
    +        element_name = os.path.splitext(element_name.replace(os.sep, '-'))[0]
    
    128 127
     
    
    129 128
             valid_chars = string.digits + string.ascii_letters + '-._'
    
    130 129
             element_name = ''.join([
    



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]