[Notes] [Git][BuildStream/buildstream][tpollard/494] 34 commits: Forcing CI to install Sphinx 1.7.9




Tom Pollard pushed to branch tpollard/494 at BuildStream / buildstream

Commits:

23 changed files:

Changes:

  • .gitlab-ci.yml
    @@ -145,7 +145,8 @@ docs:
       stage: test
       script:
       - export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
    -  - pip3 install sphinx
    +  # Currently sphinx_rtd_theme does not support Sphinx >1.8, this breaks search functionality
    +  - pip3 install sphinx==1.7.9
       - pip3 install sphinx-click
       - pip3 install sphinx_rtd_theme
       - cd dist && ./unpack.sh && cd buildstream
    

  • buildstream/_artifactcache/artifactcache.py
    @@ -426,6 +426,22 @@ class ArtifactCache():
             raise ImplError("Cache '{kind}' does not implement contains()"
                             .format(kind=type(self).__name__))
     
    +    # contains_subdir_artifact():
    +    #
    +    # Check whether an artifact element contains a digest for a subdir
    +    # which is populated in the cache, i.e non dangling.
    +    #
    +    # Args:
    +    #     element (Element): The Element to check
    +    #     key (str): The cache key to use
    +    #     subdir (str): The subdir to check
    +    #
    +    # Returns: True if the subdir exists & is populated in the cache, False otherwise
    +    #
    +    def contains_subdir_artifact(self, element, key, subdir):
    +        raise ImplError("Cache '{kind}' does not implement contains_subdir_artifact()"
    +                        .format(kind=type(self).__name__))
    +
         # list_artifacts():
         #
         # List artifacts in this cache in LRU order.

    @@ -551,11 +567,12 @@ class ArtifactCache():
         #     element (Element): The Element whose artifact is to be fetched
         #     key (str): The cache key to use
         #     progress (callable): The progress callback, if any
    +    #     subdir (str): The optional specific subdir to pull
         #
         # Returns:
         #   (bool): True if pull was successful, False if artifact was not available
         #
    -    def pull(self, element, key, *, progress=None):
    +    def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
             raise ImplError("Cache '{kind}' does not implement pull()"
                             .format(kind=type(self).__name__))
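
    Taken together, the two additions describe a partial pull: excluded_subdirs
    prunes named subdirectories on the way down, while subdir requests one that
    was previously pruned. A minimal caller-side sketch (method names are from
    this diff; cache, element and key stand in for a concrete ArtifactCache, an
    Element and its cache key):

        # Initial pull: fetch everything except the buildtree subdir.
        cache.pull(element, key, excluded_subdirs=["buildtree"])

        # Later, fetch just the buildtree if it is not populated locally.
        if not cache.contains_subdir_artifact(element, key, "buildtree"):
            cache.pull(element, key, subdir="buildtree")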
     
    

  • buildstream/_artifactcache/cascache.py
    @@ -92,6 +92,16 @@ class CASCache(ArtifactCache):
             # This assumes that the repository doesn't have any dangling pointers
             return os.path.exists(refpath)
     
    +    def contains_subdir_artifact(self, element, key, subdir):
    +        tree = self.resolve_ref(self.get_artifact_fullname(element, key))
    +
    +        # This assumes that the subdir digest is present in the element tree
    +        subdirdigest = self._get_subdir(tree, subdir)
    +        objpath = self.objpath(subdirdigest)
    +
    +        # True if subdir content is cached or if empty as expected
    +        return os.path.exists(objpath)
    +
         def extract(self, element, key):
             ref = self.get_artifact_fullname(element, key)
     

    @@ -228,7 +238,7 @@ class CASCache(ArtifactCache):
                 remotes_for_project = self._remotes[element._get_project()]
                 return any(remote.spec.push for remote in remotes_for_project)
     
    -    def pull(self, element, key, *, progress=None):
    +    def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
             ref = self.get_artifact_fullname(element, key)
     
             project = element._get_project()

    @@ -247,8 +257,14 @@ class CASCache(ArtifactCache):
                     tree.hash = response.digest.hash
                     tree.size_bytes = response.digest.size_bytes
     
    -                self._fetch_directory(remote, tree)
    +                # Check if the element artifact is present, if so just fetch subdir
    +                if subdir and os.path.exists(self.objpath(tree)):
    +                    self._fetch_subdir(remote, tree, subdir)
    +                else:
    +                    # Fetch artifact, excluded_subdirs determined in pullqueue
    +                    self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
     
    +                # tree is the remote value, so is the same without or without dangling ref locally
                     self.set_ref(ref, tree)
     
                     element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))

    @@ -506,7 +522,7 @@ class CASCache(ArtifactCache):
         def set_ref(self, ref, tree):
             refpath = self._refpath(ref)
             os.makedirs(os.path.dirname(refpath), exist_ok=True)
    -        with utils.save_file_atomic(refpath, 'wb') as f:
    +        with utils.save_file_atomic(refpath, 'wb', tempdir=self.tmpdir) as f:
                 f.write(tree.SerializeToString())
     
         # resolve_ref():

    @@ -668,8 +684,10 @@ class CASCache(ArtifactCache):
                              stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
     
             for dirnode in directory.directories:
    -            fullpath = os.path.join(dest, dirnode.name)
    -            self._checkout(fullpath, dirnode.digest)
    +            # Don't try to checkout a dangling ref
    +            if os.path.exists(self.objpath(dirnode.digest)):
    +                fullpath = os.path.join(dest, dirnode.name)
    +                self._checkout(fullpath, dirnode.digest)
     
             for symlinknode in directory.symlinks:
                 # symlink

    @@ -948,10 +966,12 @@ class CASCache(ArtifactCache):
         #     remote (Remote): The remote to use.
         #     dir_digest (Digest): Digest object for the directory to fetch.
         #
    -    def _fetch_directory(self, remote, dir_digest):
    +    def _fetch_directory(self, remote, dir_digest, *, excluded_subdirs=None):
             fetch_queue = [dir_digest]
             fetch_next_queue = []
             batch = _CASBatchRead(remote)
    +        if not excluded_subdirs:
    +            excluded_subdirs = []
     
             while len(fetch_queue) + len(fetch_next_queue) > 0:
                 if len(fetch_queue) == 0:

    @@ -966,8 +986,9 @@ class CASCache(ArtifactCache):
                     directory.ParseFromString(f.read())
     
                 for dirnode in directory.directories:
    -                batch = self._fetch_directory_node(remote, dirnode.digest, batch,
    -                                                   fetch_queue, fetch_next_queue, recursive=True)
    +                if dirnode.name not in excluded_subdirs:
    +                    batch = self._fetch_directory_node(remote, dirnode.digest, batch,
    +                                                       fetch_queue, fetch_next_queue, recursive=True)
     
                 for filenode in directory.files:
                     batch = self._fetch_directory_node(remote, filenode.digest, batch,

    @@ -976,6 +997,10 @@ class CASCache(ArtifactCache):
             # Fetch final batch
             self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
     
    +    def _fetch_subdir(self, remote, tree, subdir):
    +        subdirdigest = self._get_subdir(tree, subdir)
    +        self._fetch_directory(remote, subdirdigest)
    +
         def _fetch_tree(self, remote, digest):
             # download but do not store the Tree object
             with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
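
    Note that the dirnode.name check in _fetch_directory applies to every
    directory popped from the fetch queue, not only the artifact root, so a
    directory whose name appears in excluded_subdirs is pruned at any depth. A
    self-contained toy of the filtering (the (name, digest) pairs here are
    hypothetical):

        # Children named in excluded_subdirs are never enqueued, so their
        # whole subtree is skipped.
        def enqueue_children(children, excluded_subdirs, fetch_queue):
            for name, digest in children:
                if name not in excluded_subdirs:
                    fetch_queue.append(digest)

        queue = []
        enqueue_children([("buildtree", "d1"), ("files", "d2")], ["buildtree"], queue)
        assert queue == ["d2"]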
    

  • buildstream/_context.py
    @@ -110,6 +110,9 @@ class Context():
             # Make sure the XDG vars are set in the environment before loading anything
             self._init_xdg()
     
    +        # Whether or not to attempt to pull buildtrees globally
    +        self.pullbuildtrees = False
    +
             # Private variables
             self._cache_key = None
             self._message_handler = None

    @@ -160,7 +163,7 @@ class Context():
             _yaml.node_validate(defaults, [
                 'sourcedir', 'builddir', 'artifactdir', 'logdir',
                 'scheduler', 'artifacts', 'logging', 'projects',
    -            'cache'
    +            'cache', 'pullbuildtrees'
             ])
     
             for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:

    @@ -185,6 +188,9 @@ class Context():
             # Load artifact share configuration
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
     
    +        # Load pull buildtrees configuration
    +        self.pullbuildtrees = _yaml.node_get(defaults, bool, 'pullbuildtrees', default_value='False')
    +
             # Load logging config
             logging = _yaml.node_get(defaults, Mapping, 'logging')
             _yaml.node_validate(logging, [
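
    One detail worth noting: the node_get call asks for a bool but supplies the
    default as the string 'False'. If _yaml.node_get does not coerce string
    defaults (not shown in this diff), a native boolean would be the safer
    spelling:

        # Sketch with a native default; assumes _yaml.node_get accepts it.
        self.pullbuildtrees = _yaml.node_get(defaults, bool, 'pullbuildtrees',
                                             default_value=False)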
    

  • buildstream/_frontend/cli.py
    @@ -305,10 +305,12 @@ def init(app, project_name, format_version, element_path, force):
                   help="Allow tracking to cross junction boundaries")
     @click.option('--track-save', default=False, is_flag=True,
                   help="Deprecated: This is ignored")
    +@click.option('--pull-buildtrees', default=False, is_flag=True,
    +              help="Pull buildtrees from a remote cache server")
     @click.argument('elements', nargs=-1,
                     type=click.Path(readable=False))
     @click.pass_obj
    -def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions):
    +def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions, pull_buildtrees):
         """Build elements in a pipeline"""
     
         if (track_except or track_cross_junctions) and not (track_ or track_all):

    @@ -327,7 +329,8 @@ def build(app, elements, all_, track_, track_save, track_all, track_except, trac
                              track_targets=track_,
                              track_except=track_except,
                              track_cross_junctions=track_cross_junctions,
    -                         build_all=all_)
    +                         build_all=all_,
    +                         pull_buildtrees=pull_buildtrees)
     
     
     ##################################################################

    @@ -429,10 +432,12 @@ def track(app, elements, deps, except_, cross_junctions):
                   help='The dependency artifacts to pull (default: none)')
     @click.option('--remote', '-r',
                   help="The URL of the remote cache (defaults to the first configured cache)")
    +@click.option('--pull-buildtrees', default=False, is_flag=True,
    +              help="Pull buildtrees from a remote cache server")
     @click.argument('elements', nargs=-1,
                     type=click.Path(readable=False))
     @click.pass_obj
    -def pull(app, elements, deps, remote):
    +def pull(app, elements, deps, remote, pull_buildtrees):
         """Pull a built artifact from the configured remote artifact cache.
     
         By default the artifact will be pulled one of the configured caches

    @@ -446,7 +451,7 @@ def pull(app, elements, deps, remote):
             all:   All dependencies
         """
         with app.initialized(session_name="Pull"):
    -        app.stream.pull(elements, selection=deps, remote=remote)
    +        app.stream.pull(elements, selection=deps, remote=remote, pull_buildtrees=pull_buildtrees)
     
     
     ##################################################################
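
    With both commands gaining the flag, the behaviour can be requested per
    invocation, e.g. `bst build --pull-buildtrees target.bst` or
    `bst pull --pull-buildtrees target.bst` (the element name here is
    hypothetical); the pullbuildtrees option added in _context.py above can
    enable it globally.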
    

  • buildstream/_platform/darwin.py
    @@ -34,6 +34,9 @@ class Darwin(Platform):
             super().__init__()
     
         def create_sandbox(self, *args, **kwargs):
    +        kwargs['dummy_reason'] = \
    +            "OSXFUSE is not supported and there are no supported sandbox" + \
    +            "technologies for OSX at this time"
             return SandboxDummy(*args, **kwargs)
     
         def check_sandbox_config(self, config):
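
    One nit here: the two string literals concatenate without a separating
    space, producing "...supported sandboxtechnologies...". A trailing space in
    the first literal would fix the message:

        kwargs['dummy_reason'] = \
            "OSXFUSE is not supported and there are no supported sandbox " + \
            "technologies for OSX at this time"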
    

  • buildstream/_platform/linux.py
    @@ -37,24 +37,30 @@ class Linux(Platform):
             self._uid = os.geteuid()
             self._gid = os.getegid()
     
    +        self._have_fuse = os.path.exists("/dev/fuse")
    +        self._bwrap_exists = _site.check_bwrap_version(0, 0, 0)
    +        self._have_good_bwrap = _site.check_bwrap_version(0, 1, 2)
    +
    +        self._local_sandbox_available = self._have_fuse and self._have_good_bwrap
    +
             self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
     
    -        if self._local_sandbox_available():
    +        if self._local_sandbox_available:
                 self._user_ns_available = self._check_user_ns_available()
             else:
                 self._user_ns_available = False
     
         def create_sandbox(self, *args, **kwargs):
    -        if not self._local_sandbox_available():
    -            return SandboxDummy(*args, **kwargs)
    +        if not self._local_sandbox_available:
    +            return self._create_dummy_sandbox(*args, **kwargs)
             else:
    -            from ..sandbox._sandboxbwrap import SandboxBwrap
    -            # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
    -            kwargs['user_ns_available'] = self._user_ns_available
    -            kwargs['die_with_parent_available'] = self._die_with_parent_available
    -            return SandboxBwrap(*args, **kwargs)
    +            return self._create_bwrap_sandbox(*args, **kwargs)
     
         def check_sandbox_config(self, config):
    +        if not self._local_sandbox_available:
    +            # Accept all sandbox configs as it's irrelevant with the dummy sandbox (no Sandbox.run).
    +            return True
    +
             if self._user_ns_available:
                 # User namespace support allows arbitrary build UID/GID settings.
                 return True

    @@ -66,11 +72,26 @@ class Linux(Platform):
         ################################################
         #              Private Methods                 #
         ################################################
    -    def _local_sandbox_available(self):
    -        try:
    -            return os.path.exists(utils.get_host_tool('bwrap')) and os.path.exists('/dev/fuse')
    -        except utils.ProgramNotFoundError:
    -            return False
    +
    +    def _create_dummy_sandbox(self, *args, **kwargs):
    +        reasons = []
    +        if not self._have_fuse:
    +            reasons.append("FUSE is unavailable")
    +        if not self._have_good_bwrap:
    +            if self._bwrap_exists:
    +                reasons.append("`bwrap` is too old (bst needs at least 0.1.2)")
    +            else:
    +                reasons.append("`bwrap` executable not found")
    +
    +        kwargs['dummy_reason'] = " and ".join(reasons)
    +        return SandboxDummy(*args, **kwargs)
    +
    +    def _create_bwrap_sandbox(self, *args, **kwargs):
    +        from ..sandbox._sandboxbwrap import SandboxBwrap
    +        # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
    +        kwargs['user_ns_available'] = self._user_ns_available
    +        kwargs['die_with_parent_available'] = self._die_with_parent_available
    +        return SandboxBwrap(*args, **kwargs)
     
         def _check_user_ns_available(self):
             # Here, lets check if bwrap is able to create user namespaces,
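
    The existence probe leans on check_bwrap_version() returning False when no
    bwrap is installed, so asking for version (0, 0, 0) effectively asks "is
    any bwrap present at all?"; the two flags then distinguish "missing" from
    "too old" when assembling the dummy_reason string. A toy check of the
    joining (reason strings copied from the diff):

        reasons = ["FUSE is unavailable", "`bwrap` executable not found"]
        assert " and ".join(reasons) == \
            "FUSE is unavailable and `bwrap` executable not found"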
    

  • buildstream/_scheduler/jobs/job.py
    @@ -119,6 +119,8 @@ class Job():
             self._result = None                    # Return value of child action in the parent
             self._tries = 0                        # Try count, for retryable jobs
             self._skipped_flag = False             # Indicate whether the job was skipped.
    +        self._terminated = False               # Whether this job has been explicitly terminated
    +
             # If False, a retry will not be attempted regardless of whether _tries is less than _max_retries.
             #
             self._retry_flag = True

    @@ -190,6 +192,8 @@ class Job():
             # Terminate the process using multiprocessing API pathway
             self._process.terminate()
     
    +        self._terminated = True
    +
         # terminate_wait()
         #
         # Wait for terminated jobs to complete

    @@ -273,18 +277,22 @@ class Job():
         # running the integration commands).
         #
         # Args:
    -    #     (int): The plugin identifier for this task
    +    #     task_id (int): The plugin identifier for this task
         #
         def set_task_id(self, task_id):
             self._task_id = task_id
     
         # skipped
         #
    +    # This will evaluate to True if the job was skipped
    +    # during processing, or if it was forcefully terminated.
    +    #
         # Returns:
    -    #    bool: True if the job was skipped while processing.
    +    #    (bool): Whether the job should appear as skipped
    +    #
         @property
         def skipped(self):
    -        return self._skipped_flag
    +        return self._skipped_flag or self._terminated
     
         #######################################################
         #                  Abstract Methods                   #
    

  • buildstream/_scheduler/queues/pullqueue.py
    @@ -32,9 +32,20 @@ class PullQueue(Queue):
         complete_name = "Pulled"
         resources = [ResourceType.DOWNLOAD, ResourceType.CACHE]
     
    +    def __init__(self, scheduler, buildtrees=False):
    +        super().__init__(scheduler)
    +
    +        # Current default exclusions on pull
    +        self._excluded_subdirs = ["buildtree"]
    +        self._subdir = None
    +        # If buildtrees are to be pulled, remove the value from exclusion list
    +        if buildtrees:
    +            self._subdir = "buildtree"
    +            self._excluded_subdirs.remove(self._subdir)
    +
         def process(self, element):
             # returns whether an artifact was downloaded or not
    -        if not element._pull():
    +        if not element._pull(subdir=self._subdir, excluded_subdirs=self._excluded_subdirs):
                 raise SkipJob(self.action_name)
     
         def status(self, element):

    @@ -49,7 +60,7 @@ class PullQueue(Queue):
             if not element._can_query_cache():
                 return QueueStatus.WAIT
     
    -        if element._pull_pending():
    +        if element._pull_pending(subdir=self._subdir):
                 return QueueStatus.READY
             else:
                 return QueueStatus.SKIP
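
    The two resulting configurations, as a self-contained check of the
    constructor logic above:

        def configure(buildtrees):
            excluded, subdir = ["buildtree"], None
            if buildtrees:
                subdir = "buildtree"
                excluded.remove(subdir)
            return subdir, excluded

        assert configure(False) == (None, ["buildtree"])  # default: skip buildtrees
        assert configure(True) == ("buildtree", [])       # opt-in: pull them too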
    

  • buildstream/_scheduler/queues/queue.py
    @@ -326,16 +326,20 @@ class Queue():
                               detail=traceback.format_exc())
                 self.failed_elements.append(element)
             else:
    -
    -            # No exception occured, handle the success/failure state in the normal way
                 #
    +            # No exception occured in post processing
    +            #
    +
    +            # All jobs get placed on the done queue for later processing.
                 self._done_queue.append(job)
     
    -            if success:
    -                if not job.skipped:
    -                    self.processed_elements.append(element)
    -                else:
    -                    self.skipped_elements.append(element)
    +            # A Job can be skipped whether or not it has failed,
    +            # we want to only bookkeep them as processed or failed
    +            # if they are not skipped.
    +            if job.skipped:
    +                self.skipped_elements.append(element)
    +            elif success:
    +                self.processed_elements.append(element)
                 else:
                     self.failed_elements.append(element)
     
    

  • buildstream/_scheduler/scheduler.py
    @@ -387,6 +387,15 @@ class Scheduler():
         # A loop registered event callback for keyboard interrupts
         #
         def _interrupt_event(self):
    +
    +        # FIXME: This should not be needed, but for some reason we receive an
    +        #        additional SIGINT event when the user hits ^C a second time
    +        #        to inform us that they really intend to terminate; even though
    +        #        we have disconnected our handlers at this time.
    +        #
    +        if self.terminated:
    +            return
    +
             # Leave this to the frontend to decide, if no
             # interrrupt callback was specified, then just terminate.
             if self._interrupt_callback:
    

  • buildstream/_site.py
    @@ -78,18 +78,12 @@ def check_bwrap_version(major, minor, patch):
             if not bwrap_path:
                 return False
             cmd = [bwrap_path, "--version"]
    -        version = str(subprocess.check_output(cmd).split()[1], "utf-8")
    +        try:
    +            version = str(subprocess.check_output(cmd).split()[1], "utf-8")
    +        except subprocess.CalledProcessError:
    +            # Failure trying to run bubblewrap
    +            return False
             _bwrap_major, _bwrap_minor, _bwrap_patch = map(int, version.split("."))
     
         # Check whether the installed version meets the requirements
    -    if _bwrap_major > major:
    -        return True
    -    elif _bwrap_major < major:
    -        return False
    -    else:
    -        if _bwrap_minor > minor:
    -            return True
    -        elif _bwrap_minor < minor:
    -            return False
    -        else:
    -            return _bwrap_patch >= patch
    +    return (_bwrap_major, _bwrap_minor, _bwrap_patch) >= (major, minor, patch)
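
    The one-liner is equivalent to the removed ladder because Python compares
    tuples lexicographically, which matches major/minor/patch precedence:

        assert (0, 3, 0) >= (0, 1, 2)       # newer minor wins regardless of patch
        assert not (0, 1, 1) >= (0, 1, 2)   # same major/minor, older patch loses
        assert (1, 0, 0) >= (0, 9, 9)       # major dominates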

  • buildstream/_stream.py
    @@ -160,12 +160,14 @@ class Stream():
         #    track_cross_junctions (bool): Whether tracking should cross junction boundaries
         #    build_all (bool): Whether to build all elements, or only those
         #                      which are required to build the target.
    +    #    pull_buildtrees (bool): Whether to pull buildtrees from a remote cache server
         #
         def build(self, targets, *,
                   track_targets=None,
                   track_except=None,
                   track_cross_junctions=False,
    -              build_all=False):
    +              build_all=False,
    +              pull_buildtrees=False):
     
             if build_all:
                 selection = PipelineSelection.ALL

    @@ -195,7 +197,10 @@ class Stream():
                 self._add_queue(track_queue, track=True)
     
             if self._artifacts.has_fetch_remotes():
    -            self._add_queue(PullQueue(self._scheduler))
    +            # Query if pullbuildtrees has been set globally in user config
    +            if self._context.pullbuildtrees:
    +                pull_buildtrees = True
    +            self._add_queue(PullQueue(self._scheduler, buildtrees=pull_buildtrees))
     
             self._add_queue(FetchQueue(self._scheduler, skip_cached=True))
             self._add_queue(BuildQueue(self._scheduler))

    @@ -295,7 +300,8 @@ class Stream():
         #
         def pull(self, targets, *,
                  selection=PipelineSelection.NONE,
    -             remote=None):
    +             remote=None,
    +             pull_buildtrees=False):
     
             use_config = True
             if remote:

    @@ -310,8 +316,12 @@ class Stream():
             if not self._artifacts.has_fetch_remotes():
                 raise StreamError("No artifact caches available for pulling artifacts")
     
    +        # Query if pullbuildtrees has been set globally in user config
    +        if self._context.pullbuildtrees:
    +            pull_buildtrees = True
    +
             self._pipeline.assert_consistent(elements)
    -        self._add_queue(PullQueue(self._scheduler))
    +        self._add_queue(PullQueue(self._scheduler, buildtrees=pull_buildtrees))
             self._enqueue_plan(elements)
             self._run()
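
    In both entry points the effective setting is the logical OR of the CLI
    flag and the user configuration: the config can switch the behaviour on,
    but the flag cannot switch it off again. Equivalently:

        pull_buildtrees = pull_buildtrees or self._context.pullbuildtrees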
     
    

  • buildstream/element.py
    @@ -212,7 +212,7 @@ class Element(Plugin):
             self.__staged_sources_directory = None  # Location where Element.stage_sources() was called
             self.__tainted = None                   # Whether the artifact is tainted and should not be shared
             self.__required = False                 # Whether the artifact is required in the current session
    -        self.__build_result = None              # The result of assembling this Element
    +        self.__build_result = None              # The result of assembling this Element (success, description, detail)
             self._build_log_path = None            # The path of the build log for this Element
     
             # hash tables of loaded artifact metadata, hashed by key

    @@ -1379,10 +1379,10 @@ class Element(Plugin):
                 if not vdirectory.is_empty():
                     raise ElementError("Staging directory '{}' is not empty".format(vdirectory))
     
    -            # While mkdtemp is advertised as using the TMP environment variable, it
    -            # doesn't, so this explicit extraction is necesasry.
    -            tmp_prefix = os.environ.get("TMP", None)
    -            temp_staging_directory = tempfile.mkdtemp(prefix=tmp_prefix)
    +            # It's advantageous to have this temporary directory on
    +            # the same filing system as the rest of our cache.
    +            temp_staging_location = os.path.join(self._get_context().artifactdir, "staging_temp")
    +            temp_staging_directory = tempfile.mkdtemp(prefix=temp_staging_location)
     
                 try:
                     workspace = self._get_workspace()

    @@ -1479,11 +1479,13 @@ class Element(Plugin):
     
             self._update_state()
     
    -        if self._get_workspace() and self._cached():
    +        if self._get_workspace() and self._cached_success():
    +            assert utils._is_main_process(), \
    +                "Attempted to save workspace configuration from child process"
                 #
                 # Note that this block can only happen in the
    -            # main process, since `self._cached()` cannot
    -            # be true when assembly is completed in the task.
    +            # main process, since `self._cached_success()` cannot
    +            # be true when assembly is successful in the task.
                 #
                 # For this reason, it is safe to update and
                 # save the workspaces configuration

    @@ -1689,18 +1691,26 @@ class Element(Plugin):
     
         # _pull_pending()
         #
    -    # Check whether the artifact will be pulled.
    +    # Check whether the artifact will be pulled. If the pull operation is to
    +    # include a specific subdir of the element artifact (from cli or user conf)
    +    # then the local cache is queried for the subdirs existence.
    +    #
    +    # Args:
    +    #    subdir (str): Whether the pull has been invoked with a specific subdir set
         #
         # Returns:
         #   (bool): Whether a pull operation is pending
         #
    -    def _pull_pending(self):
    +    def _pull_pending(self, subdir=None):
             if self._get_workspace():
                 # Workspace builds are never pushed to artifact servers
                 return False
     
    -        if self.__strong_cached:
    -            # Artifact already in local cache
    +        if self.__strong_cached and subdir:
    +            # If we've specified a subdir, check if the subdir is cached locally
    +            if self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, subdir):
    +                return False
    +        elif self.__strong_cached:
                 return False
     
             # Pull is pending if artifact remote server available

    @@ -1722,11 +1732,10 @@ class Element(Plugin):
     
             self._update_state()
     
    -    def _pull_strong(self, *, progress=None):
    +    def _pull_strong(self, *, progress=None, subdir=None, excluded_subdirs=None):
             weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
    -
             key = self.__strict_cache_key
    -        if not self.__artifacts.pull(self, key, progress=progress):
    +        if not self.__artifacts.pull(self, key, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
                 return False
     
             # update weak ref by pointing it to this newly fetched artifact

    @@ -1734,10 +1743,10 @@ class Element(Plugin):
     
             return True
     
    -    def _pull_weak(self, *, progress=None):
    +    def _pull_weak(self, *, progress=None, subdir=None, excluded_subdirs=None):
             weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
    -
    -        if not self.__artifacts.pull(self, weak_key, progress=progress):
    +        if not self.__artifacts.pull(self, weak_key, progress=progress, subdir=subdir,
    +                                     excluded_subdirs=excluded_subdirs):
                 return False
     
             # extract strong cache key from this newly fetched artifact

    @@ -1755,17 +1764,17 @@ class Element(Plugin):
         #
         # Returns: True if the artifact has been downloaded, False otherwise
         #
    -    def _pull(self):
    +    def _pull(self, subdir=None, excluded_subdirs=None):
             context = self._get_context()
     
             def progress(percent, message):
                 self.status(message)
     
             # Attempt to pull artifact without knowing whether it's available
    -        pulled = self._pull_strong(progress=progress)
    +        pulled = self._pull_strong(progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs)
     
             if not pulled and not self._cached() and not context.get_strict():
    -            pulled = self._pull_weak(progress=progress)
    +            pulled = self._pull_weak(progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs)
     
             if not pulled:
                 return False

    @@ -1788,10 +1797,21 @@ class Element(Plugin):
             if not self._cached():
                 return True
     
    -        # Do not push tained artifact
    +        # Do not push tainted artifact
             if self.__get_tainted():
                 return True
     
    +        # Do not push elements that have a dangling buildtree artifact unless element type is
    +        # expected to have an empty buildtree directory
    +        if not self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, 'buildtree'):
    +            return True
    +
    +        # strict_cache_key can't be relied on to be available when running in non strict mode
    +        context = self._get_context()
    +        if not context.get_strict():
    +            if not self.__artifacts.contains_subdir_artifact(self, self.__weak_cache_key, 'buildtree'):
    +                return True
    +
             return False
     
         # _push():
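
    The staging change relies on tempfile.mkdtemp() treating prefix as a
    literal path prefix, so joining under artifactdir keeps staging on the same
    filesystem as the cache. A self-contained sketch:

        import os
        import tempfile

        artifactdir = tempfile.mkdtemp()   # stand-in for context.artifactdir
        prefix = os.path.join(artifactdir, "staging_temp")
        staging = tempfile.mkdtemp(prefix=prefix)   # e.g. .../staging_tempab3f2c1d
        assert staging.startswith(prefix)
        assert os.path.dirname(staging) == artifactdir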
    

  • buildstream/plugins/sources/git.py
    @@ -184,10 +184,18 @@ class GitMirror(SourceFetcher):
                          cwd=self.mirror)
     
         def fetch(self, alias_override=None):
    -        self.ensure(alias_override)
    -        if not self.has_ref():
    -            self._fetch(alias_override)
    -        self.assert_ref()
    +        # Resolve the URL for the message
    +        resolved_url = self.source.translate_url(self.url,
    +                                                 alias_override=alias_override,
    +                                                 primary=self.primary)
    +
    +        with self.source.timed_activity("Fetching from {}"
    +                                        .format(resolved_url),
    +                                        silent_nested=True):
    +            self.ensure(alias_override)
    +            if not self.has_ref():
    +                self._fetch(alias_override)
    +            self.assert_ref()
     
         def has_ref(self):
             if not self.ref:
    

  • buildstream/sandbox/_sandboxdummy.py
    @@ -23,6 +23,7 @@ from . import Sandbox
     class SandboxDummy(Sandbox):
         def __init__(self, *args, **kwargs):
             super().__init__(*args, **kwargs)
    +        self._reason = kwargs.get("dummy_reason", "no reason given")
     
         def run(self, command, flags, *, cwd=None, env=None):
     

    @@ -37,4 +38,4 @@ class SandboxDummy(Sandbox):
                                "'{}'".format(command[0]),
                                reason='missing-command')
     
    -        raise SandboxError("This platform does not support local builds")
    +        raise SandboxError("This platform does not support local builds: {}".format(self._reason))

  • buildstream/source.py
    @@ -965,28 +965,48 @@ class Source(Plugin):
         # Tries to call fetch for every mirror, stopping once it succeeds
         def __do_fetch(self, **kwargs):
             project = self._get_project()
    -        source_fetchers = self.get_source_fetchers()
    +        context = self._get_context()
    +
    +        # Silence the STATUS messages which might happen as a result
    +        # of checking the source fetchers.
    +        with context.silence():
    +            source_fetchers = self.get_source_fetchers()
     
             # Use the source fetchers if they are provided
             #
             if source_fetchers:
    -            for fetcher in source_fetchers:
    -                alias = fetcher._get_alias()
    -                for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
    -                    try:
    -                        fetcher.fetch(uri)
    -                    # FIXME: Need to consider temporary vs. permanent failures,
    -                    #        and how this works with retries.
    -                    except BstError as e:
    -                        last_error = e
    -                        continue
    -
    -                    # No error, we're done with this fetcher
    -                    break
     
    -                else:
    -                    # No break occurred, raise the last detected error
    -                    raise last_error
    +            # Use a contorted loop here, this is to allow us to
    +            # silence the messages which can result from consuming
    +            # the items of source_fetchers, if it happens to be a generator.
    +            #
    +            source_fetchers = iter(source_fetchers)
    +            try:
    +
    +                while True:
    +
    +                    with context.silence():
    +                        fetcher = next(source_fetchers)
    +
    +                    alias = fetcher._get_alias()
    +                    for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
    +                        try:
    +                            fetcher.fetch(uri)
    +                        # FIXME: Need to consider temporary vs. permanent failures,
    +                        #        and how this works with retries.
    +                        except BstError as e:
    +                            last_error = e
    +                            continue
    +
    +                        # No error, we're done with this fetcher
    +                        break
    +
    +                    else:
    +                        # No break occurred, raise the last detected error
    +                        raise last_error
    +
    +            except StopIteration:
    +                pass
     
         # Default codepath is to reinstantiate the Source
         #
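
    The "contorted loop" exists because a generator's body only runs when
    next() is called, so each resume needs its own silence() scope while the
    fetch itself stays audible. A self-contained demonstration of the timing:

        def fetchers():
            print("probing mirror")   # imagine a STATUS message here
            yield "fetcher-1"

        it = iter(fetchers())
        # nothing has printed yet ...
        item = next(it)               # "probing mirror" prints only now
        assert item == "fetcher-1"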
    

  • buildstream/utils.py
    @@ -502,7 +502,7 @@ def get_bst_version():
     
     @contextmanager
     def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
    -                     errors=None, newline=None, closefd=True, opener=None):
    +                     errors=None, newline=None, closefd=True, opener=None, tempdir=None):
         """Save a file with a temporary name and rename it into place when ready.
     
         This is a context manager which is meant for saving data to files.

    @@ -529,8 +529,9 @@ def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
         # https://bugs.python.org/issue8604
     
         assert os.path.isabs(filename), "The utils.save_file_atomic() parameter ``filename`` must be an absolute path"
    -    dirname = os.path.dirname(filename)
    -    fd, tempname = tempfile.mkstemp(dir=dirname)
    +    if tempdir is None:
    +        tempdir = os.path.dirname(filename)
    +    fd, tempname = tempfile.mkstemp(dir=tempdir)
         os.close(fd)
     
         f = open(tempname, mode=mode, buffering=buffering, encoding=encoding,

    @@ -562,6 +563,9 @@
     #
     # Get the disk usage of a given directory in bytes.
     #
    +# This function assumes that files do not inadvertantly
    +# disappear while this function is running.
    +#
     # Arguments:
     #     (str) The path whose size to check.
     #

    @@ -682,7 +686,7 @@ def _force_rmtree(rootpath, **kwargs):
     
         try:
             shutil.rmtree(rootpath, **kwargs)
    -    except shutil.Error as e:
    +    except OSError as e:
             raise UtilError("Failed to remove cache directory '{}': {}"
                             .format(rootpath, e))
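
    The new tempdir parameter matters because os.rename() is only atomic
    within a single filesystem; the temporary file should live on the same
    filesystem as its destination (here CASCache passes its own tmpdir). A
    self-contained sketch of the idea:

        import os
        import tempfile

        def save_atomic(filename, data, tempdir=None):
            if tempdir is None:
                tempdir = os.path.dirname(filename)
            fd, tempname = tempfile.mkstemp(dir=tempdir)
            with os.fdopen(fd, 'w') as f:
                f.write(data)
            # Atomic only if tempdir and filename share a filesystem
            os.rename(tempname, filename)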
     
    

  • contrib/bst-docker-import
    @@ -0,0 +1,102 @@
    +#!/bin/bash
    +#
    +#  Copyright 2018 Bloomberg Finance LP
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +#
    +#  Authors:
    +#        Chadnan Singh <csingh43 bloomberg net>
    +
    +# This is a helper script to generate Docker images using checkouts of
    +# BuildStream elements.
    +
    +usage() {
    +    cat <<EOF
    +
    +USAGE: $(basename "$0") [-c BST_CMD] [-m MESSAGE] [-t TAG] [-h] ELEMENT
    +
    +Create a Docker image from bst checkout of an element.
    +
    +OPTIONS:
    +    -c BST_CMD    Path to BuildStream command (default: bst).
    +    -m MESSAGE    Commit message for the imported image.
    +    -t TAG        Tag of the imported image.
    +    -h            Print this help text and exit.
    +
    +EXAMPLES:
    +
    +    # Import hello.bst as a Docker image with tag "bst-hello" and message "hello"
    +    $(basename "$0") -m hello -t bst-hello hello.bst
    +
    +    # Import hello.bst as a Docker image with tag "bst-hello" using bst-here
    +    $(basename "$0") -c bst-here -t bst-hello hello.bst
    +
    +EOF
    +    exit "$1"
    +}
    +
    +die() {
    +    echo "FATAL: $1" >&2
    +    exit 1
    +}
    +
    +bst_cmd=bst
    +docker_import_cmd=(docker import)
    +docker_image_tag=
    +
    +while getopts c:m:t:h arg
    +do
    +    case $arg in
    +    c)
    +        bst_cmd="$OPTARG"
    +        ;;
    +    m)
    +        docker_import_cmd+=('-m' "$OPTARG")
    +        ;;
    +    t)
    +        docker_image_tag="$OPTARG"
    +        ;;
    +    h)
    +        usage 0
    +        ;;
    +    \?)
    +        usage 1
    +    esac
    +done
    +
    +shift $((OPTIND-1))
    +if [[ "$#" != 1 ]]; then
    +    echo "$0: No element specified" >&2
    +    usage 1
    +fi
    +element="$1"
    +
    +# Dump to a temporary file in the current directory.
    +# NOTE: We use current directory to try to ensure compatibility with scripts
    +# like bst-here, assuming that the current working directory is mounted
    +# inside the container.
    +
    +checkout_tar="bst-checkout-$(basename "$element")-$RANDOM.tar"
    +
    +echo "INFO: Checking out $element ..." >&2
    +$bst_cmd checkout --tar "$element" "$checkout_tar" || die "Failed to checkout $element"
    +echo "INFO: Successfully checked out $element" >&2
    +
    +echo "INFO: Importing Docker image ..."
    +"${docker_import_cmd[@]}" "$checkout_tar" "$docker_image_tag" || die "Failed to import Docker image from tarball"
    +echo "INFO: Successfully import Docker image $docker_image_tag"
    +
    +echo "INFO: Cleaning up ..."
    +rm "$checkout_tar" || die "Failed to remove $checkout_tar"
    +echo "INFO: Clean up finished"

  • setup.py
    @@ -54,12 +54,13 @@ REQUIRED_BWRAP_MINOR = 1
     REQUIRED_BWRAP_PATCH = 2
     
     
    -def exit_bwrap(reason):
    +def warn_bwrap(reason):
         print(reason +
    -          "\nBuildStream requires Bubblewrap (bwrap) for"
    -          " sandboxing the build environment. Install it using your package manager"
    -          " (usually bwrap or bubblewrap)")
    -    sys.exit(1)
    +          "\nBuildStream requires Bubblewrap (bwrap {}.{}.{} or better),"
    +          " during local builds, for"
    +          " sandboxing the build environment.\nInstall it using your package manager"
    +          " (usually bwrap or bubblewrap) otherwise you will be limited to"
    +          " remote builds only.".format(REQUIRED_BWRAP_MAJOR, REQUIRED_BWRAP_MINOR, REQUIRED_BWRAP_PATCH))
     
     
     def bwrap_too_old(major, minor, patch):

    @@ -76,18 +77,19 @@ def bwrap_too_old(major, minor, patch):
             return False
     
     
    -def assert_bwrap():
    +def check_for_bwrap():
         platform = os.environ.get('BST_FORCE_BACKEND', '') or sys.platform
         if platform.startswith('linux'):
             bwrap_path = shutil.which('bwrap')
             if not bwrap_path:
    -            exit_bwrap("Bubblewrap not found")
    +            warn_bwrap("Bubblewrap not found")
    +            return
     
             version_bytes = subprocess.check_output([bwrap_path, "--version"]).split()[1]
             version_string = str(version_bytes, "utf-8")
             major, minor, patch = map(int, version_string.split("."))
             if bwrap_too_old(major, minor, patch):
    -            exit_bwrap("Bubblewrap too old")
    +            warn_bwrap("Bubblewrap too old")
     
     
     ###########################################

    @@ -126,7 +128,7 @@ bst_install_entry_points = {
     }
     
     if not os.environ.get('BST_ARTIFACTS_ONLY', ''):
    -    assert_bwrap()
    +    check_for_bwrap()
         bst_install_entry_points['console_scripts'] += [
             'bst = buildstream._frontend:cli'
         ]
    

  • tests/completions/completions.py
    @@ -103,7 +103,7 @@ def test_commands(cli, cmd, word_idx, expected):
         ('bst --no-colors build -', 3, ['--all ', '--track ', '--track-all ',
                                         '--track-except ',
                                         '--track-cross-junctions ', '-J ',
    -                                    '--track-save ']),
    +                                    '--track-save ', '--pull-buildtrees ']),
     
         # Test the behavior of completing after an option that has a
         # parameter that cannot be completed, vs an option that has

  • tests/frontend/mirror.py
    ... ... @@ -139,6 +139,82 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind):
    139 139
         result.assert_success()
    
    140 140
     
    
    141 141
     
    
    142
    +@pytest.mark.datafiles(DATA_DIR)
    
    143
    +@pytest.mark.parametrize("ref_storage", [("inline"), ("project.refs")])
    
    144
    +@pytest.mark.parametrize("mirror", [("no-mirror"), ("mirror"), ("unrelated-mirror")])
    
    145
    +def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror):
    
    146
    +    bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
    
    147
    +    dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
    
    148
    +    upstream_repodir = os.path.join(str(tmpdir), 'upstream')
    
    149
    +    mirror_repodir = os.path.join(str(tmpdir), 'mirror')
    
    150
    +    project_dir = os.path.join(str(tmpdir), 'project')
    
    151
    +    os.makedirs(project_dir)
    
    152
    +    element_dir = os.path.join(project_dir, 'elements')
    
    153
    +
    
    154
    +    # Create repo objects of the upstream and mirror
    
    155
    +    upstream_repo = create_repo('tar', upstream_repodir)
    
    156
    +    upstream_ref = upstream_repo.create(bin_files_path)
    
    157
    +    mirror_repo = upstream_repo.copy(mirror_repodir)
    
    158
    +    mirror_ref = upstream_ref
    
    159
    +    upstream_ref = upstream_repo.create(dev_files_path)
    
    160
    +
    
    161
    +    element = {
    
    162
    +        'kind': 'import',
    
    163
    +        'sources': [
    
    164
    +            upstream_repo.source_config(ref=upstream_ref if ref_storage == 'inline' else None)
    
    165
    +        ]
    
    166
    +    }
    
    167
    +    element_name = 'test.bst'
    
    168
    +    element_path = os.path.join(element_dir, element_name)
    
    169
    +    full_repo = element['sources'][0]['url']
    
    170
    +    upstream_map, repo_name = os.path.split(full_repo)
    
    171
    +    alias = 'foo'
    
    172
    +    aliased_repo = alias + ':' + repo_name
    
    173
    +    element['sources'][0]['url'] = aliased_repo
    
    174
    +    full_mirror = mirror_repo.source_config()['url']
    
    175
    +    mirror_map, _ = os.path.split(full_mirror)
    
    176
    +    os.makedirs(element_dir)
    
    177
    +    _yaml.dump(element, element_path)
    
    178
    +
    
    179
    +    if ref_storage == 'project.refs':
    
    180
    +        # Manually set project.refs to avoid caching the repo prematurely
    
    181
    +        project_refs = {'projects': {
    
    182
    +            'test': {
    
    183
    +                element_name: [
    
    184
    +                    {'ref': upstream_ref}
    
    185
    +                ]
    
    186
    +            }
    
    187
    +        }}
    
    188
    +        project_refs_path = os.path.join(project_dir, 'project.refs')
    
    189
    +        _yaml.dump(project_refs, project_refs_path)
    
    190
    +
    
    191
    +    project = {
    
    192
    +        'name': 'test',
    
    193
    +        'element-path': 'elements',
    
    194
    +        'aliases': {
    
    195
    +            alias: upstream_map + "/"
    
    196
    +        },
    
    197
    +        'ref-storage': ref_storage
    
    198
    +    }
    
    199
    +    if mirror != 'no-mirror':
    
    200
    +        mirror_data = [{
    
    201
    +            'name': 'middle-earth',
    
    202
    +            'aliases': {alias: [mirror_map + '/']}
    
    203
    +        }]
    
    204
    +        if mirror == 'unrelated-mirror':
    
    205
    +            mirror_data.insert(0, {
    
    206
    +                'name': 'narnia',
    
    207
    +                'aliases': {'frob': ['http://www.example.com/repo']}
    
    208
    +            })
    
    209
    +        project['mirrors'] = mirror_data
    
    210
    +
    
    211
    +    project_file = os.path.join(project_dir, 'project.conf')
    
    212
    +    _yaml.dump(project, project_file)
    
    213
    +
    
    214
    +    result = cli.run(project=project_dir, args=['fetch', element_name])
    
    215
    +    result.assert_success()
    
    216
    +
    
    217
    +
    
    142 218
     @pytest.mark.datafiles(DATA_DIR)
    
    143 219
     @pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
    
    144 220
     def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind):
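
    For reference, the project configuration this test dumps for the "mirror" case with project.refs ref-storage corresponds roughly to the following project.conf (a sketch; the file:// URLs stand in for the tar repos created under the test's tmpdir):

        name: test
        element-path: elements
        ref-storage: project.refs
        aliases:
          foo: file:///.../upstream/
        mirrors:
        - name: middle-earth
          aliases:
            foo:
            - file:///.../mirror/

    Because the element's source refers to the aliased URL foo:repo, the fetch can be satisfied by the mirror even though the upstream ref has since moved on.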
    

  • tests/frontend/workspace.py
    ... ... @@ -43,7 +43,8 @@ DATA_DIR = os.path.join(
    43 43
     )
    
    44 44
     
    
    45 45
     
    
    46
    -def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None, project_path=None):
    
    46
    +def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
    
    47
    +                   project_path=None, element_attrs=None):
    
    47 48
         if not workspace_dir:
    
    48 49
             workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
    
    49 50
         if not project_path:
    
    ... ... @@ -69,6 +70,8 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir
    69 70
                 repo.source_config(ref=ref)
    
    70 71
             ]
    
    71 72
         }
    
    73
    +    if element_attrs:
    
    74
    +        element = {**element, **element_attrs}
    
    72 75
         _yaml.dump(element,
    
    73 76
                    os.path.join(element_path,
    
    74 77
                                 element_name))
    
    ... ... @@ -854,3 +857,22 @@ def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
    854 857
     
    
    855 858
         # Check that the original /usr/bin/hello is not in the checkout
    
    856 859
         assert not os.path.exists(os.path.join(checkout, 'usr', 'bin', 'hello'))
    
    860
    +
    
    861
    +
    
    862
    +@pytest.mark.datafiles(DATA_DIR)
    
    863
    +def test_multiple_failed_builds(cli, tmpdir, datafiles):
    
    864
    +    element_config = {
    
    865
    +        "kind": "manual",
    
    866
    +        "config": {
    
    867
    +            "configure-commands": [
    
    868
    +                "unknown_command_that_will_fail"
    
    869
    +            ]
    
    870
    +        }
    
    871
    +    }
    
    872
    +    element_name, project, _ = open_workspace(cli, tmpdir, datafiles,
    
    873
    +                                              "git", False, element_attrs=element_config)
    
    874
    +
    
    875
    +    for _ in range(2):
    
    876
    +        result = cli.run(project=project, args=["build", element_name])
    
    877
    +        assert "BUG" not in result.stderr
    
    878
    +        assert cli.get_element_state(project, element_name) != "cached"
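
    The element_attrs hook relies on Python dict unpacking, where keys from the override mapping win; a minimal sketch of what test_multiple_failed_builds ends up writing as its element (the source entry is illustrative):

        # {**base, **overrides}: later keys win, so 'kind' is replaced and
        # 'config' is added while 'sources' is preserved.
        base_element = {
            'kind': 'import',
            'sources': [{'kind': 'git', 'url': 'foo:repo.git', 'ref': 'deadbeef'}],  # illustrative
        }
        element_attrs = {
            'kind': 'manual',
            'config': {'configure-commands': ['unknown_command_that_will_fail']},
        }
        element = {**base_element, **element_attrs}
        assert element['kind'] == 'manual'   # overridden
        assert 'sources' in element          # kept from the base definition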


