[Notes] [Git][BuildStream/buildstream][tpollard/workspacebuildtree] 4 commits: _frontend/cli.py: Add --pull-buildtrees to bst build & pull



Title: GitLab

Tom Pollard pushed to branch tpollard/workspacebuildtree at BuildStream / buildstream

Commits:

12 changed files:

Changes:

  • NEWS
    ... ... @@ -31,6 +31,17 @@ buildstream 1.3.1
    31 31
         new the `conf-root` variable to make the process easier. And there has been
    
    32 32
         a bug fix to workspaces so they can be built in workspaces too.
    
    33 33
     
    
    34
    +  o Due to the element `buildtree` being cached in the respective artifact their
    
    35
    +    size in some cases has significantly increased. In *most* cases the buildtree
    
    36
    +    is not utilised when building targets, as such by default bst 'pull' & 'build'
    
    37
    +    will not fetch buildtrees from remotes. This behaviour can be overridden with
    
    38
    +    the cli option '--pull-buildtrees', or the user configuration option
    
    39
    +    'pullbuildtrees = True'. The override will also add the buildtree to already
    
    40
    +    cached artifacts. When attempting to populate an artifactcache server with
    
    41
    +    cached artifacts, only 'complete' elements can be pushed. If the element is
    
    42
    +    expected to have a populated buildtree then it must be cached before pushing.
    
    43
    +
    
    44
    +
    
    34 45
     =================
    
    35 46
     buildstream 1.1.5
    
    36 47
     =================
    

  • buildstream/_artifactcache/artifactcache.py
    ... ... @@ -427,6 +427,22 @@ class ArtifactCache():
    427 427
             raise ImplError("Cache '{kind}' does not implement contains()"
    
    428 428
                             .format(kind=type(self).__name__))
    
    429 429
     
    
    430
    +    # contains_subdir_artifact():
    
    431
    +    #
    
    432
    +    # Check whether an artifact element contains a digest for a subdir
    
    433
    +    # which is populated in the cache, i.e. non-dangling.
    
    434
    +    #
    
    435
    +    # Args:
    
    436
    +    #     element (Element): The Element to check
    
    437
    +    #     key (str): The cache key to use
    
    438
    +    #     subdir (str): The subdir to check
    
    439
    +    #
    
    440
    +    # Returns: True if the subdir exists & is populated in the cache, False otherwise
    
    441
    +    #
    
    442
    +    def contains_subdir_artifact(self, element, key, subdir):
    
    443
    +        raise ImplError("Cache '{kind}' does not implement contains_subdir_artifact()"
    
    444
    +                        .format(kind=type(self).__name__))
    
    445
    +
    
    430 446
         # list_artifacts():
    
    431 447
         #
    
    432 448
         # List artifacts in this cache in LRU order.
    
    ... ... @@ -463,6 +479,7 @@ class ArtifactCache():
    463 479
         # Args:
    
    464 480
         #     element (Element): The Element to extract
    
    465 481
         #     key (str): The cache key to use
    
    482
    +    #     subdir (str): The optional subdir to check exists
    
    466 483
         #
    
    467 484
         # Raises:
    
    468 485
         #     ArtifactError: In cases there was an OSError, or if the artifact
    
    ... ... @@ -470,7 +487,7 @@ class ArtifactCache():
    470 487
         #
    
    471 488
         # Returns: path to extracted artifact
    
    472 489
         #
    
    473
    -    def extract(self, element, key):
    
    490
    +    def extract(self, element, key, subdir=None):
    
    474 491
             raise ImplError("Cache '{kind}' does not implement extract()"
    
    475 492
                             .format(kind=type(self).__name__))
    
    476 493
     
    
    ... ... @@ -552,11 +569,13 @@ class ArtifactCache():
    552 569
         #     element (Element): The Element whose artifact is to be fetched
    
    553 570
         #     key (str): The cache key to use
    
    554 571
         #     progress (callable): The progress callback, if any
    
    572
    +    #     subdir (str): The optional specific subdir to pull
    
    573
    +    #     excluded_subdirs (list): The optional list of subdirs to not pull
    
    555 574
         #
    
    556 575
         # Returns:
    
    557 576
         #   (bool): True if pull was successful, False if artifact was not available
    
    558 577
         #
    
    559
    -    def pull(self, element, key, *, progress=None):
    
    578
    +    def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
    
    560 579
             raise ImplError("Cache '{kind}' does not implement pull()"
    
    561 580
                             .format(kind=type(self).__name__))
    
    562 581
     
    
    ... ... @@ -584,6 +603,20 @@ class ArtifactCache():
    584 603
             raise ImplError("Cache '{kind}' does not implement calculate_cache_size()"
    
    585 604
                             .format(kind=type(self).__name__))
    
    586 605
     
    
    606
    +    # checkout_artifact_subdir()
    
    607
    +    #
    
    608
    +    # Checkout given artifact subdir into provided directory
    
    609
    +    #
    
    610
    +    # Args:
    
    611
    +    #     element (Element): The Element
    
    612
    +    #     key (str): The cache key to use
    
    613
    +    #     subdir (str): The subdir to checkout
    
    614
    +    #     tmpdir (str): The dir to place the buildtree content
    
    615
    +    #
    
    616
    +    def checkout_artifact_subdir(self, element, key, subdir, tmpdir):
    
    617
    +        raise ImplError("Cache '{kind}' does not implement checkout_artifact_subdir()"
    
    618
    +                        .format(kind=type(self).__name__))
    
    619
    +
    
    587 620
         ################################################
    
    588 621
         #               Local Private Methods          #
    
    589 622
         ################################################
    

  • buildstream/_artifactcache/cascache.py
    ... ... @@ -92,16 +92,36 @@ class CASCache(ArtifactCache):
    92 92
             # This assumes that the repository doesn't have any dangling pointers
    
    93 93
             return os.path.exists(refpath)
    
    94 94
     
    
    95
    -    def extract(self, element, key):
    
    95
    +    def contains_subdir_artifact(self, element, key, subdir):
    
    96
    +        tree = self.resolve_ref(self.get_artifact_fullname(element, key))
    
    97
    +
    
    98
    +        # This assumes that the subdir digest is present in the element tree
    
    99
    +        subdirdigest = self._get_subdir(tree, subdir)
    
    100
    +        objpath = self.objpath(subdirdigest)
    
    101
    +
    
    102
    +        # True if subdir content is cached or if empty as expected
    
    103
    +        return os.path.exists(objpath)
    
    104
    +
    
    105
    +    def extract(self, element, key, subdir=None):
    
    96 106
             ref = self.get_artifact_fullname(element, key)
    
    97 107
     
    
    98 108
             tree = self.resolve_ref(ref, update_mtime=True)
    
    99 109
     
    
    100
    -        dest = os.path.join(self.extractdir, element._get_project().name,
    
    101
    -                            element.normal_name, tree.hash)
    
    110
    +        dest = elementdest = os.path.join(self.extractdir, element._get_project().name,
    
    111
    +                                          element.normal_name, tree.hash)
    
    112
    +
    
    102 113
             if os.path.isdir(dest):
    
    103
    -            # artifact has already been extracted
    
    104
    -            return dest
    
    114
    +            if subdir:
    
    115
    +                # Check if we have optional subdir in the local cache and not already extracted
    
    116
    +                subdircached = self.contains_subdir_artifact(element, key, subdir)
    
    117
    +                if subdircached and not os.path.isdir(os.path.join(dest, subdir)):
    
    118
    +                    # Artifact has already been extracted without subdir content, only need to checkout the subdir
    
    119
    +                    tree = self._get_subdir(tree, subdir)
    
    120
    +                    dest = os.path.join(dest, subdir)
    
    121
    +                else:
    
    122
    +                    return dest
    
    123
    +            else:
    
    124
    +                return dest
    
    105 125
     
    
    106 126
             with tempfile.TemporaryDirectory(prefix='tmp', dir=self.extractdir) as tmpdir:
    
    107 127
                 checkoutdir = os.path.join(tmpdir, ref)
    
    ... ... @@ -120,7 +140,7 @@ class CASCache(ArtifactCache):
    120 140
                         raise ArtifactError("Failed to extract artifact for ref '{}': {}"
    
    121 141
                                             .format(ref, e)) from e
    
    122 142
     
    
    123
    -        return dest
    
    143
    +        return elementdest
    
    124 144
     
    
    125 145
         def commit(self, element, content, keys):
    
    126 146
             refs = [self.get_artifact_fullname(element, key) for key in keys]
    
    ... ... @@ -228,7 +248,7 @@ class CASCache(ArtifactCache):
    228 248
                 remotes_for_project = self._remotes[element._get_project()]
    
    229 249
                 return any(remote.spec.push for remote in remotes_for_project)
    
    230 250
     
    
    231
    -    def pull(self, element, key, *, progress=None):
    
    251
    +    def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
    
    232 252
             ref = self.get_artifact_fullname(element, key)
    
    233 253
     
    
    234 254
             project = element._get_project()
    
    ... ... @@ -247,8 +267,14 @@ class CASCache(ArtifactCache):
    247 267
                     tree.hash = response.digest.hash
    
    248 268
                     tree.size_bytes = response.digest.size_bytes
    
    249 269
     
    
    250
    -                self._fetch_directory(remote, tree)
    
    270
    +                # Check if the element artifact is present, if so just fetch subdir
    
    271
    +                if subdir and os.path.exists(self.objpath(tree)):
    
    272
    +                    self._fetch_subdir(remote, tree, subdir)
    
    273
    +                else:
    
    274
    +                    # Fetch artifact, excluded_subdirs determined in pullqueue
    
    275
    +                    self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
    
    251 276
     
    
    277
    +                # tree is the remote value, so is the same with or without a dangling ref locally
    
    252 278
                     self.set_ref(ref, tree)
    
    253 279
     
    
    254 280
                     element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    
    ... ... @@ -426,6 +452,13 @@ class CASCache(ArtifactCache):
    426 452
     
    
    427 453
             return pushed
    
    428 454
     
    
    455
    +    def checkout_artifact_subdir(self, element, key, subdir, tmpdir):
    
    456
    +        tree = self.resolve_ref(self.get_artifact_fullname(element, key))
    
    457
    +
    
    458
    +        # This assumes that the subdir digest is present in the element tree
    
    459
    +        subdirdigest = self._get_subdir(tree, subdir)
    
    460
    +        self._checkout(tmpdir, subdirdigest)
    
    461
    +
    
    429 462
         ################################################
    
    430 463
         #                API Private Methods           #
    
    431 464
         ################################################
    
    ... ... @@ -671,8 +704,10 @@ class CASCache(ArtifactCache):
    671 704
                              stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
    
    672 705
     
    
    673 706
             for dirnode in directory.directories:
    
    674
    -            fullpath = os.path.join(dest, dirnode.name)
    
    675
    -            self._checkout(fullpath, dirnode.digest)
    
    707
    +            # Don't try to checkout a dangling ref
    
    708
    +            if os.path.exists(self.objpath(dirnode.digest)):
    
    709
    +                fullpath = os.path.join(dest, dirnode.name)
    
    710
    +                self._checkout(fullpath, dirnode.digest)
    
    676 711
     
    
    677 712
             for symlinknode in directory.symlinks:
    
    678 713
                 # symlink
    
    ... ... @@ -950,11 +985,14 @@ class CASCache(ArtifactCache):
    950 985
         # Args:
    
    951 986
         #     remote (Remote): The remote to use.
    
    952 987
         #     dir_digest (Digest): Digest object for the directory to fetch.
    
    988
    +    #     excluded_subdirs (list): The optional list of subdirs to not fetch
    
    953 989
         #
    
    954
    -    def _fetch_directory(self, remote, dir_digest):
    
    990
    +    def _fetch_directory(self, remote, dir_digest, *, excluded_subdirs=None):
    
    955 991
             fetch_queue = [dir_digest]
    
    956 992
             fetch_next_queue = []
    
    957 993
             batch = _CASBatchRead(remote)
    
    994
    +        if not excluded_subdirs:
    
    995
    +            excluded_subdirs = []
    
    958 996
     
    
    959 997
             while len(fetch_queue) + len(fetch_next_queue) > 0:
    
    960 998
                 if len(fetch_queue) == 0:
    
    ... ... @@ -969,8 +1007,9 @@ class CASCache(ArtifactCache):
    969 1007
                     directory.ParseFromString(f.read())
    
    970 1008
     
    
    971 1009
                 for dirnode in directory.directories:
    
    972
    -                batch = self._fetch_directory_node(remote, dirnode.digest, batch,
    
    973
    -                                                   fetch_queue, fetch_next_queue, recursive=True)
    
    1010
    +                if dirnode.name not in excluded_subdirs:
    
    1011
    +                    batch = self._fetch_directory_node(remote, dirnode.digest, batch,
    
    1012
    +                                                       fetch_queue, fetch_next_queue, recursive=True)
    
    974 1013
     
    
    975 1014
                 for filenode in directory.files:
    
    976 1015
                     batch = self._fetch_directory_node(remote, filenode.digest, batch,
    
    ... ... @@ -979,6 +1018,10 @@ class CASCache(ArtifactCache):
    979 1018
             # Fetch final batch
    
    980 1019
             self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
    
    981 1020
     
    
    1021
    +    def _fetch_subdir(self, remote, tree, subdir):
    
    1022
    +        subdirdigest = self._get_subdir(tree, subdir)
    
    1023
    +        self._fetch_directory(remote, subdirdigest)
    
    1024
    +
    
    982 1025
         def _fetch_tree(self, remote, digest):
    
    983 1026
             # download but do not store the Tree object
    
    984 1027
             with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
    

  • buildstream/_context.py
    ... ... @@ -111,6 +111,12 @@ class Context():
    111 111
             # Make sure the XDG vars are set in the environment before loading anything
    
    112 112
             self._init_xdg()
    
    113 113
     
    
    114
    +        # Whether or not to attempt to pull buildtrees globally
    
    115
    +        self.pullbuildtrees = False
    
    116
    +
    
    117
    +        # Whether or not to include artifact buildtrees in workspaces if available
    
    118
    +        self.workspacebuildtrees = True
    
    119
    +
    
    114 120
             # Private variables
    
    115 121
             self._cache_key = None
    
    116 122
             self._message_handler = None
    
    ... ... @@ -161,7 +167,7 @@ class Context():
    161 167
             _yaml.node_validate(defaults, [
    
    162 168
                 'sourcedir', 'builddir', 'artifactdir', 'logdir',
    
    163 169
                 'scheduler', 'artifacts', 'logging', 'projects',
    
    164
    -            'cache'
    
    170
    +            'cache', 'pullbuildtrees', 'workspacebuildtrees'
    
    165 171
             ])
    
    166 172
     
    
    167 173
             for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
    
    ... ... @@ -186,6 +192,12 @@ class Context():
    186 192
             # Load artifact share configuration
    
    187 193
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
    
    188 194
     
    
    195
    +        # Load pull buildtrees configuration
    
    196
    +        self.pullbuildtrees = _yaml.node_get(defaults, bool, 'pullbuildtrees', default_value='False')
    
    197
    +
    
    198
    +        # Load workspace buildtrees configuration
    
    199
    +        self.workspacebuildtrees = _yaml.node_get(defaults, bool, 'workspacebuildtrees', default_value='True')
    
    200
    +
    
    189 201
             # Load logging config
    
    190 202
             logging = _yaml.node_get(defaults, Mapping, 'logging')
    
    191 203
             _yaml.node_validate(logging, [
    

  • buildstream/_frontend/cli.py
    ... ... @@ -305,10 +305,12 @@ def init(app, project_name, format_version, element_path, force):
    305 305
                   help="Allow tracking to cross junction boundaries")
    
    306 306
     @click.option('--track-save', default=False, is_flag=True,
    
    307 307
                   help="Deprecated: This is ignored")
    
    308
    +@click.option('--pull-buildtrees', default=False, is_flag=True,
    
    309
    +              help="Pull buildtrees from a remote cache server")
    
    308 310
     @click.argument('elements', nargs=-1,
    
    309 311
                     type=click.Path(readable=False))
    
    310 312
     @click.pass_obj
    
    311
    -def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions):
    
    313
    +def build(app, elements, all_, track_, track_save, track_all, track_except, track_cross_junctions, pull_buildtrees):
    
    312 314
         """Build elements in a pipeline"""
    
    313 315
     
    
    314 316
         if (track_except or track_cross_junctions) and not (track_ or track_all):
    
    ... ... @@ -327,7 +329,8 @@ def build(app, elements, all_, track_, track_save, track_all, track_except, trac
    327 329
                              track_targets=track_,
    
    328 330
                              track_except=track_except,
    
    329 331
                              track_cross_junctions=track_cross_junctions,
    
    330
    -                         build_all=all_)
    
    332
    +                         build_all=all_,
    
    333
    +                         pull_buildtrees=pull_buildtrees)
    
    331 334
     
    
    332 335
     
    
    333 336
     ##################################################################
    
    ... ... @@ -429,10 +432,12 @@ def track(app, elements, deps, except_, cross_junctions):
    429 432
                   help='The dependency artifacts to pull (default: none)')
    
    430 433
     @click.option('--remote', '-r',
    
    431 434
                   help="The URL of the remote cache (defaults to the first configured cache)")
    
    435
    +@click.option('--pull-buildtrees', default=False, is_flag=True,
    
    436
    +              help="Pull buildtrees from a remote cache server")
    
    432 437
     @click.argument('elements', nargs=-1,
    
    433 438
                     type=click.Path(readable=False))
    
    434 439
     @click.pass_obj
    
    435
    -def pull(app, elements, deps, remote):
    
    440
    +def pull(app, elements, deps, remote, pull_buildtrees):
    
    436 441
         """Pull a built artifact from the configured remote artifact cache.
    
    437 442
     
    
    438 443
         By default the artifact will be pulled one of the configured caches
    
    ... ... @@ -446,7 +451,7 @@ def pull(app, elements, deps, remote):
    446 451
             all:   All dependencies
    
    447 452
         """
    
    448 453
         with app.initialized(session_name="Pull"):
    
    449
    -        app.stream.pull(elements, selection=deps, remote=remote)
    
    454
    +        app.stream.pull(elements, selection=deps, remote=remote, pull_buildtrees=pull_buildtrees)
    
    450 455
     
    
    451 456
     
    
    452 457
     ##################################################################
    
    ... ... @@ -681,12 +686,16 @@ def workspace():
    681 686
                   help="Overwrite files existing in checkout directory")
    
    682 687
     @click.option('--track', 'track_', default=False, is_flag=True,
    
    683 688
                   help="Track and fetch new source references before checking out the workspace")
    
    689
    +@click.option('--no-cache', default=False, is_flag=True,
    
    690
    +              help="Do not checkout the cached buildtree")
    
    684 691
     @click.argument('element',
    
    685 692
                     type=click.Path(readable=False))
    
    686 693
     @click.argument('directory', type=click.Path(file_okay=False))
    
    687 694
     @click.pass_obj
    
    688
    -def workspace_open(app, no_checkout, force, track_, element, directory):
    
    689
    -    """Open a workspace for manual source modification"""
    
    695
    +def workspace_open(app, no_checkout, force, track_, no_cache, element, directory):
    
    696
    +    """Open a workspace for manual source modification, the elements buildtree
    
    697
    +    will be provided if available in the local artifact cache.
    
    698
    +    """
    
    690 699
     
    
    691 700
         if os.path.exists(directory):
    
    692 701
     
    
    ... ... @@ -698,11 +707,15 @@ def workspace_open(app, no_checkout, force, track_, element, directory):
    698 707
                 click.echo("Checkout directory is not empty: {}".format(directory), err=True)
    
    699 708
                 sys.exit(-1)
    
    700 709
     
    
    710
    +    if not no_cache:
    
    711
    +        click.echo("WARNING: Workspace will be opened without the cached buildtree if not cached locally")
    
    712
    +
    
    701 713
         with app.initialized():
    
    702 714
             app.stream.workspace_open(element, directory,
    
    703 715
                                       no_checkout=no_checkout,
    
    704 716
                                       track_first=track_,
    
    705
    -                                  force=force)
    
    717
    +                                  force=force,
    
    718
    +                                  no_cache=no_cache)
    
    706 719
     
    
    707 720
     
    
    708 721
     ##################################################################
    

  • buildstream/_scheduler/queues/pullqueue.py
    ... ... @@ -32,9 +32,20 @@ class PullQueue(Queue):
    32 32
         complete_name = "Pulled"
    
    33 33
         resources = [ResourceType.DOWNLOAD, ResourceType.CACHE]
    
    34 34
     
    
    35
    +    def __init__(self, scheduler, buildtrees=False):
    
    36
    +        super().__init__(scheduler)
    
    37
    +
    
    38
    +        # Current default exclusions on pull
    
    39
    +        self._excluded_subdirs = ["buildtree"]
    
    40
    +        self._subdir = None
    
    41
    +        # If buildtrees are to be pulled, remove the value from exclusion list
    
    42
    +        if buildtrees:
    
    43
    +            self._subdir = "buildtree"
    
    44
    +            self._excluded_subdirs.remove(self._subdir)
    
    45
    +
    
    35 46
         def process(self, element):
    
    36 47
             # returns whether an artifact was downloaded or not
    
    37
    -        if not element._pull():
    
    48
    +        if not element._pull(subdir=self._subdir, excluded_subdirs=self._excluded_subdirs):
    
    38 49
                 raise SkipJob(self.action_name)
    
    39 50
     
    
    40 51
         def status(self, element):
    
    ... ... @@ -49,7 +60,7 @@ class PullQueue(Queue):
    49 60
             if not element._can_query_cache():
    
    50 61
                 return QueueStatus.WAIT
    
    51 62
     
    
    52
    -        if element._pull_pending():
    
    63
    +        if element._pull_pending(subdir=self._subdir):
    
    53 64
                 return QueueStatus.READY
    
    54 65
             else:
    
    55 66
                 return QueueStatus.SKIP
    

  • buildstream/_stream.py
    ... ... @@ -160,12 +160,14 @@ class Stream():
    160 160
         #    track_cross_junctions (bool): Whether tracking should cross junction boundaries
    
    161 161
         #    build_all (bool): Whether to build all elements, or only those
    
    162 162
         #                      which are required to build the target.
    
    163
    +    #    pull_buildtrees (bool): Whether to pull buildtrees from a remote cache server
    
    163 164
         #
    
    164 165
         def build(self, targets, *,
    
    165 166
                   track_targets=None,
    
    166 167
                   track_except=None,
    
    167 168
                   track_cross_junctions=False,
    
    168
    -              build_all=False):
    
    169
    +              build_all=False,
    
    170
    +              pull_buildtrees=False):
    
    169 171
     
    
    170 172
             if build_all:
    
    171 173
                 selection = PipelineSelection.ALL
    
    ... ... @@ -195,7 +197,10 @@ class Stream():
    195 197
                 self._add_queue(track_queue, track=True)
    
    196 198
     
    
    197 199
             if self._artifacts.has_fetch_remotes():
    
    198
    -            self._add_queue(PullQueue(self._scheduler))
    
    200
    +            # Query if pullbuildtrees has been set globally in user config
    
    201
    +            if self._context.pullbuildtrees:
    
    202
    +                pull_buildtrees = True
    
    203
    +            self._add_queue(PullQueue(self._scheduler, buildtrees=pull_buildtrees))
    
    199 204
     
    
    200 205
             self._add_queue(FetchQueue(self._scheduler, skip_cached=True))
    
    201 206
             self._add_queue(BuildQueue(self._scheduler))
    
    ... ... @@ -295,7 +300,8 @@ class Stream():
    295 300
         #
    
    296 301
         def pull(self, targets, *,
    
    297 302
                  selection=PipelineSelection.NONE,
    
    298
    -             remote=None):
    
    303
    +             remote=None,
    
    304
    +             pull_buildtrees=False):
    
    299 305
     
    
    300 306
             use_config = True
    
    301 307
             if remote:
    
    ... ... @@ -310,8 +316,12 @@ class Stream():
    310 316
             if not self._artifacts.has_fetch_remotes():
    
    311 317
                 raise StreamError("No artifact caches available for pulling artifacts")
    
    312 318
     
    
    319
    +        # Query if pullbuildtrees has been set globally in user config
    
    320
    +        if self._context.pullbuildtrees:
    
    321
    +            pull_buildtrees = True
    
    322
    +
    
    313 323
             self._pipeline.assert_consistent(elements)
    
    314
    -        self._add_queue(PullQueue(self._scheduler))
    
    324
    +        self._add_queue(PullQueue(self._scheduler, buildtrees=pull_buildtrees))
    
    315 325
             self._enqueue_plan(elements)
    
    316 326
             self._run()
    
    317 327
     
    
    ... ... @@ -446,11 +456,17 @@ class Stream():
    446 456
         #    no_checkout (bool): Whether to skip checking out the source
    
    447 457
         #    track_first (bool): Whether to track and fetch first
    
    448 458
         #    force (bool): Whether to ignore contents in an existing directory
    
    459
    +    #    no_cache (bool): Whether to not include the cached buildtree
    
    449 460
         #
    
    450 461
         def workspace_open(self, target, directory, *,
    
    451 462
                            no_checkout,
    
    452 463
                            track_first,
    
    453
    -                       force):
    
    464
    +                       force,
    
    465
    +                       no_cache):
    
    466
    +
    
    467
    +        # Override no_cache if the global user conf workspacebuildtrees is false
    
    468
    +        if not self._context.workspacebuildtrees:
    
    469
    +            no_cache = True
    
    454 470
     
    
    455 471
             if track_first:
    
    456 472
                 track_targets = (target,)
    
    ... ... @@ -463,7 +479,21 @@ class Stream():
    463 479
             target = elements[0]
    
    464 480
             directory = os.path.abspath(directory)
    
    465 481
     
    
    466
    -        if not list(target.sources()):
    
    482
    +        # Check if given target has a buildtree artifact cached locally
    
    483
    +        buildtree = None
    
    484
    +        if target._cached():
    
    485
    +            buildtree = self._artifacts.contains_subdir_artifact(target, target._get_cache_key(), 'buildtree')
    
    486
    +
    
    487
    +        # If we're running in the default state, make the user aware of buildtree usage
    
    488
    +        if not no_cache:
    
    489
    +            if buildtree:
    
    490
    +                self._message(MessageType.INFO, "{} buildtree artifact is available,"
    
    491
    +                              " workspace will be opened with it".format(target.name))
    
    492
    +            else:
    
    493
    +                self._message(MessageType.WARN, "{} buildtree artifact not available,"
    
    494
    +                              " workspace will be opened with source checkout".format(target.name))
    
    495
    +
    
    496
    +        if (not buildtree or no_cache) and not list(target.sources()):
    
    467 497
                 build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
    
    468 498
                 if not build_depends:
    
    469 499
                     raise StreamError("The given element has no sources")
    
    ... ... @@ -482,18 +512,21 @@ class Stream():
    482 512
             # If we're going to checkout, we need at least a fetch,
    
    483 513
             # if we were asked to track first, we're going to fetch anyway.
    
    484 514
             #
    
    485
    -        if not no_checkout or track_first:
    
    486
    -            track_elements = []
    
    487
    -            if track_first:
    
    488
    -                track_elements = elements
    
    489
    -            self._fetch(elements, track_elements=track_elements)
    
    490
    -
    
    491
    -        if not no_checkout and target._get_consistency() != Consistency.CACHED:
    
    492
    -            raise StreamError("Could not stage uncached source. " +
    
    493
    -                              "Use `--track` to track and " +
    
    494
    -                              "fetch the latest version of the " +
    
    495
    -                              "source.")
    
    496
    -
    
    515
    +        if not buildtree or no_cache:
    
    516
    +            if not no_checkout or track_first:
    
    517
    +                track_elements = []
    
    518
    +                if track_first:
    
    519
    +                    track_elements = elements
    
    520
    +                self._fetch(elements, track_elements=track_elements)
    
    521
    +
    
    522
    +        if not buildtree or no_cache:
    
    523
    +            if not no_checkout and target._get_consistency() != Consistency.CACHED:
    
    524
    +                raise StreamError("Could not stage uncached source. " +
    
    525
    +                                  "Use `--track` to track and " +
    
    526
    +                                  "fetch the latest version of the " +
    
    527
    +                                  "source.")
    
    528
    +
    
    529
    +        # Presume workspace to be forced if previous StreamError not raised
    
    497 530
             if workspace:
    
    498 531
                 workspaces.delete_workspace(target._get_full_name())
    
    499 532
                 workspaces.save_config()
    
    ... ... @@ -505,9 +538,13 @@ class Stream():
    505 538
     
    
    506 539
             workspaces.create_workspace(target._get_full_name(), directory)
    
    507 540
     
    
    508
    -        if not no_checkout:
    
    541
    +        if (not buildtree or no_cache) and not no_checkout:
    
    509 542
                 with target.timed_activity("Staging sources to {}".format(directory)):
    
    510 543
                     target._open_workspace()
    
    544
    +        # Handle opening workspace with buildtree instead of source staging
    
    545
    +        elif buildtree and not no_cache:
    
    546
    +            with target.timed_activity("Staging buildtree to {}".format(directory)):
    
    547
    +                target._open_workspace(buildtree=buildtree)
    
    511 548
     
    
    512 549
             workspaces.save_config()
    
    513 550
             self._message(MessageType.INFO, "Saved workspace configuration")
    

  • buildstream/element.py
    ... ... @@ -1693,18 +1693,26 @@ class Element(Plugin):
    1693 1693
     
    
    1694 1694
         # _pull_pending()
    
    1695 1695
         #
    
    1696
    -    # Check whether the artifact will be pulled.
    
    1696
    +    # Check whether the artifact will be pulled. If the pull operation is to
    
    1697
    +    # include a specific subdir of the element artifact (from cli or user conf)
    
    1698
    +    # then the local cache is queried for the subdir's existence.
    
    1699
    +    #
    
    1700
    +    # Args:
    
    1701
    +    #    subdir (str): The subdir to check for in the local cache, if the pull has been invoked with a specific subdir set
    
    1697 1702
         #
    
    1698 1703
         # Returns:
    
    1699 1704
         #   (bool): Whether a pull operation is pending
    
    1700 1705
         #
    
    1701
    -    def _pull_pending(self):
    
    1706
    +    def _pull_pending(self, subdir=None):
    
    1702 1707
             if self._get_workspace():
    
    1703 1708
                 # Workspace builds are never pushed to artifact servers
    
    1704 1709
                 return False
    
    1705 1710
     
    
    1706
    -        if self.__strong_cached:
    
    1707
    -            # Artifact already in local cache
    
    1711
    +        if self.__strong_cached and subdir:
    
    1712
    +            # If we've specified a subdir, check if the subdir is cached locally
    
    1713
    +            if self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, subdir):
    
    1714
    +                return False
    
    1715
    +        elif self.__strong_cached:
    
    1708 1716
                 return False
    
    1709 1717
     
    
    1710 1718
             # Pull is pending if artifact remote server available
    
    ... ... @@ -1726,11 +1734,10 @@ class Element(Plugin):
    1726 1734
     
    
    1727 1735
             self._update_state()
    
    1728 1736
     
    
    1729
    -    def _pull_strong(self, *, progress=None):
    
    1737
    +    def _pull_strong(self, *, progress=None, subdir=None, excluded_subdirs=None):
    
    1730 1738
             weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
    
    1731
    -
    
    1732 1739
             key = self.__strict_cache_key
    
    1733
    -        if not self.__artifacts.pull(self, key, progress=progress):
    
    1740
    +        if not self.__artifacts.pull(self, key, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
    
    1734 1741
                 return False
    
    1735 1742
     
    
    1736 1743
             # update weak ref by pointing it to this newly fetched artifact
    
    ... ... @@ -1738,10 +1745,10 @@ class Element(Plugin):
    1738 1745
     
    
    1739 1746
             return True
    
    1740 1747
     
    
    1741
    -    def _pull_weak(self, *, progress=None):
    
    1748
    +    def _pull_weak(self, *, progress=None, subdir=None, excluded_subdirs=None):
    
    1742 1749
             weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
    
    1743
    -
    
    1744
    -        if not self.__artifacts.pull(self, weak_key, progress=progress):
    
    1750
    +        if not self.__artifacts.pull(self, weak_key, progress=progress, subdir=subdir,
    
    1751
    +                                     excluded_subdirs=excluded_subdirs):
    
    1745 1752
                 return False
    
    1746 1753
     
    
    1747 1754
             # extract strong cache key from this newly fetched artifact
    
    ... ... @@ -1759,17 +1766,17 @@ class Element(Plugin):
    1759 1766
         #
    
    1760 1767
         # Returns: True if the artifact has been downloaded, False otherwise
    
    1761 1768
         #
    
    1762
    -    def _pull(self):
    
    1769
    +    def _pull(self, subdir=None, excluded_subdirs=None):
    
    1763 1770
             context = self._get_context()
    
    1764 1771
     
    
    1765 1772
             def progress(percent, message):
    
    1766 1773
                 self.status(message)
    
    1767 1774
     
    
    1768 1775
             # Attempt to pull artifact without knowing whether it's available
    
    1769
    -        pulled = self._pull_strong(progress=progress)
    
    1776
    +        pulled = self._pull_strong(progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs)
    
    1770 1777
     
    
    1771 1778
             if not pulled and not self._cached() and not context.get_strict():
    
    1772
    -            pulled = self._pull_weak(progress=progress)
    
    1779
    +            pulled = self._pull_weak(progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs)
    
    1773 1780
     
    
    1774 1781
             if not pulled:
    
    1775 1782
                 return False
    
    ... ... @@ -1792,10 +1799,21 @@ class Element(Plugin):
    1792 1799
             if not self._cached():
    
    1793 1800
                 return True
    
    1794 1801
     
    
    1795
    -        # Do not push tained artifact
    
    1802
    +        # Do not push tainted artifact
    
    1796 1803
             if self.__get_tainted():
    
    1797 1804
                 return True
    
    1798 1805
     
    
    1806
    +        # Do not push elements that have a dangling buildtree artifact unless element type is
    
    1807
    +        # expected to have an empty buildtree directory
    
    1808
    +        if not self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, 'buildtree'):
    
    1809
    +            return True
    
    1810
    +
    
    1811
    +        # strict_cache_key can't be relied on to be available when running in non strict mode
    
    1812
    +        context = self._get_context()
    
    1813
    +        if not context.get_strict():
    
    1814
    +            if not self.__artifacts.contains_subdir_artifact(self, self.__weak_cache_key, 'buildtree'):
    
    1815
    +                return True
    
    1816
    +
    
    1799 1817
             return False
    
    1800 1818
     
    
    1801 1819
         # _push():
    
    ... ... @@ -1893,7 +1911,10 @@ class Element(Plugin):
    1893 1911
         # This requires that a workspace already be created in
    
    1894 1912
         # the workspaces metadata first.
    
    1895 1913
         #
    
    1896
    -    def _open_workspace(self):
    
    1914
    +    # Args:
    
    1915
    +    #    buildtree (bool): Whether to open workspace with artifact buildtree
    
    1916
    +    #
    
    1917
    +    def _open_workspace(self, buildtree=None):
    
    1897 1918
             context = self._get_context()
    
    1898 1919
             workspace = self._get_workspace()
    
    1899 1920
             assert workspace is not None
    
    ... ... @@ -1906,11 +1927,15 @@ class Element(Plugin):
    1906 1927
             # files in the target directory actually works without any
    
    1907 1928
             # additional support from Source implementations.
    
    1908 1929
             #
    
    1930
    +
    
    1909 1931
             os.makedirs(context.builddir, exist_ok=True)
    
    1910 1932
             with utils._tempdir(dir=context.builddir, prefix='workspace-{}'
    
    1911 1933
                                 .format(self.normal_name)) as temp:
    
    1912
    -            for source in self.sources():
    
    1913
    -                source._init_workspace(temp)
    
    1934
    +            if not buildtree:
    
    1935
    +                for source in self.sources():
    
    1936
    +                    source._init_workspace(temp)
    
    1937
    +            else:
    
    1938
    +                self.__artifacts.checkout_artifact_subdir(self, self._get_cache_key(), 'buildtree', temp)
    
    1914 1939
     
    
    1915 1940
                 # Now hardlink the files into the workspace target.
    
    1916 1941
                 utils.link_files(temp, workspace.get_absolute_path())
    
    ... ... @@ -2492,7 +2517,7 @@ class Element(Plugin):
    2492 2517
                 if not context.get_strict() and not self.__artifacts.contains(self, key):
    
    2493 2518
                     key = self._get_cache_key(strength=_KeyStrength.WEAK)
    
    2494 2519
     
    
    2495
    -        return (self.__artifacts.extract(self, key), key)
    
    2520
    +        return (self.__artifacts.extract(self, key, subdir='buildtree'), key)
    
    2496 2521
     
    
    2497 2522
         # __get_artifact_metadata_keys():
    
    2498 2523
         #
    

  • tests/completions/completions.py
    ... ... @@ -103,7 +103,7 @@ def test_commands(cli, cmd, word_idx, expected):
    103 103
         ('bst --no-colors build -', 3, ['--all ', '--track ', '--track-all ',
    
    104 104
                                         '--track-except ',
    
    105 105
                                         '--track-cross-junctions ', '-J ',
    
    106
    -                                    '--track-save ']),
    
    106
    +                                    '--track-save ', '--pull-buildtrees ']),
    
    107 107
     
    
    108 108
         # Test the behavior of completing after an option that has a
    
    109 109
         # parameter that cannot be completed, vs an option that has
    

  • tests/frontend/workspace.py
    ... ... @@ -44,7 +44,7 @@ DATA_DIR = os.path.join(
    44 44
     
    
    45 45
     
    
    46 46
     def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
    
    47
    -                   project_path=None, element_attrs=None):
    
    47
    +                   project_path=None, element_attrs=None, no_cache=False):
    
    48 48
         if not workspace_dir:
    
    49 49
             workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
    
    50 50
         if not project_path:
    
    ... ... @@ -88,6 +88,8 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir
    88 88
         args = ['workspace', 'open']
    
    89 89
         if track:
    
    90 90
             args.append('--track')
    
    91
    +    if no_cache:
    
    92
    +        args.append('--no-cache')
    
    91 93
         args.extend([element_name, workspace_dir])
    
    92 94
         result = cli.run(project=project_path, args=args)
    
    93 95
     
    
    ... ... @@ -101,7 +103,7 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir
    101 103
         filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
    
    102 104
         assert os.path.exists(filename)
    
    103 105
     
    
    104
    -    return (element_name, project_path, workspace_dir)
    
    106
    +    return (element_name, project_path, workspace_dir, result)
    
    105 107
     
    
    106 108
     
    
    107 109
     @pytest.mark.datafiles(DATA_DIR)
    
    ... ... @@ -112,7 +114,7 @@ def test_open(cli, tmpdir, datafiles, kind):
    112 114
     
    
    113 115
     @pytest.mark.datafiles(DATA_DIR)
    
    114 116
     def test_open_bzr_customize(cli, tmpdir, datafiles):
    
    115
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "bzr", False)
    
    117
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, "bzr", False)
    
    116 118
     
    
    117 119
         # Check that the .bzr dir exists
    
    118 120
         bzrdir = os.path.join(workspace, ".bzr")
    
    ... ... @@ -137,7 +139,7 @@ def test_open_track(cli, tmpdir, datafiles, kind):
    137 139
     @pytest.mark.datafiles(DATA_DIR)
    
    138 140
     @pytest.mark.parametrize("kind", repo_kinds)
    
    139 141
     def test_open_force(cli, tmpdir, datafiles, kind):
    
    140
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    142
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    141 143
     
    
    142 144
         # Close the workspace
    
    143 145
         result = cli.run(project=project, args=[
    
    ... ... @@ -158,7 +160,7 @@ def test_open_force(cli, tmpdir, datafiles, kind):
    158 160
     @pytest.mark.datafiles(DATA_DIR)
    
    159 161
     @pytest.mark.parametrize("kind", repo_kinds)
    
    160 162
     def test_open_force_open(cli, tmpdir, datafiles, kind):
    
    161
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    163
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    162 164
     
    
    163 165
         # Assert the workspace dir exists
    
    164 166
         assert os.path.exists(workspace)
    
    ... ... @@ -173,7 +175,7 @@ def test_open_force_open(cli, tmpdir, datafiles, kind):
    173 175
     @pytest.mark.datafiles(DATA_DIR)
    
    174 176
     @pytest.mark.parametrize("kind", repo_kinds)
    
    175 177
     def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
    
    176
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False, "-alpha")
    
    178
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False, "-alpha")
    
    177 179
     
    
    178 180
         # Assert the workspace dir exists
    
    179 181
         assert os.path.exists(workspace)
    
    ... ... @@ -183,7 +185,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
    183 185
     
    
    184 186
         tmpdir = os.path.join(str(tmpdir), "-beta")
    
    185 187
         shutil.move(hello_path, hello1_path)
    
    186
    -    element_name2, project2, workspace2 = open_workspace(cli, tmpdir, datafiles, kind, False, "-beta")
    
    188
    +    element_name2, project2, workspace2, _ = open_workspace(cli, tmpdir, datafiles, kind, False, "-beta")
    
    187 189
     
    
    188 190
         # Assert the workspace dir exists
    
    189 191
         assert os.path.exists(workspace2)
    
    ... ... @@ -210,7 +212,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
    210 212
     @pytest.mark.datafiles(DATA_DIR)
    
    211 213
     @pytest.mark.parametrize("kind", repo_kinds)
    
    212 214
     def test_close(cli, tmpdir, datafiles, kind):
    
    213
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    215
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    214 216
     
    
    215 217
         # Close the workspace
    
    216 218
         result = cli.run(project=project, args=[
    
    ... ... @@ -226,7 +228,7 @@ def test_close(cli, tmpdir, datafiles, kind):
    226 228
     def test_close_external_after_move_project(cli, tmpdir, datafiles):
    
    227 229
         workspace_dir = os.path.join(str(tmpdir), "workspace")
    
    228 230
         project_path = os.path.join(str(tmpdir), 'initial_project')
    
    229
    -    element_name, _, _ = open_workspace(cli, tmpdir, datafiles, 'git', False, "", workspace_dir, project_path)
    
    231
    +    element_name, _, _, _ = open_workspace(cli, tmpdir, datafiles, 'git', False, "", workspace_dir, project_path)
    
    230 232
         assert os.path.exists(workspace_dir)
    
    231 233
         moved_dir = os.path.join(str(tmpdir), 'external_project')
    
    232 234
         shutil.move(project_path, moved_dir)
    
    ... ... @@ -246,8 +248,8 @@ def test_close_external_after_move_project(cli, tmpdir, datafiles):
    246 248
     def test_close_internal_after_move_project(cli, tmpdir, datafiles):
    
    247 249
         initial_dir = os.path.join(str(tmpdir), 'initial_project')
    
    248 250
         initial_workspace = os.path.join(initial_dir, 'workspace')
    
    249
    -    element_name, _, _ = open_workspace(cli, tmpdir, datafiles, 'git', False,
    
    250
    -                                        workspace_dir=initial_workspace, project_path=initial_dir)
    
    251
    +    element_name, _, _, _ = open_workspace(cli, tmpdir, datafiles, 'git', False,
    
    252
    +                                           workspace_dir=initial_workspace, project_path=initial_dir)
    
    251 253
         moved_dir = os.path.join(str(tmpdir), 'internal_project')
    
    252 254
         shutil.move(initial_dir, moved_dir)
    
    253 255
         assert os.path.exists(moved_dir)
    
    ... ... @@ -265,7 +267,7 @@ def test_close_internal_after_move_project(cli, tmpdir, datafiles):
    265 267
     
    
    266 268
     @pytest.mark.datafiles(DATA_DIR)
    
    267 269
     def test_close_removed(cli, tmpdir, datafiles):
    
    268
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    270
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    269 271
     
    
    270 272
         # Remove it first, closing the workspace should work
    
    271 273
         shutil.rmtree(workspace)
    
    ... ... @@ -282,7 +284,7 @@ def test_close_removed(cli, tmpdir, datafiles):
    282 284
     
    
    283 285
     @pytest.mark.datafiles(DATA_DIR)
    
    284 286
     def test_close_nonexistant_element(cli, tmpdir, datafiles):
    
    285
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    287
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    286 288
         element_path = os.path.join(datafiles.dirname, datafiles.basename, 'elements', element_name)
    
    287 289
     
    
    288 290
         # First brutally remove the element.bst file, ensuring that
    
    ... ... @@ -304,9 +306,9 @@ def test_close_nonexistant_element(cli, tmpdir, datafiles):
    304 306
     def test_close_multiple(cli, tmpdir, datafiles):
    
    305 307
         tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
    
    306 308
         tmpdir_beta = os.path.join(str(tmpdir), 'beta')
    
    307
    -    alpha, project, workspace_alpha = open_workspace(
    
    309
    +    alpha, project, workspace_alpha, _ = open_workspace(
    
    308 310
             cli, tmpdir_alpha, datafiles, 'git', False, suffix='-alpha')
    
    309
    -    beta, project, workspace_beta = open_workspace(
    
    311
    +    beta, project, workspace_beta, _ = open_workspace(
    
    310 312
             cli, tmpdir_beta, datafiles, 'git', False, suffix='-beta')
    
    311 313
     
    
    312 314
         # Close the workspaces
    
    ... ... @@ -324,9 +326,9 @@ def test_close_multiple(cli, tmpdir, datafiles):
    324 326
     def test_close_all(cli, tmpdir, datafiles):
    
    325 327
         tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
    
    326 328
         tmpdir_beta = os.path.join(str(tmpdir), 'beta')
    
    327
    -    alpha, project, workspace_alpha = open_workspace(
    
    329
    +    alpha, project, workspace_alpha, _ = open_workspace(
    
    328 330
             cli, tmpdir_alpha, datafiles, 'git', False, suffix='-alpha')
    
    329
    -    beta, project, workspace_beta = open_workspace(
    
    331
    +    beta, project, workspace_beta, _ = open_workspace(
    
    330 332
             cli, tmpdir_beta, datafiles, 'git', False, suffix='-beta')
    
    331 333
     
    
    332 334
         # Close the workspaces
    
    ... ... @@ -343,7 +345,7 @@ def test_close_all(cli, tmpdir, datafiles):
    343 345
     @pytest.mark.datafiles(DATA_DIR)
    
    344 346
     def test_reset(cli, tmpdir, datafiles):
    
    345 347
         # Open the workspace
    
    346
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    348
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    347 349
     
    
    348 350
         # Modify workspace
    
    349 351
         shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
    
    ... ... @@ -366,9 +368,9 @@ def test_reset_multiple(cli, tmpdir, datafiles):
    366 368
         # Open the workspaces
    
    367 369
         tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
    
    368 370
         tmpdir_beta = os.path.join(str(tmpdir), 'beta')
    
    369
    -    alpha, project, workspace_alpha = open_workspace(
    
    371
    +    alpha, project, workspace_alpha, _ = open_workspace(
    
    370 372
             cli, tmpdir_alpha, datafiles, 'git', False, suffix='-alpha')
    
    371
    -    beta, project, workspace_beta = open_workspace(
    
    373
    +    beta, project, workspace_beta, _ = open_workspace(
    
    372 374
             cli, tmpdir_beta, datafiles, 'git', False, suffix='-beta')
    
    373 375
     
    
    374 376
         # Modify workspaces
    
    ... ... @@ -392,9 +394,9 @@ def test_reset_all(cli, tmpdir, datafiles):
    392 394
         # Open the workspaces
    
    393 395
         tmpdir_alpha = os.path.join(str(tmpdir), 'alpha')
    
    394 396
         tmpdir_beta = os.path.join(str(tmpdir), 'beta')
    
    395
    -    alpha, project, workspace_alpha = open_workspace(
    
    397
    +    alpha, project, workspace_alpha, _ = open_workspace(
    
    396 398
             cli, tmpdir_alpha, datafiles, 'git', False, suffix='-alpha')
    
    397
    -    beta, project, workspace_beta = open_workspace(
    
    399
    +    beta, project, workspace_beta, _ = open_workspace(
    
    398 400
             cli, tmpdir_beta, datafiles, 'git', False, suffix='-beta')
    
    399 401
     
    
    400 402
         # Modify workspaces
    
    ... ... @@ -415,7 +417,7 @@ def test_reset_all(cli, tmpdir, datafiles):
    415 417
     
    
    416 418
     @pytest.mark.datafiles(DATA_DIR)
    
    417 419
     def test_list(cli, tmpdir, datafiles):
    
    418
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    420
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    419 421
     
    
    420 422
         # Now list the workspaces
    
    421 423
         result = cli.run(project=project, args=[
    
    ... ... @@ -437,7 +439,7 @@ def test_list(cli, tmpdir, datafiles):
    437 439
     @pytest.mark.parametrize("kind", repo_kinds)
    
    438 440
     @pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
    
    439 441
     def test_build(cli, tmpdir, datafiles, kind, strict):
    
    440
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    442
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    441 443
         checkout = os.path.join(str(tmpdir), 'checkout')
    
    442 444
     
    
    443 445
         # Modify workspace
    
    ... ... @@ -516,7 +518,7 @@ def test_buildable_no_ref(cli, tmpdir, datafiles):
    516 518
     @pytest.mark.parametrize("modification", [("addfile"), ("removefile"), ("modifyfile")])
    
    517 519
     @pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
    
    518 520
     def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
    
    519
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    521
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
    
    520 522
         checkout = os.path.join(str(tmpdir), 'checkout')
    
    521 523
     
    
    522 524
         # Configure strict mode
    
    ... ... @@ -779,7 +781,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
    779 781
     @pytest.mark.datafiles(DATA_DIR)
    
    780 782
     @pytest.mark.parametrize("kind", repo_kinds)
    
    781 783
     def test_inconsitent_pipeline_message(cli, tmpdir, datafiles, kind):
    
    782
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    784
    +    element_name, project, workspace, _ = open_workspace(cli, tmpdir, datafiles, kind, False)
    
    783 785
     
    
    784 786
         shutil.rmtree(workspace)
    
    785 787
     
    
    ... ... @@ -793,8 +795,8 @@ def test_inconsitent_pipeline_message(cli, tmpdir, datafiles, kind):
    793 795
     @pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
    
    794 796
     def test_cache_key_workspace_in_dependencies(cli, tmpdir, datafiles, strict):
    
    795 797
         checkout = os.path.join(str(tmpdir), 'checkout')
    
    796
    -    element_name, project, workspace = open_workspace(cli, os.path.join(str(tmpdir), 'repo-a'),
    
    797
    -                                                      datafiles, 'git', False)
    
    798
    +    element_name, project, workspace, _ = open_workspace(cli, os.path.join(str(tmpdir), 'repo-a'),
    
    799
    +                                                         datafiles, 'git', False)
    
    798 800
     
    
    799 801
         element_path = os.path.join(project, 'elements')
    
    800 802
         back_dep_element_name = 'workspace-test-back-dep.bst'
    
    ... ... @@ -869,10 +871,26 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles):
    869 871
                 ]
    
    870 872
             }
    
    871 873
         }
    
    872
    -    element_name, project, _ = open_workspace(cli, tmpdir, datafiles,
    
    873
    -                                              "git", False, element_attrs=element_config)
    
    874
    +    element_name, project, _, _ = open_workspace(cli, tmpdir, datafiles,
    
    875
    +                                                 "git", False, element_attrs=element_config)
    
    874 876
     
    
    875 877
         for _ in range(2):
    
    876 878
             result = cli.run(project=project, args=["build", element_name])
    
    877 879
             assert "BUG" not in result.stderr
    
    878 880
             assert cli.get_element_state(project, element_name) != "cached"
    
    881
    +
    
    882
    +
    
    883
    +@pytest.mark.datafiles(DATA_DIR)
    
    884
    +def test_nocache_messages(cli, tmpdir, datafiles):
    
    885
    +    # cli default WARN for source dropback possibility when no-cache flag is not passed
    
    886
    +    element_name, project, workspace, result = open_workspace(cli, tmpdir, datafiles, 'git', False, suffix='1')
    
    887
    +    assert "WARNING: Workspace will be opened without the cached buildtree if not cached locally" in result.output
    
    888
    +
    
    889
    +    # cli WARN for source dropback happening when no-cache flag not given, but buildtree not available
    
    890
    +    assert "workspace will be opened with source checkout" in result.stderr
    
    891
    +
    
    892
    +    # cli default WARN for source dropback possibilty not given when no-cache flag is passed
    
    893
    +    tmpdir = os.path.join(str(tmpdir), "2")
    
    894
    +    element_name, project, workspace, result = open_workspace(cli, tmpdir, datafiles, 'git', False, suffix='2',
    
    895
    +                                                              no_cache=True)
    
    896
    +    assert "WARNING: Workspace will be opened without the cached buildtree if not cached locally" not in result.output

  • tests/integration/pullbuildtrees.py
    1
    +import os
    
    2
    +import shutil
    
    3
    +import pytest
    
    4
    +
    
    5
    +from tests.testutils import cli_integration as cli, create_artifact_share
    
    6
    +from tests.testutils.integration import assert_contains
    
    7
    +
    
    8
    +
    
    9
    +DATA_DIR = os.path.join(
    
    10
    +    os.path.dirname(os.path.realpath(__file__)),
    
    11
    +    "project"
    
    12
    +)
    
    13
    +
    
    14
    +
    
    15
    +# Remove artifact cache & set cli.config value of pullbuildtrees
    
    16
    +# to false, which is the default user context
    
    17
    +def default_state(cli, integration_cache, share):
    
    18
    +    shutil.rmtree(os.path.join(integration_cache, 'artifacts2'))
    
    19
    +    cli.configure({
    
    20
    +        'pullbuildtrees': False,
    
    21
    +        'artifacts': {'url': share.repo, 'push': False},
    
    22
    +        'artifactdir': os.path.join(integration_cache, 'artifacts2')
    
    23
    +    })
    
    24
    +
    
    25
    +
    
    26
    +# A test to capture the integration of the pullbuildtrees
    
    27
    +# behaviour, which by default is to not include the buildtree
    
    28
    +# directory of an element
    
    29
    +@pytest.mark.integration
    
    30
    +@pytest.mark.datafiles(DATA_DIR)
    
    31
    +def test_pullbuildtrees(cli, tmpdir, datafiles, integration_cache):
    
    32
    +
    
    33
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    34
    +    element_name = 'autotools/amhello.bst'
    
    35
    +
    
    36
    +    # Create artifact shares for pull & push testing
    
    37
    +    with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
    
    38
    +        create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2:
    
    39
    +        cli.configure({
    
    40
    +            'artifacts': {'url': share1.repo, 'push': True},
    
    41
    +            'artifactdir': os.path.join(integration_cache, 'artifacts2')
    
    42
    +        })
    
    43
    +
    
    44
    +        # Build autotools element, checked pushed, delete local
    
    45
    +        result = cli.run(project=project, args=['build', element_name])
    
    46
    +        assert result.exit_code == 0
    
    47
    +        assert cli.get_element_state(project, element_name) == 'cached'
    
    48
    +        assert share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    49
    +        default_state(cli, integration_cache, share1)
    
    50
    +
    
    51
    +        # Pull artifact with default config, assert that pulling again
    
    52
    +        # doesn't create a pull job, then assert with buildtrees user
    
    53
    +        # config set creates a pull job.
    
    54
    +        result = cli.run(project=project, args=['pull', element_name])
    
    55
    +        assert element_name in result.get_pulled_elements()
    
    56
    +        result = cli.run(project=project, args=['pull', element_name])
    
    57
    +        assert element_name not in result.get_pulled_elements()
    
    58
    +        cli.configure({'pullbuildtrees': True})
    
    59
    +        result = cli.run(project=project, args=['pull', element_name])
    
    60
    +        assert element_name in result.get_pulled_elements()
    
    61
    +        default_state(cli, integration_cache, share1)
    
    62
    +
    
    63
    +        # Pull artifact with default config, then assert that pulling
    
    64
    +        # with buildtrees cli flag set creates a pull job.
    
    65
    +        result = cli.run(project=project, args=['pull', element_name])
    
    66
    +        assert element_name in result.get_pulled_elements()
    
    67
    +        result = cli.run(project=project, args=['pull', '--pull-buildtrees', element_name])
    
    68
    +        assert element_name in result.get_pulled_elements()
    
    69
    +        default_state(cli, integration_cache, share1)
    
    70
    +
    
    71
    +        # Pull artifact with pullbuildtrees set in user config, then assert
    
    72
    +        # that pulling with the same user config doesn't create a pull job,
    
    73
    +        # or when buildtrees cli flag is set.
    
    74
    +        cli.configure({'pullbuildtrees': True})
    
    75
    +        result = cli.run(project=project, args=['pull', element_name])
    
    76
    +        assert element_name in result.get_pulled_elements()
    
    77
    +        result = cli.run(project=project, args=['pull', element_name])
    
    78
    +        assert element_name not in result.get_pulled_elements()
    
    79
    +        result = cli.run(project=project, args=['pull', '--pull-buildtrees', element_name])
    
    80
    +        assert element_name not in result.get_pulled_elements()
    
    81
    +        default_state(cli, integration_cache, share1)
    
    82
    +
    
    83
    +        # Pull artifact with default config and buildtrees cli flag set, then assert
    
    84
    +        # that pulling with pullbuildtrees set in user config doesn't create a pull
    
    85
    +        # job.
    
    86
    +        result = cli.run(project=project, args=['pull', '--pull-buildtrees', element_name])
    
    87
    +        assert element_name in result.get_pulled_elements()
    
    88
    +        cli.configure({'pullbuildtrees': True})
    
    89
    +        result = cli.run(project=project, args=['pull', element_name])
    
    90
    +        assert element_name not in result.get_pulled_elements()
    
    91
    +        default_state(cli, integration_cache, share1)
    
    92
    +
    
    93
    +        # Assert that a partial build element (not containing a populated buildtree dir)
    
    94
    +        # can't be pushed to an artifact share, then assert that a complete build element
    
    95
    +        # can be. This will attempt a partial pull from share1 and then a partial push
    
    96
    +        # to share2
    
    97
    +        result = cli.run(project=project, args=['pull', element_name])
    
    98
    +        assert element_name in result.get_pulled_elements()
    
    99
    +        cli.configure({'artifacts': {'url': share2.repo, 'push': True}})
    
    100
    +        result = cli.run(project=project, args=['push', element_name])
    
    101
    +        assert element_name not in result.get_pushed_elements()
    
    102
    +        assert not share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    103
    +
    
    104
    +        # Assert that after pulling the missing buildtree the element artifact can be
    
    105
    +        # successfully pushed to the remote. This will attempt to pull the buildtree
    
    106
    +        # from share1 and then a 'complete' push to share2
    
    107
    +        cli.configure({'artifacts': {'url': share1.repo, 'push': False}})
    
    108
    +        result = cli.run(project=project, args=['pull', '--pull-buildtrees', element_name])
    
    109
    +        assert element_name in result.get_pulled_elements()
    
    110
    +        cli.configure({'artifacts': {'url': share2.repo, 'push': True}})
    
    111
    +        result = cli.run(project=project, args=['push', element_name])
    
    112
    +        assert element_name in result.get_pushed_elements()
    
    113
    +        assert share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    114
    +        default_state(cli, integration_cache, share1)

  • tests/testutils/artifactshare.py
    ... ... @@ -128,7 +128,7 @@ class ArtifactShare():
    128 128
     
    
    129 129
             valid_chars = string.digits + string.ascii_letters + '-._'
    
    130 130
             element_name = ''.join([
    
    131
    -            x if x in valid_chars else '_'
    
    131
    +            x if x in valid_chars else '-'
    
    132 132
                 for x in element_name
    
    133 133
             ])
    
    134 134
             artifact_key = '{0}/{1}/{2}'.format(project_name, element_name, cache_key)
    



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]