[Notes] [Git][BuildStream/buildstream][tpollard/workspacebuildtree] 18 commits: tests: Migrated cache quota test into artifactcache/cache_size.py



Title: GitLab

Tom Pollard pushed to branch tpollard/workspacebuildtree at BuildStream / buildstream

Commits:

21 changed files:

Changes:

  • buildstream/_artifactcache.py
    ... ... @@ -752,6 +752,20 @@ class ArtifactCache():
    752 752
     
    
    753 753
             self.cas.link_ref(oldref, newref)
    
    754 754
     
    
    755
    +    # checkout_artifact_subdir()
    
    756
    +    #
    
    757
    +    # Checkout given artifact subdir into provided directory
    
    758
    +    #
    
    759
    +    # Args:
    
    760
    +    #     element (Element): The Element
    
    761
    +    #     key (str): The cache key to use
    
    762
    +    #     subdir (str): The subdir to checkout
    
    763
    +    #     tmpdir (str): The dir to place the subdir content
    
    764
    +    #
    
    765
    +    def checkout_artifact_subdir(self, element, key, subdir, tmpdir):
    
    766
    +        ref = self.get_artifact_fullname(element, key)
    
    767
    +        return self.cas.checkout_artifact_subdir(ref, subdir, tmpdir)
    
    768
    +
    
    755 769
         ################################################
    
    756 770
         #               Local Private Methods          #
    
    757 771
         ################################################
    
    ... ... @@ -882,16 +896,16 @@ class ArtifactCache():
    882 896
                 else:
    
    883 897
                     available = utils._pretty_size(available_space)
    
    884 898
     
    
    885
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    
    886
    -                            ("Your system does not have enough available " +
    
    887
    -                             "space to support the cache quota specified.\n" +
    
    888
    -                             "\nYou have specified a quota of {quota} total disk space.\n" +
    
    889
    -                             "- The filesystem containing {local_cache_path} only " +
    
    890
    -                             "has: {available_size} available.")
    
    891
    -                            .format(
    
    892
    -                                quota=self.context.config_cache_quota,
    
    893
    -                                local_cache_path=self.context.artifactdir,
    
    894
    -                                available_size=available))
    
    899
    +            raise ArtifactError("Your system does not have enough available " +
    
    900
    +                                "space to support the cache quota specified.",
    
    901
    +                                detail=("You have specified a quota of {quota} total disk space.\n" +
    
    902
    +                                        "The filesystem containing {local_cache_path} only " +
    
    903
    +                                        "has {available_size} available.")
    
    904
    +                                .format(
    
    905
    +                                    quota=self.context.config_cache_quota,
    
    906
    +                                    local_cache_path=self.context.artifactdir,
    
    907
    +                                    available_size=available),
    
    908
    +                                reason='insufficient-storage-for-quota')
    
    895 909
     
    
    896 910
             # Place a slight headroom (2e9 (2GB) on the cache_quota) into
    
    897 911
             # cache_quota to try and avoid exceptions.
    

  • buildstream/_cas/cascache.py
    ... ... @@ -587,6 +587,21 @@ class CASCache():
    587 587
             reachable = set()
    
    588 588
             self._reachable_refs_dir(reachable, tree, update_mtime=True)
    
    589 589
     
    
    590
    +    # checkout_artifact_subdir():
    
    591
    +    #
    
    592
    +    # Checkout given artifact subdir into provided directory
    
    593
    +    #
    
    594
    +    # Args:
    
    595
    +    #     ref (str): The ref to check
    
    596
    +    #     subdir (str): The subdir to checkout
    
    597
    +    #     tmpdir (str): The dir to place the subdir content
    
    598
    +    #
    
    599
    +    def checkout_artifact_subdir(self, ref, subdir, tmpdir):
    
    600
    +        tree = self.resolve_ref(ref)
    
    601
    +        # This assumes that the subdir digest is present in the element tree
    
    602
    +        subdirdigest = self._get_subdir(tree, subdir)
    
    603
    +        self._checkout(tmpdir, subdirdigest)
    
    604
    +
    
    590 605
         ################################################
    
    591 606
         #             Local Private Methods            #
    
    592 607
         ################################################
    

  • buildstream/_context.py
    ... ... @@ -133,6 +133,9 @@ class Context():
    133 133
             # a hard reset of a workspace, potentially losing changes.
    
    134 134
             self.prompt_workspace_reset_hard = None
    
    135 135
     
    
    136
    +        # Whether to include artifact buildtrees in workspaces if available
    
    137
    +        self.workspace_buildtrees = True
    
    138
    +
    
    136 139
             # Whether elements must be rebuilt when their dependencies have changed
    
    137 140
             self._strict_build_plan = None
    
    138 141
     
    
    ... ... @@ -191,7 +194,8 @@ class Context():
    191 194
             _yaml.node_validate(defaults, [
    
    192 195
                 'sourcedir', 'builddir', 'artifactdir', 'logdir',
    
    193 196
                 'scheduler', 'artifacts', 'logging', 'projects',
    
    194
    -            'cache', 'prompt', 'workspacedir', 'remote-execution'
    
    197
    +            'cache', 'prompt', 'workspacedir', 'remote-execution',
    
    198
    +            'workspace-buildtrees'
    
    195 199
             ])
    
    196 200
     
    
    197 201
             for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
    
    ... ... @@ -221,6 +225,9 @@ class Context():
    221 225
             # Load pull build trees configuration
    
    222 226
             self.pull_buildtrees = _yaml.node_get(cache, bool, 'pull-buildtrees')
    
    223 227
     
    
    228
    +        # Load workspace buildtrees configuration
    
    229
    +        self.workspace_buildtrees = _yaml.node_get(defaults, bool, 'workspace-buildtrees', default_value='True')
    
    230
    +
    
    224 231
             # Load logging config
    
    225 232
             logging = _yaml.node_get(defaults, Mapping, 'logging')
    
    226 233
             _yaml.node_validate(logging, [
    

  • buildstream/_frontend/cli.py
    ... ... @@ -724,7 +724,7 @@ def workspace():
    724 724
     ##################################################################
    
    725 725
     @workspace.command(name='open', short_help="Open a new workspace")
    
    726 726
     @click.option('--no-checkout', default=False, is_flag=True,
    
    727
    -              help="Do not checkout the source, only link to the given directory")
    
    727
    +              help="Do not checkout the source or cached buildtree, only link to the given directory")
    
    728 728
     @click.option('--force', '-f', default=False, is_flag=True,
    
    729 729
                   help="The workspace will be created even if the directory in which it will be created is not empty " +
    
    730 730
                   "or if a workspace for that element already exists")
    
    ... ... @@ -733,16 +733,25 @@ def workspace():
    733 733
     @click.option('--directory', type=click.Path(file_okay=False), default=None,
    
    734 734
                   help="Only for use when a single Element is given: Set the directory to use to create the workspace")
    
    735 735
     @click.argument('elements', nargs=-1, type=click.Path(readable=False), required=True)
    
    736
    +@click.option('--no-cache', default=False, is_flag=True,
    
    737
    +              help="Do not checkout the cached buildtree")
    
    736 738
     @click.pass_obj
    
    737
    -def workspace_open(app, no_checkout, force, track_, directory, elements):
    
    738
    -    """Open a workspace for manual source modification"""
    
    739
    +def workspace_open(app, no_checkout, force, track_, directory, elements, no_cache):
    
    740
    +
    
    741
    +    """Open a workspace for manual source modification, the elements buildtree
    
    742
    +    will be provided if available in the local artifact cache.
    
    743
    +    """
    
    744
    +
    
    745
    +    if not no_cache and not no_checkout:
    
    746
    +        click.echo("WARNING: Workspace will be opened without the cached buildtree if not cached locally")
    
    739 747
     
    
    740 748
         with app.initialized():
    
    741 749
             app.stream.workspace_open(elements,
    
    742 750
                                       no_checkout=no_checkout,
    
    743 751
                                       track_first=track_,
    
    744 752
                                       force=force,
    
    745
    -                                  custom_dir=directory)
    
    753
    +                                  custom_dir=directory,
    
    754
    +                                  no_cache=no_cache)
    
    746 755
     
    
    747 756
     
    
    748 757
     ##################################################################
    

  • buildstream/_gitsourcebase.py
    ... ... @@ -223,6 +223,31 @@ class GitMirror(SourceFetcher):
    223 223
                              fail="Failed to checkout git ref {}".format(self.ref),
    
    224 224
                              cwd=fullpath)
    
    225 225
     
    
    226
    +    def init_cached_build_workspace(self, directory):
    
    227
    +        fullpath = os.path.join(directory, self.path)
    
    228
    +        url = self.source.translate_url(self.url)
    
    229
    +
    
    230
    +        self.source.call([self.source.host_git, 'init', fullpath],
    
    231
    +                         fail="Failed to init git in directory: {}".format(fullpath),
    
    232
    +                         fail_temporarily=True,
    
    233
    +                         cwd=fullpath)
    
    234
    +
    
    235
    +        self.source.call([self.source.host_git, 'fetch', self.mirror],
    
    236
    +                         fail='Failed to fetch from local mirror "{}"'.format(self.mirror),
    
    237
    +                         cwd=fullpath)
    
    238
    +
    
    239
    +        self.source.call([self.source.host_git, 'remote', 'add', 'origin', url],
    
    240
    +                         fail='Failed to add remote origin "{}"'.format(url),
    
    241
    +                         cwd=fullpath)
    
    242
    +
    
    243
    +        self.source.call([self.source.host_git, 'update-ref', '--no-deref', 'HEAD', self.ref],
    
    244
    +                         fail='Failed update HEAD to ref "{}"'.format(self.ref),
    
    245
    +                         cwd=fullpath)
    
    246
    +
    
    247
    +        self.source.call([self.source.host_git, 'read-tree', 'HEAD'],
    
    248
    +                         fail='Failed to read HEAD into index',
    
    249
    +                         cwd=fullpath)
    
    250
    +
    
    226 251
         # List the submodules (path/url tuples) present at the given ref of this repo
    
    227 252
         def submodule_list(self):
    
    228 253
             modules = "{}:{}".format(self.ref, GIT_MODULES)
    
    ... ... @@ -522,6 +547,14 @@ class _GitSourceBase(Source):
    522 547
                 for mirror in self.submodules:
    
    523 548
                     mirror.init_workspace(directory)
    
    524 549
     
    
    550
    +    def init_cached_build_workspace(self, directory):
    
    551
    +        self._refresh_submodules()
    
    552
    +
    
    553
    +        with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
    
    554
    +            self.mirror.init_cached_build_workspace(directory)
    
    555
    +            for mirror in self.submodules:
    
    556
    +                mirror.init_cached_build_workspace(directory)
    
    557
    +
    
    525 558
         def stage(self, directory):
    
    526 559
     
    
    527 560
             # Need to refresh submodule list here again, because
    

  • buildstream/_stream.py
    ... ... @@ -489,14 +489,21 @@ class Stream():
    489 489
         #    track_first (bool): Whether to track and fetch first
    
    490 490
         #    force (bool): Whether to ignore contents in an existing directory
    
    491 491
         #    custom_dir (str): Custom location to create a workspace or false to use default location.
    
    492
    +    #    no_cache (bool): Whether to not include the cached buildtree
    
    492 493
         #
    
    493 494
         def workspace_open(self, targets, *,
    
    494 495
                            no_checkout,
    
    495 496
                            track_first,
    
    496 497
                            force,
    
    497
    -                       custom_dir):
    
    498
    +                       custom_dir,
    
    499
    +                       no_cache):
    
    500
    +
    
    498 501
             # This function is a little funny but it is trying to be as atomic as possible.
    
    499 502
     
    
    503
    +        # Set no_cache if the global user conf workspacebuildtrees is false
    
    504
    +        if not self._context.workspace_buildtrees:
    
    505
    +            no_cache = True
    
    506
    +
    
    500 507
             if track_first:
    
    501 508
                 track_targets = targets
    
    502 509
             else:
    
    ... ... @@ -554,7 +561,7 @@ class Stream():
    554 561
                 directory = os.path.abspath(custom_dir)
    
    555 562
                 expanded_directories = [directory, ]
    
    556 563
             else:
    
    557
    -            # If this fails it is a bug in what ever calls this, usually cli.py and so can not be tested for via the
    
    564
    +            # If this fails it is a bug in whatever calls this, usually cli.py and so can not be tested for via the
    
    558 565
                 # run bst test mechanism.
    
    559 566
                 assert len(elements) == len(expanded_directories)
    
    560 567
     
    
    ... ... @@ -569,12 +576,26 @@ class Stream():
    569 576
                                           .format(target.name, directory), reason='bad-directory')
    
    570 577
     
    
    571 578
             # So far this function has tried to catch as many issues as possible with out making any changes
    
    572
    -        # Now it dose the bits that can not be made atomic.
    
    579
    +        # Now it does the bits that can not be made atomic.
    
    573 580
             targetGenerator = zip(elements, expanded_directories)
    
    574 581
             for target, directory in targetGenerator:
    
    575 582
                 self._message(MessageType.INFO, "Creating workspace for element {}"
    
    576 583
                               .format(target.name))
    
    577 584
     
    
    585
    +            # Check if given target has a buildtree artifact cached locally
    
    586
    +            buildtree = None
    
    587
    +            if target._cached():
    
    588
    +                buildtree = target._cached_buildtree()
    
    589
    +
    
    590
    +            # If we're running in the default state, make the user aware of buildtree usage
    
    591
    +            if not no_cache and not no_checkout:
    
    592
    +                if buildtree:
    
    593
    +                    self._message(MessageType.INFO, "{} buildtree artifact is available,"
    
    594
    +                                  " workspace will be opened with it".format(target.name))
    
    595
    +                else:
    
    596
    +                    self._message(MessageType.WARN, "{} buildtree artifact not available,"
    
    597
    +                                  " workspace will be opened with source checkout".format(target.name))
    
    598
    +
    
    578 599
                 workspace = workspaces.get_workspace(target._get_full_name())
    
    579 600
                 if workspace:
    
    580 601
                     workspaces.delete_workspace(target._get_full_name())
    
    ... ... @@ -589,7 +610,20 @@ class Stream():
    589 610
                         todo_elements = "\nDid not try to create workspaces for " + todo_elements
    
    590 611
                     raise StreamError("Failed to create workspace directory: {}".format(e) + todo_elements) from e
    
    591 612
     
    
    592
    -            workspaces.create_workspace(target, directory, checkout=not no_checkout)
    
    613
    +            # Handle opening workspace with buildtree included
    
    614
    +            if (buildtree and not no_cache) and not no_checkout:
    
    615
    +                workspaces.create_workspace(target, directory, checkout=not no_checkout, cached_build=buildtree)
    
    616
    +                with target.timed_activity("Staging buildtree to {}".format(directory)):
    
    617
    +                    target._open_workspace(buildtree=buildtree)
    
    618
    +            else:
    
    619
    +                workspaces.create_workspace(target, directory, checkout=not no_checkout)
    
    620
    +                if (not buildtree or no_cache) and not no_checkout:
    
    621
    +                    with target.timed_activity("Staging sources to {}".format(directory)):
    
    622
    +                        target._open_workspace()
    
    623
    +
    
    624
    +            # Saving the workspace configuration once each workspace is set up means that if creating
    
    625
    +            # the next workspace fails, the successfully created workspaces are still saved.
    
    626
    +            workspaces.save_config()
    
    593 627
                 self._message(MessageType.INFO, "Created a workspace for element: {}"
    
    594 628
                               .format(target._get_full_name()))
    
    595 629
     
    
    ... ... @@ -672,7 +706,25 @@ class Stream():
    672 706
                                           .format(workspace_path, e)) from e
    
    673 707
     
    
    674 708
                 workspaces.delete_workspace(element._get_full_name())
    
    675
    -            workspaces.create_workspace(element, workspace_path, checkout=True)
    
    709
    +
    
    710
    +            # Create the workspace, ensuring the original optional cached build state is preserved if
    
    711
    +            # possible.
    
    712
    +            buildtree = False
    
    713
    +            if workspace.cached_build and element._cached():
    
    714
    +                if self._artifacts.contains_subdir_artifact(element, element._get_cache_key(), 'buildtree'):
    
    715
    +                    buildtree = True
    
    716
    +
    
    717
    +            # Warn the user if the workspace cannot be opened with the original cached build state
    
    718
    +            if workspace.cached_build and not buildtree:
    
    719
    +                self._message(MessageType.WARN, "{} original buildtree artifact not available,"
    
    720
    +                              " workspace will be opened with source checkout".format(element.name))
    
    721
    +
    
    722
    +            # If opening the cached build, set checkout to false
    
    723
    +            workspaces.create_workspace(element, workspace_path,
    
    724
    +                                        checkout=not buildtree, cached_build=buildtree)
    
    725
    +
    
    726
    +            with element.timed_activity("Staging to {}".format(workspace_path)):
    
    727
    +                element._open_workspace(buildtree=buildtree)
    
    676 728
     
    
    677 729
                 self._message(MessageType.INFO,
    
    678 730
                               "Reset workspace for {} at: {}".format(element.name,
    

  • buildstream/_workspaces.py
    ... ... @@ -24,7 +24,7 @@ from . import _yaml
    24 24
     from ._exceptions import LoadError, LoadErrorReason
    
    25 25
     
    
    26 26
     
    
    27
    -BST_WORKSPACE_FORMAT_VERSION = 3
    
    27
    +BST_WORKSPACE_FORMAT_VERSION = 4
    
    28 28
     BST_WORKSPACE_PROJECT_FORMAT_VERSION = 1
    
    29 29
     WORKSPACE_PROJECT_FILE = ".bstproject.yaml"
    
    30 30
     
    
    ... ... @@ -239,9 +239,11 @@ class WorkspaceProjectCache():
    239 239
     #    running_files (dict): A dict mapping dependency elements to files
    
    240 240
     #                          changed between failed builds. Should be
    
    241 241
     #                          made obsolete with failed build artifacts.
    
    242
    +#    cached_build (bool): If the workspace is staging the cached build artifact
    
    242 243
     #
    
    243 244
     class Workspace():
    
    244
    -    def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
    
    245
    +    def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False,
    
    246
    +                 running_files=None, cached_build=False):
    
    245 247
             self.prepared = prepared
    
    246 248
             self.last_successful = last_successful
    
    247 249
             self._path = path
    
    ... ... @@ -249,6 +251,7 @@ class Workspace():
    249 251
     
    
    250 252
             self._toplevel_project = toplevel_project
    
    251 253
             self._key = None
    
    254
    +        self.cached_build = cached_build
    
    252 255
     
    
    253 256
         # to_dict()
    
    254 257
         #
    
    ... ... @@ -261,7 +264,8 @@ class Workspace():
    261 264
             ret = {
    
    262 265
                 'prepared': self.prepared,
    
    263 266
                 'path': self._path,
    
    264
    -            'running_files': self.running_files
    
    267
    +            'running_files': self.running_files,
    
    268
    +            'cached_build': self.cached_build
    
    265 269
             }
    
    266 270
             if self.last_successful is not None:
    
    267 271
                 ret["last_successful"] = self.last_successful
    
    ... ... @@ -429,8 +433,9 @@ class Workspaces():
    429 433
         #    target (Element) - The element to create a workspace for
    
    430 434
         #    path (str) - The path in which the workspace should be kept
    
    431 435
         #    checkout (bool): Whether to check-out the element's sources into the directory
    
    436
    +    #    cached_build (bool) - If the workspace is staging the cached build artifact
    
    432 437
         #
    
    433
    -    def create_workspace(self, target, path, *, checkout):
    
    438
    +    def create_workspace(self, target, path, *, checkout, cached_build=False):
    
    434 439
             element_name = target._get_full_name()
    
    435 440
             project_dir = self._toplevel_project.directory
    
    436 441
             if path.startswith(project_dir):
    
    ... ... @@ -438,7 +443,8 @@ class Workspaces():
    438 443
             else:
    
    439 444
                 workspace_path = path
    
    440 445
     
    
    441
    -        self._workspaces[element_name] = Workspace(self._toplevel_project, path=workspace_path)
    
    446
    +        self._workspaces[element_name] = Workspace(self._toplevel_project, path=workspace_path,
    
    447
    +                                                   cached_build=cached_build)
    
    442 448
     
    
    443 449
             if checkout:
    
    444 450
                 with target.timed_activity("Staging sources to {}".format(path)):
    
    ... ... @@ -627,6 +633,7 @@ class Workspaces():
    627 633
                 'path': _yaml.node_get(node, str, 'path'),
    
    628 634
                 'last_successful': _yaml.node_get(node, str, 'last_successful', default_value=None),
    
    629 635
                 'running_files': _yaml.node_get(node, dict, 'running_files', default_value=None),
    
    636
    +            'cached_build': _yaml.node_get(node, bool, 'cached_build', default_value=False)
    
    630 637
             }
    
    631 638
             return Workspace.from_dict(self._toplevel_project, dictionary)
    
    632 639
     
    

  • buildstream/element.py
    ... ... @@ -1909,7 +1909,10 @@ class Element(Plugin):
    1909 1909
         # This requires that a workspace already be created in
    
    1910 1910
         # the workspaces metadata first.
    
    1911 1911
         #
    
    1912
    -    def _open_workspace(self):
    
    1912
    +    # Args:
    
    1913
    +    #    buildtree (bool): Whether to open workspace with artifact buildtree
    
    1914
    +    #
    
    1915
    +    def _open_workspace(self, buildtree=False):
    
    1913 1916
             context = self._get_context()
    
    1914 1917
             workspace = self._get_workspace()
    
    1915 1918
             assert workspace is not None
    
    ... ... @@ -1922,11 +1925,19 @@ class Element(Plugin):
    1922 1925
             # files in the target directory actually works without any
    
    1923 1926
             # additional support from Source implementations.
    
    1924 1927
             #
    
    1928
    +
    
    1925 1929
             os.makedirs(context.builddir, exist_ok=True)
    
    1926 1930
             with utils._tempdir(dir=context.builddir, prefix='workspace-{}'
    
    1927 1931
                                 .format(self.normal_name)) as temp:
    
    1928
    -            for source in self.sources():
    
    1929
    -                source._init_workspace(temp)
    
    1932
    +
    
    1933
    +            # Checkout cached buildtree, augment with source plugin if applicable
    
    1934
    +            if buildtree:
    
    1935
    +                self.__artifacts.checkout_artifact_subdir(self, self._get_cache_key(), 'buildtree', temp)
    
    1936
    +                for source in self.sources():
    
    1937
    +                    source._init_cached_build_workspace(temp)
    
    1938
    +            else:
    
    1939
    +                for source in self.sources():
    
    1940
    +                    source._init_workspace(temp)
    
    1930 1941
     
    
    1931 1942
                 # Now hardlink the files into the workspace target.
    
    1932 1943
                 utils.link_files(temp, workspace.get_absolute_path())
    

  • buildstream/source.py
    ... ... @@ -465,6 +465,24 @@ class Source(Plugin):
    465 465
             """
    
    466 466
             self.stage(directory)
    
    467 467
     
    
    468
    +    def init_cached_build_workspace(self, directory):
    
    469
    +        """Initialises a new cached build workspace
    
    470
    +
    
    471
    +        Args:
    
    472
    +           directory (str): Path of the workspace to init
    
    473
    +
    
    474
    +        Raises:
    
    475
    +           :class:`.SourceError`
    
    476
    +
    
    477
    +        Implementors overriding this method should assume that *directory*
    
    478
    +        already exists.
    
    479
    +
    
    480
    +        Implementors should raise :class:`.SourceError` when encountering
    
    481
    +        some system error.
    
    482
    +        """
    
    483
    +        # Allow a non implementation
    
    484
    +        return None
    
    485
    +
    
    468 486
         def get_source_fetchers(self):
    
    469 487
             """Get the objects that are used for fetching
    
    470 488
     
    
    ... ... @@ -717,6 +735,12 @@ class Source(Plugin):
    717 735
     
    
    718 736
             self.init_workspace(directory)
    
    719 737
     
    
    738
    +    # Wrapper for init_cached_build_workspace()
    
    739
    +    def _init_cached_build_workspace(self, directory):
    
    740
    +        directory = self.__ensure_directory(directory)
    
    741
    +
    
    742
    +        self.init_cached_build_workspace(directory)
    
    743
    +
    
    720 744
         # _get_unique_key():
    
    721 745
         #
    
    722 746
         # Wrapper for get_unique_key() api
    

  • doc/source/developing/workspaces.rst
    ... ... @@ -24,9 +24,32 @@ Suppose we now want to alter the functionality of the *hello* command. We can
    24 24
     make changes to the source code of Buildstream elements by making use of
    
    25 25
     BuildStream's workspace command.
    
    26 26
     
    
    27
    +Utilising cached buildtrees
    
    28
    +---------------------------
    
    29
    + When a BuildStream build element artifact is created and cached, a snapshot of
    
    30
    + the build directory after the build commands have completed is included in the
    
    31
    + artifact. This `build tree` can be considered an intermediary state of the element,
    
    32
    + where the source is present along with any output created during the build
    
    33
    + execution.
    
    34
    +
    
    35
    + By default when opening a workspace, bst will attempt to stage the build tree
    
    36
    + into the workspace if it's available in the local cache. If the respective
    
    37
    + build tree is not present in the cache (element not cached, partially cached or
    
    38
    + is a non build element) then the source will be staged as is. The default
    
    39
    + behaviour to attempt to use the build tree can be overriden with specific bst
    
    40
    + workspace open option of `--no-cache`, or via setting user configuration option
    
    41
    + `workspacebuildtrees: False`
    
    42
    +
    
    27 43
     
    
    28 44
     Opening a workspace
    
    29 45
     -------------------
    
    46
    +.. note::
    
    47
    +
    
    48
    +    This example presumes you built the hello.bst during
    
    49
    +    :ref:`running commands <tutorial_running_commands>`
    
    50
    +    if not, please start by building it.
    
    51
    +
    
    52
    +
    
    30 53
     First we need to open a workspace, we can do this by running
    
    31 54
     
    
    32 55
     .. raw:: html
    
    ... ... @@ -93,6 +116,15 @@ Alternatively, if we wish to discard the changes we can use
    93 116
     
    
    94 117
     This resets the workspace to its original state.
    
    95 118
     
    
    119
    +.. note::
    
    120
    +
    
    121
    +    bst reset will attempt to open the workspace in
    
    122
    +    the condition in which it was originally staged,
    
    123
    +    i.e. with or without consuming the element build tree.
    
    124
    +    If it was originally staged with a cached build tree
    
    125
    +    and there's no longer one available, the source will
    
    126
    +    be staged as is.
    
    127
    +
    
    96 128
     To discard the workspace completely we can do:
    
    97 129
     
    
    98 130
     .. raw:: html
    

  • tests/artifactcache/cache_size.py
    1 1
     import os
    
    2 2
     import pytest
    
    3
    +from unittest import mock
    
    3 4
     
    
    4 5
     from buildstream import _yaml
    
    5 6
     from buildstream._artifactcache import CACHE_SIZE_FILE
    
    7
    +from buildstream._exceptions import ErrorDomain
    
    6 8
     
    
    7 9
     from tests.testutils import cli, create_element_size
    
    8 10
     
    
    ... ... @@ -60,3 +62,29 @@ def test_cache_size_write(cli, tmpdir):
    60 62
         with open(sizefile, "r") as f:
    
    61 63
             size_data = f.read()
    
    62 64
         size = int(size_data)
    
    65
    +
    
    66
    +
    
    67
    +def test_quota_over_1024T(cli, tmpdir):
    
    68
    +    KiB = 1024
    
    69
    +    MiB = (KiB * 1024)
    
    70
    +    GiB = (MiB * 1024)
    
    71
    +    TiB = (GiB * 1024)
    
    72
    +
    
    73
    +    cli.configure({
    
    74
    +        'cache': {
    
    75
    +            'quota': 2048 * TiB
    
    76
    +        }
    
    77
    +    })
    
    78
    +    project = tmpdir.join("main")
    
    79
    +    os.makedirs(str(project))
    
    80
    +    _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
    
    81
    +
    
    82
    +    volume_space_patch = mock.patch(
    
    83
    +        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
    
    84
    +        autospec=True,
    
    85
    +        return_value=(1025 * TiB, 1025 * TiB)
    
    86
    +    )
    
    87
    +
    
    88
    +    with volume_space_patch:
    
    89
    +        result = cli.run(project, args=["build", "file.bst"])
    
    90
    +        result.assert_main_error(ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')

  • tests/artifactcache/expiry.py
    ... ... @@ -66,8 +66,9 @@ def test_artifact_expires(cli, datafiles, tmpdir):
    66 66
         res.assert_success()
    
    67 67
     
    
    68 68
         # Check that the correct element remains in the cache
    
    69
    -    assert cli.get_element_state(project, 'target.bst') != 'cached'
    
    70
    -    assert cli.get_element_state(project, 'target2.bst') == 'cached'
    
    69
    +    states = cli.get_element_states(project, ['target.bst', 'target2.bst'])
    
    70
    +    assert states['target.bst'] != 'cached'
    
    71
    +    assert states['target2.bst'] == 'cached'
    
    71 72
     
    
    72 73
     
    
    73 74
     # Ensure that we don't end up deleting the whole cache (or worse) if
    
    ... ... @@ -144,9 +145,11 @@ def test_expiry_order(cli, datafiles, tmpdir):
    144 145
         # have been removed.
    
    145 146
         # Note that buildstream will reduce the cache to 50% of the
    
    146 147
         # original size - we therefore remove multiple elements.
    
    147
    -
    
    148
    -    assert (tuple(cli.get_element_state(project, element) for element in
    
    149
    -                  ('unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst')) ==
    
    148
    +    check_elements = [
    
    149
    +        'unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst'
    
    150
    +    ]
    
    151
    +    states = cli.get_element_states(project, check_elements)
    
    152
    +    assert (tuple(states[element] for element in check_elements) ==
    
    150 153
                 ('buildable', 'buildable', 'buildable', 'cached', 'cached', ))
    
    151 154
     
    
    152 155
     
    
    ... ... @@ -176,8 +179,9 @@ def test_keep_dependencies(cli, datafiles, tmpdir):
    176 179
         res.assert_success()
    
    177 180
     
    
    178 181
         # Check that the correct element remains in the cache
    
    179
    -    assert cli.get_element_state(project, 'dependency.bst') == 'cached'
    
    180
    -    assert cli.get_element_state(project, 'unrelated.bst') == 'cached'
    
    182
    +    states = cli.get_element_states(project, ['dependency.bst', 'unrelated.bst'])
    
    183
    +    assert states['dependency.bst'] == 'cached'
    
    184
    +    assert states['unrelated.bst'] == 'cached'
    
    181 185
     
    
    182 186
         # We try to build an element which depends on the LRU artifact,
    
    183 187
         # and could therefore fail if we didn't make sure dependencies
    
    ... ... @@ -192,9 +196,10 @@ def test_keep_dependencies(cli, datafiles, tmpdir):
    192 196
         res = cli.run(project=project, args=['build', 'target.bst'])
    
    193 197
         res.assert_success()
    
    194 198
     
    
    195
    -    assert cli.get_element_state(project, 'unrelated.bst') != 'cached'
    
    196
    -    assert cli.get_element_state(project, 'dependency.bst') == 'cached'
    
    197
    -    assert cli.get_element_state(project, 'target.bst') == 'cached'
    
    199
    +    states = cli.get_element_states(project, ['target.bst', 'unrelated.bst'])
    
    200
    +    assert states['target.bst'] == 'cached'
    
    201
    +    assert states['dependency.bst'] == 'cached'
    
    202
    +    assert states['unrelated.bst'] != 'cached'
    
    198 203
     
    
    199 204
     
    
    200 205
     # Assert that we never delete a dependency required for a build tree
    
    ... ... @@ -239,11 +244,11 @@ def test_never_delete_required(cli, datafiles, tmpdir):
    239 244
         # life there may potentially be N-builders cached artifacts
    
    240 245
         # which exceed the quota
    
    241 246
         #
    
    242
    -    assert cli.get_element_state(project, 'dep1.bst') == 'cached'
    
    243
    -    assert cli.get_element_state(project, 'dep2.bst') == 'cached'
    
    244
    -
    
    245
    -    assert cli.get_element_state(project, 'dep3.bst') != 'cached'
    
    246
    -    assert cli.get_element_state(project, 'target.bst') != 'cached'
    
    247
    +    states = cli.get_element_states(project, ['target.bst'])
    
    248
    +    assert states['dep1.bst'] == 'cached'
    
    249
    +    assert states['dep2.bst'] == 'cached'
    
    250
    +    assert states['dep3.bst'] != 'cached'
    
    251
    +    assert states['target.bst'] != 'cached'
    
    247 252
     
    
    248 253
     
    
    249 254
     # Assert that we never delete a dependency required for a build tree,
    
    ... ... @@ -275,10 +280,11 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):
    275 280
         res.assert_success()
    
    276 281
     
    
    277 282
         # They should all be cached
    
    278
    -    assert cli.get_element_state(project, 'dep1.bst') == 'cached'
    
    279
    -    assert cli.get_element_state(project, 'dep2.bst') == 'cached'
    
    280
    -    assert cli.get_element_state(project, 'dep3.bst') == 'cached'
    
    281
    -    assert cli.get_element_state(project, 'target.bst') == 'cached'
    
    283
    +    states = cli.get_element_states(project, ['target.bst'])
    
    284
    +    assert states['dep1.bst'] == 'cached'
    
    285
    +    assert states['dep2.bst'] == 'cached'
    
    286
    +    assert states['dep3.bst'] == 'cached'
    
    287
    +    assert states['target.bst'] == 'cached'
    
    282 288
     
    
    283 289
         # Now increase the size of all the elements
    
    284 290
         #
    
    ... ... @@ -296,28 +302,37 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):
    296 302
     
    
    297 303
         # Expect the same result that we did in test_never_delete_required()
    
    298 304
         #
    
    299
    -    assert cli.get_element_state(project, 'dep1.bst') == 'cached'
    
    300
    -    assert cli.get_element_state(project, 'dep2.bst') == 'cached'
    
    301
    -    assert cli.get_element_state(project, 'dep3.bst') != 'cached'
    
    302
    -    assert cli.get_element_state(project, 'target.bst') != 'cached'
    
    305
    +    states = cli.get_element_states(project, ['target.bst'])
    
    306
    +    assert states['dep1.bst'] == 'cached'
    
    307
    +    assert states['dep2.bst'] == 'cached'
    
    308
    +    assert states['dep3.bst'] != 'cached'
    
    309
    +    assert states['target.bst'] != 'cached'
    
    303 310
     
    
    304 311
     
    
    305 312
     # Ensure that only valid cache quotas make it through the loading
    
    306 313
     # process.
    
    307
    -@pytest.mark.parametrize("quota,success", [
    
    308
    -    ("1", True),
    
    309
    -    ("1K", True),
    
    310
    -    ("50%", True),
    
    311
    -    ("infinity", True),
    
    312
    -    ("0", True),
    
    313
    -    ("-1", False),
    
    314
    -    ("pony", False),
    
    315
    -    ("7K", False),
    
    316
    -    ("70%", False),
    
    317
    -    ("200%", False)
    
    314
    +#
    
    315
    +# This test virtualizes the condition to assume a storage volume
    
    316
    +# has 10K total disk space, and 6K of it is already in use (not
    
    317
    +# including any space used by the artifact cache).
    
    318
    +#
    
    319
    +@pytest.mark.parametrize("quota,err_domain,err_reason", [
    
    320
    +    # Valid configurations
    
    321
    +    ("1", 'success', None),
    
    322
    +    ("1K", 'success', None),
    
    323
    +    ("50%", 'success', None),
    
    324
    +    ("infinity", 'success', None),
    
    325
    +    ("0", 'success', None),
    
    326
    +    # Invalid configurations
    
    327
    +    ("-1", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
    
    328
    +    ("pony", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
    
    329
    +    ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
    
    330
    +    # Not enough space for these caches
    
    331
    +    ("7K", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota'),
    
    332
    +    ("70%", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')
    
    318 333
     ])
    
    319 334
     @pytest.mark.datafiles(DATA_DIR)
    
    320
    -def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
    
    335
    +def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reason):
    
    321 336
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    322 337
         os.makedirs(os.path.join(project, 'elements'))
    
    323 338
     
    
    ... ... @@ -356,10 +371,10 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
    356 371
         with volume_space_patch, cache_size_patch:
    
    357 372
             res = cli.run(project=project, args=['workspace', 'list'])
    
    358 373
     
    
    359
    -    if success:
    
    374
    +    if err_domain == 'success':
    
    360 375
             res.assert_success()
    
    361 376
         else:
    
    362
    -        res.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    
    377
    +        res.assert_main_error(err_domain, err_reason)
    
    363 378
     
    
    364 379
     
    
    365 380
     @pytest.mark.datafiles(DATA_DIR)
    

  • tests/elements/filter.py
    ... ... @@ -389,8 +389,9 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
    389 389
         _yaml.dump(filter2_config, filter2_file)
    
    390 390
     
    
    391 391
         # Assert that a fetch is needed
    
    392
    -    assert cli.get_element_state(project, input_name) == 'no reference'
    
    393
    -    assert cli.get_element_state(project, input2_name) == 'no reference'
    
    392
    +    states = cli.get_element_states(project, [input_name, input2_name])
    
    393
    +    assert states[input_name] == 'no reference'
    
    394
    +    assert states[input2_name] == 'no reference'
    
    394 395
     
    
    395 396
         # Now try to track it
    
    396 397
         result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
    
    ... ... @@ -450,8 +451,9 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
    450 451
         _yaml.dump(filter2_config, filter2_file)
    
    451 452
     
    
    452 453
         # Assert that a fetch is needed
    
    453
    -    assert cli.get_element_state(project, input_name) == 'no reference'
    
    454
    -    assert cli.get_element_state(project, input2_name) == 'no reference'
    
    454
    +    states = cli.get_element_states(project, [input_name, input2_name])
    
    455
    +    assert states[input_name] == 'no reference'
    
    456
    +    assert states[input2_name] == 'no reference'
    
    455 457
     
    
    456 458
         # Now try to track it
    
    457 459
         result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst", "--except", input_name])
    

  • tests/frontend/pull.py
    ... ... @@ -66,16 +66,16 @@ def test_push_pull_all(cli, tmpdir, datafiles):
    66 66
             shutil.rmtree(artifacts)
    
    67 67
     
    
    68 68
             # Assert that nothing is cached locally anymore
    
    69
    -        for element_name in all_elements:
    
    70
    -            assert cli.get_element_state(project, element_name) != 'cached'
    
    69
    +        states = cli.get_element_states(project, all_elements)
    
    70
    +        assert not any(states[e] == 'cached' for e in all_elements)
    
    71 71
     
    
    72 72
             # Now try bst pull
    
    73 73
             result = cli.run(project=project, args=['artifact', 'pull', '--deps', 'all', 'target.bst'])
    
    74 74
             result.assert_success()
    
    75 75
     
    
    76 76
             # And assert that it's again in the local cache, without having built
    
    77
    -        for element_name in all_elements:
    
    78
    -            assert cli.get_element_state(project, element_name) == 'cached'
    
    77
    +        states = cli.get_element_states(project, all_elements)
    
    78
    +        assert not any(states[e] != 'cached' for e in all_elements)
    
    79 79
     
    
    80 80
     
    
    81 81
     # Tests that:
    

  • tests/frontend/push.py
    ... ... @@ -250,9 +250,10 @@ def test_artifact_expires(cli, datafiles, tmpdir):
    250 250
             result.assert_success()
    
    251 251
     
    
    252 252
             # check that element's 1 and 2 are cached both locally and remotely
    
    253
    -        assert cli.get_element_state(project, 'element1.bst') == 'cached'
    
    253
    +        states = cli.get_element_states(project, ['element1.bst', 'element2.bst'])
    
    254
    +        assert states['element1.bst'] == 'cached'
    
    255
    +        assert states['element2.bst'] == 'cached'
    
    254 256
             assert_shared(cli, share, project, 'element1.bst')
    
    255
    -        assert cli.get_element_state(project, 'element2.bst') == 'cached'
    
    256 257
             assert_shared(cli, share, project, 'element2.bst')
    
    257 258
     
    
    258 259
             # Create and build another element of 5 MB (This will exceed the free disk space available)
    
    ... ... @@ -298,11 +299,12 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
    298 299
             result.assert_success()
    
    299 300
     
    
    300 301
             # Ensure that the small artifact is still in the share
    
    301
    -        assert cli.get_element_state(project, 'small_element.bst') == 'cached'
    
    302
    +        states = cli.get_element_states(project, ['small_element.bst', 'large_element.bst'])
    
    303
    +        states['small_element.bst'] == 'cached'
    
    302 304
             assert_shared(cli, share, project, 'small_element.bst')
    
    303 305
     
    
    304 306
             # Ensure that the artifact is cached locally but NOT remotely
    
    305
    -        assert cli.get_element_state(project, 'large_element.bst') == 'cached'
    
    307
    +        states['large_element.bst'] == 'cached'
    
    306 308
             assert_not_shared(cli, share, project, 'large_element.bst')
    
    307 309
     
    
    308 310
     
    
    ... ... @@ -334,8 +336,9 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    334 336
             result.assert_success()
    
    335 337
     
    
    336 338
             # Ensure they are cached locally
    
    337
    -        assert cli.get_element_state(project, 'element1.bst') == 'cached'
    
    338
    -        assert cli.get_element_state(project, 'element2.bst') == 'cached'
    
    339
    +        states = cli.get_element_states(project, ['element1.bst', 'element2.bst'])
    
    340
    +        assert states['element1.bst'] == 'cached'
    
    341
    +        assert states['element2.bst'] == 'cached'
    
    339 342
     
    
    340 343
             # Ensure that they have  been pushed to the cache
    
    341 344
             assert_shared(cli, share, project, 'element1.bst')
    

  • tests/frontend/track.py
    ... ... @@ -123,7 +123,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
    123 123
             last_element_name = element_name
    
    124 124
     
    
    125 125
         # Assert that a fetch is needed
    
    126
    -    states = cli.get_element_states(project, last_element_name)
    
    126
    +    states = cli.get_element_states(project, [last_element_name])
    
    127 127
         for element_name in element_names:
    
    128 128
             assert states[element_name] == 'no reference'
    
    129 129
     
    
    ... ... @@ -143,7 +143,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
    143 143
         result.assert_success()
    
    144 144
     
    
    145 145
         # Assert that the base is buildable and the rest are waiting
    
    146
    -    states = cli.get_element_states(project, last_element_name)
    
    146
    +    states = cli.get_element_states(project, [last_element_name])
    
    147 147
         for element_name in element_names:
    
    148 148
             if element_name == element_names[0]:
    
    149 149
                 assert states[element_name] == 'buildable'
    
    ... ... @@ -171,8 +171,9 @@ def test_track_single(cli, tmpdir, datafiles):
    171 171
                          dep_name=element_dep_name)
    
    172 172
     
    
    173 173
         # Assert that tracking is needed for both elements
    
    174
    -    assert cli.get_element_state(project, element_dep_name) == 'no reference'
    
    175
    -    assert cli.get_element_state(project, element_target_name) == 'no reference'
    
    174
    +    states = cli.get_element_states(project, [element_target_name])
    
    175
    +    assert states[element_dep_name] == 'no reference'
    
    176
    +    assert states[element_target_name] == 'no reference'
    
    176 177
     
    
    177 178
         # Now first try to track only one element
    
    178 179
         result = cli.run(project=project, args=[
    
    ... ... @@ -187,8 +188,9 @@ def test_track_single(cli, tmpdir, datafiles):
    187 188
         result.assert_success()
    
    188 189
     
    
    189 190
         # Assert that the dependency is waiting and the target has still never been tracked
    
    190
    -    assert cli.get_element_state(project, element_dep_name) == 'no reference'
    
    191
    -    assert cli.get_element_state(project, element_target_name) == 'waiting'
    
    191
    +    states = cli.get_element_states(project, [element_target_name])
    
    192
    +    assert states[element_dep_name] == 'no reference'
    
    193
    +    assert states[element_target_name] == 'waiting'
    
    192 194
     
    
    193 195
     
    
    194 196
     @pytest.mark.datafiles(DATA_DIR)
    
    ... ... @@ -212,8 +214,9 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
    212 214
                          dep_name=element_dep_name)
    
    213 215
     
    
    214 216
         # Assert that a fetch is needed
    
    215
    -    assert cli.get_element_state(project, element_dep_name) == 'no reference'
    
    216
    -    assert cli.get_element_state(project, element_target_name) == 'no reference'
    
    217
    +    states = cli.get_element_states(project, [element_target_name])
    
    218
    +    assert states[element_dep_name] == 'no reference'
    
    219
    +    assert states[element_target_name] == 'no reference'
    
    217 220
     
    
    218 221
         # Now first try to track it
    
    219 222
         result = cli.run(project=project, args=[
    
    ... ... @@ -231,8 +234,9 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
    231 234
         result.assert_success()
    
    232 235
     
    
    233 236
         # Assert that the dependency is buildable and the target is waiting
    
    234
    -    assert cli.get_element_state(project, element_dep_name) == 'no reference'
    
    235
    -    assert cli.get_element_state(project, element_target_name) == 'waiting'
    
    237
    +    states = cli.get_element_states(project, [element_target_name])
    
    238
    +    assert states[element_dep_name] == 'no reference'
    
    239
    +    assert states[element_target_name] == 'waiting'
    
    236 240
     
    
    237 241
     
    
    238 242
     @pytest.mark.datafiles(os.path.join(TOP_DIR))
    
    ... ... @@ -672,21 +676,20 @@ def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
    672 676
     
    
    673 677
     
    
    674 678
     @pytest.mark.datafiles(DATA_DIR)
    
    675
    -@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
    
    676
    -def test_track_error_cannot_write_file(cli, tmpdir, datafiles, kind):
    
    679
    +def test_track_error_cannot_write_file(cli, tmpdir, datafiles):
    
    677 680
         if os.geteuid() == 0:
    
    678 681
             pytest.skip("This is not testable with root permissions")
    
    679 682
     
    
    680 683
         project = str(datafiles)
    
    681 684
         dev_files_path = os.path.join(project, 'files', 'dev-files')
    
    682 685
         element_path = os.path.join(project, 'elements')
    
    683
    -    element_name = 'track-test-{}.bst'.format(kind)
    
    686
    +    element_name = 'track-test.bst'
    
    684 687
     
    
    685 688
         configure_project(project, {
    
    686 689
             'ref-storage': 'inline'
    
    687 690
         })
    
    688 691
     
    
    689
    -    repo = create_repo(kind, str(tmpdir))
    
    692
    +    repo = create_repo('git', str(tmpdir))
    
    690 693
         ref = repo.create(dev_files_path)
    
    691 694
     
    
    692 695
         element_full_path = os.path.join(element_path, element_name)
    

  • tests/frontend/workspace.py
    ... ... @@ -31,7 +31,7 @@ import shutil
    31 31
     import subprocess
    
    32 32
     from ruamel.yaml.comments import CommentedSet
    
    33 33
     from tests.testutils import cli, create_repo, ALL_REPO_KINDS, wait_for_cache_granularity
    
    34
    -from tests.testutils import create_artifact_share
    
    34
    +from tests.testutils import create_artifact_share, create_element_size
    
    35 35
     
    
    36 36
     from buildstream import _yaml
    
    37 37
     from buildstream._exceptions import ErrorDomain, LoadError, LoadErrorReason
    
    ... ... @@ -92,36 +92,38 @@ class WorkspaceCreater():
    92 92
                                     element_name))
    
    93 93
             return element_name, element_path, workspace_dir
    
    94 94
     
    
    95
    -    def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
    
    95
    +    def create_workspace_elements(self, kinds, track, suffixes=None, workspace_dir_usr=None,
    
    96 96
                                       element_attrs=None):
    
    97 97
     
    
    98 98
             element_tuples = []
    
    99 99
     
    
    100
    -        if suffixs is None:
    
    101
    -            suffixs = ['', ] * len(kinds)
    
    100
    +        if suffixes is None:
    
    101
    +            suffixes = ['', ] * len(kinds)
    
    102 102
             else:
    
    103
    -            if len(suffixs) != len(kinds):
    
    103
    +            if len(suffixes) != len(kinds):
    
    104 104
                     raise "terable error"
    
    105 105
     
    
    106
    -        for suffix, kind in zip(suffixs, kinds):
    
    106
    +        for suffix, kind in zip(suffixes, kinds):
    
    107 107
                 element_name, element_path, workspace_dir = \
    
    108 108
                     self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
    
    109 109
                                                   element_attrs)
    
    110
    -
    
    111
    -            # Assert that there is no reference, a track & fetch is needed
    
    112
    -            state = self.cli.get_element_state(self.project_path, element_name)
    
    113
    -            if track:
    
    114
    -                assert state == 'no reference'
    
    115
    -            else:
    
    116
    -                assert state == 'fetch needed'
    
    117 110
                 element_tuples.append((element_name, workspace_dir))
    
    118 111
     
    
    112
    +        # Assert that there is no reference, a track & fetch is needed
    
    113
    +        states = self.cli.get_element_states(self.project_path, [
    
    114
    +            e for e, _ in element_tuples
    
    115
    +        ])
    
    116
    +        if track:
    
    117
    +            assert not any(states[e] != 'no reference' for e, _ in element_tuples)
    
    118
    +        else:
    
    119
    +            assert not any(states[e] != 'fetch needed' for e, _ in element_tuples)
    
    120
    +
    
    119 121
             return element_tuples
    
    120 122
     
    
    121
    -    def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
    
    122
    -                        element_attrs=None):
    
    123
    +    def open_workspaces(self, kinds, track, suffixes=None, workspace_dir=None,
    
    124
    +                        element_attrs=None, no_checkout=False, no_cache=False):
    
    123 125
     
    
    124
    -        element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
    
    126
    +        element_tuples = self.create_workspace_elements(kinds, track, suffixes, workspace_dir,
    
    125 127
                                                             element_attrs)
    
    126 128
             os.makedirs(self.workspace_cmd, exist_ok=True)
    
    127 129
     
    
    ... ... @@ -130,33 +132,41 @@ class WorkspaceCreater():
    130 132
             args = ['workspace', 'open']
    
    131 133
             if track:
    
    132 134
                 args.append('--track')
    
    135
    +        if no_checkout:
    
    136
    +            args.append('--no-checkout')
    
    137
    +        if no_cache:
    
    138
    +            args.append('--no-cache')
    
    133 139
             if workspace_dir is not None:
    
    134 140
                 assert len(element_tuples) == 1, "test logic error"
    
    135 141
                 _, workspace_dir = element_tuples[0]
    
    136 142
                 args.extend(['--directory', workspace_dir])
    
    137
    -
    
    143
    +        print("element_tuples", element_tuples)
    
    138 144
             args.extend([element_name for element_name, workspace_dir_suffix in element_tuples])
    
    145
    +        print("args", args)
    
    139 146
             result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
    
    140 147
     
    
    141 148
             result.assert_success()
    
    142 149
     
    
    143
    -        for element_name, workspace_dir in element_tuples:
    
    144
    -            # Assert that we are now buildable because the source is
    
    145
    -            # now cached.
    
    146
    -            assert self.cli.get_element_state(self.project_path, element_name) == 'buildable'
    
    150
    +        if not no_checkout:
    
    151
    +            # Assert that we are now buildable because the source is now cached.
    
    152
    +            states = self.cli.get_element_states(self.project_path, [
    
    153
    +                e for e, _ in element_tuples
    
    154
    +            ])
    
    155
    +            assert not any(states[e] != 'buildable' for e, _ in element_tuples)
    
    147 156
     
    
    148
    -            # Check that the executable hello file is found in the workspace
    
    149
    -            filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
    
    150
    -            assert os.path.exists(filename)
    
    157
    +            # Check that the executable hello file is found in each workspace
    
    158
    +            for element_name, workspace_dir in element_tuples:
    
    159
    +                filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
    
    160
    +                assert os.path.exists(filename)
    
    151 161
     
    
    152
    -        return element_tuples
    
    162
    +        return element_tuples, result
    
    153 163
     
    
    154 164
     
    
    155 165
     def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
    
    156
    -                   project_path=None, element_attrs=None):
    
    166
    +                   project_path=None, element_attrs=None, no_checkout=False, no_cache=False):
    
    157 167
         workspace_object = WorkspaceCreater(cli, tmpdir, datafiles, project_path)
    
    158
    -    workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
    
    159
    -                                                  element_attrs)
    
    168
    +    workspaces, _ = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
    
    169
    +                                                     element_attrs, no_checkout, no_cache)
    
    160 170
         assert len(workspaces) == 1
    
    161 171
         element_name, workspace = workspaces[0]
    
    162 172
         return element_name, workspace_object.project_path, workspace
    
    ... ... @@ -190,7 +200,7 @@ def test_open_bzr_customize(cli, tmpdir, datafiles):
    190 200
     def test_open_multi(cli, tmpdir, datafiles):
    
    191 201
     
    
    192 202
         workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    
    193
    -    workspaces = workspace_object.open_workspaces(repo_kinds, False)
    
    203
    +    workspaces, _ = workspace_object.open_workspaces(repo_kinds, False)
    
    194 204
     
    
    195 205
         for (elname, workspace), kind in zip(workspaces, repo_kinds):
    
    196 206
             assert kind in elname
    
    ... ... @@ -824,7 +834,9 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg):
    824 834
                 "alpha.bst": {
    
    825 835
                     "prepared": False,
    
    826 836
                     "path": "/workspaces/bravo",
    
    827
    -                "running_files": {}
    
    837
    +                "running_files": {},
    
    838
    +                "cached_build": False
    
    839
    +
    
    828 840
                 }
    
    829 841
             }
    
    830 842
         }),
    
    ... ... @@ -839,7 +851,8 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg):
    839 851
                 "alpha.bst": {
    
    840 852
                     "prepared": False,
    
    841 853
                     "path": "/workspaces/bravo",
    
    842
    -                "running_files": {}
    
    854
    +                "running_files": {},
    
    855
    +                "cached_build": False
    
    843 856
                 }
    
    844 857
             }
    
    845 858
         }),
    
    ... ... @@ -857,7 +870,8 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg):
    857 870
                 "alpha.bst": {
    
    858 871
                     "prepared": False,
    
    859 872
                     "path": "/workspaces/bravo",
    
    860
    -                "running_files": {}
    
    873
    +                "running_files": {},
    
    874
    +                "cached_build": False
    
    861 875
                 }
    
    862 876
             }
    
    863 877
         }),
    
    ... ... @@ -882,7 +896,8 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg):
    882 896
                     "last_successful": "some_key",
    
    883 897
                     "running_files": {
    
    884 898
                         "beta.bst": ["some_file"]
    
    885
    -                }
    
    899
    +                },
    
    900
    +                "cached_build": False
    
    886 901
                 }
    
    887 902
             }
    
    888 903
         }),
    
    ... ... @@ -902,7 +917,30 @@ def test_list_unsupported_workspace(cli, tmpdir, datafiles, workspace_cfg):
    902 917
                 "alpha.bst": {
    
    903 918
                     "prepared": True,
    
    904 919
                     "path": "/workspaces/bravo",
    
    905
    -                "running_files": {}
    
    920
    +                "running_files": {},
    
    921
    +                "cached_build": False
    
    922
    +            }
    
    923
    +        }
    
    924
    +    }),
    
    925
    +    # Test loading version 4
    
    926
    +    ({
    
    927
    +        "format-version": 4,
    
    928
    +        "workspaces": {
    
    929
    +            "alpha.bst": {
    
    930
    +                "prepared": False,
    
    931
    +                "path": "/workspaces/bravo",
    
    932
    +                "running_files": {},
    
    933
    +                "cached_build": True
    
    934
    +            }
    
    935
    +        }
    
    936
    +    }, {
    
    937
    +        "format-version": BST_WORKSPACE_FORMAT_VERSION,
    
    938
    +        "workspaces": {
    
    939
    +            "alpha.bst": {
    
    940
    +                "prepared": False,
    
    941
    +                "path": "/workspaces/bravo",
    
    942
    +                "running_files": {},
    
    943
    +                "cached_build": True
    
    906 944
                 }
    
    907 945
             }
    
    908 946
         })
    
    ... ... @@ -1069,25 +1107,35 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles):
    1069 1107
     @pytest.mark.parametrize('subdir', [True, False], ids=["subdir", "no-subdir"])
    
    1070 1108
     @pytest.mark.parametrize("guess_element", [True, False], ids=["guess", "no-guess"])
    
    1071 1109
     def test_external_fetch(cli, datafiles, tmpdir_factory, subdir, guess_element):
    
    1072
    -    # Fetching from a workspace outside a project doesn't fail horribly
    
    1110
    +    # An element with an open workspace can't be fetched, but we still expect fetches
    
    1111
    +    # to fetch any dependencies
    
    1073 1112
         tmpdir = tmpdir_factory.mktemp('')
    
    1074
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
    
    1113
    +    depend_element = 'fetchable.bst'
    
    1114
    +
    
    1115
    +    # Create an element to fetch (local sources do not need to fetch)
    
    1116
    +    create_element_size(depend_element, str(datafiles), 'elements', [], 1024)
    
    1117
    +
    
    1118
    +    element_name, project, workspace = open_workspace(
    
    1119
    +        cli, tmpdir, datafiles, "git", False, no_checkout=True,
    
    1120
    +        element_attrs={'depends': [depend_element]}
    
    1121
    +    )
    
    1075 1122
         arg_elm = [element_name] if not guess_element else []
    
    1076 1123
     
    
    1077 1124
         if subdir:
    
    1078 1125
             call_dir = os.path.join(workspace, 'usr')
    
    1126
    +        os.makedirs(call_dir, exist_ok=True)
    
    1079 1127
         else:
    
    1080 1128
             call_dir = workspace
    
    1081 1129
     
    
    1130
    +    # Assert that the depended element is not fetched yet
    
    1131
    +    assert cli.get_element_state(str(datafiles), depend_element) == 'fetch needed'
    
    1132
    +
    
    1133
    +    # Fetch the workspaced element
    
    1082 1134
         result = cli.run(project=project, args=['-C', call_dir, 'source', 'fetch'] + arg_elm)
    
    1083 1135
         result.assert_success()
    
    1084 1136
     
    
    1085
    -    # We already fetched it by opening the workspace, but we're also checking
    
    1086
    -    # `bst show` works here
    
    1087
    -    result = cli.run(project=project,
    
    1088
    -                     args=['-C', call_dir, 'show', '--deps', 'none', '--format', '%{state}'] + arg_elm)
    
    1089
    -    result.assert_success()
    
    1090
    -    assert result.output.strip() == 'buildable'
    
    1137
    +    # Assert that the depended element has now been fetched
    
    1138
    +    assert cli.get_element_state(str(datafiles), depend_element) == 'buildable'
    
    1091 1139
     
    
    1092 1140
     
    
    1093 1141
     @pytest.mark.datafiles(DATA_DIR)
    
    ... ... @@ -1116,16 +1164,24 @@ def test_external_push_pull(cli, datafiles, tmpdir_factory, guess_element):
    1116 1164
     @pytest.mark.datafiles(DATA_DIR)
    
    1117 1165
     @pytest.mark.parametrize("guess_element", [True, False], ids=["guess", "no-guess"])
    
    1118 1166
     def test_external_track(cli, datafiles, tmpdir_factory, guess_element):
    
    1119
    -    # Tracking does not get horribly confused
    
    1120 1167
         tmpdir = tmpdir_factory.mktemp('')
    
    1121
    -    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", True)
    
    1168
    +    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
    
    1169
    +    element_file = os.path.join(str(datafiles), 'elements', element_name)
    
    1122 1170
         arg_elm = [element_name] if not guess_element else []
    
    1123 1171
     
    
    1124
    -    # The workspace is necessarily already tracked, so we only care that
    
    1125
    -    # there's no weird errors.
    
    1172
    +    # Delete the ref from the source so that we can detect if the
    
    1173
    +    # element has been tracked
    
    1174
    +    element_contents = _yaml.load(element_file)
    
    1175
    +    del element_contents['sources'][0]['ref']
    
    1176
    +    _yaml.dump(_yaml.node_sanitize(element_contents), element_file)
    
    1177
    +
    
    1126 1178
         result = cli.run(project=project, args=['-C', workspace, 'source', 'track'] + arg_elm)
    
    1127 1179
         result.assert_success()
    
    1128 1180
     
    
    1181
    +    # Element is tracked now
    
    1182
    +    element_contents = _yaml.load(element_file)
    
    1183
    +    assert 'ref' in element_contents['sources'][0]
    
    1184
    +
    
    1129 1185
     
    
    1130 1186
     @pytest.mark.datafiles(DATA_DIR)
    
    1131 1187
     def test_external_open_other(cli, datafiles, tmpdir_factory):
    
    ... ... @@ -1210,3 +1266,80 @@ def test_external_list(cli, datafiles, tmpdir_factory):
    1210 1266
     
    
    1211 1267
         result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
    
    1212 1268
         result.assert_success()
    
    1269
    +
    
    1270
    +
    
    1271
    +@pytest.mark.datafiles(DATA_DIR)
    
    1272
    +def test_nocache_open_messages(cli, tmpdir, datafiles):
    
    1273
    +
    
    1274
    +    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    
    1275
    +    _, result = workspace_object.open_workspaces(('git', ), False)
    
    1276
    +
    
    1277
    +    # cli default WARN for source dropback possibility when no-cache flag is not passed
    
    1278
    +    assert "WARNING: Workspace will be opened without the cached buildtree if not cached locally" in result.output
    
    1279
    +
    
    1280
    +    # cli WARN for source dropback happening when no-cache flag not given, but buildtree not available
    
    1281
    +    assert "workspace will be opened with source checkout" in result.stderr
    
    1282
    +
    
    1283
    +    # cli default WARN for source dropback possibilty not given when no-cache flag is passed
    
    1284
    +    tmpdir = os.path.join(str(tmpdir), "2")
    
    1285
    +    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    
    1286
    +    _, result = workspace_object.open_workspaces(('git', ), False, suffixes='1', no_cache=True)
    
    1287
    +
    
    1288
    +    assert "WARNING: Workspace will be opened without the cached buildtree if not cached locally" not in result.output
    
    1289
    +
    
    1290
    +
    
    1291
    +@pytest.mark.datafiles(DATA_DIR)
    
    1292
    +def test_nocache_reset_messages(cli, tmpdir, datafiles):
    
    1293
    +
    
    1294
    +    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
    
    1295
    +    workspaces, result = workspace_object.open_workspaces(('git', ), False)
    
    1296
    +    element_name, workspace = workspaces[0]
    
    1297
    +    project = workspace_object.project_path
    
    1298
    +
    
    1299
    +    # Modify workspace, without building so the artifact is not cached
    
    1300
    +    shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
    
    1301
    +    os.makedirs(os.path.join(workspace, 'etc'))
    
    1302
    +    with open(os.path.join(workspace, 'etc', 'pony.conf'), 'w') as f:
    
    1303
    +        f.write("PONY='pink'")
    
    1304
    +
    
    1305
    +    # Now reset the open workspace, this should have the
    
    1306
    +    # effect of reverting our changes to the original source, as it
    
    1307
    +    # was not originally opened with a cached buildtree and as such
    
    1308
    +    # should not notify the user
    
    1309
    +    result = cli.run(cwd=workspace_object.workspace_cmd, project=project, args=[
    
    1310
    +        'workspace', 'reset', element_name
    
    1311
    +    ])
    
    1312
    +    result.assert_success()
    
    1313
    +    assert "original buildtree artifact not available" not in result.output
    
    1314
    +    assert os.path.exists(os.path.join(workspace, 'usr', 'bin', 'hello'))
    
    1315
    +    assert not os.path.exists(os.path.join(workspace, 'etc', 'pony.conf'))
    
    1316
    +
    
    1317
    +    # Close the workspace
    
    1318
    +    result = cli.run(cwd=workspace_object.workspace_cmd, project=project, args=[
    
    1319
    +        'workspace', 'close', '--remove-dir', element_name
    
    1320
    +    ])
    
    1321
    +    result.assert_success()
    
    1322
    +
    
    1323
    +    # Build the workspace so we have a cached buildtree artifact for the element
    
    1324
    +    assert cli.get_element_state(project, element_name) == 'buildable'
    
    1325
    +    result = cli.run(project=project, args=['build', element_name])
    
    1326
    +    result.assert_success()
    
    1327
    +
    
    1328
    +    # Opening the workspace after a build should lead to the cached buildtree being
    
    1329
    +    # staged by default
    
    1330
    +    result = cli.run(cwd=workspace_object.workspace_cmd, project=project, args=[
    
    1331
    +        'workspace', 'open', element_name
    
    1332
    +    ])
    
    1333
    +    result.assert_success()
    
    1334
    +
    
    1335
    +    result = cli.run(cwd=workspace_object.workspace_cmd, project=project, args=[
    
    1336
    +        'workspace', 'list'
    
    1337
    +    ])
    
    1338
    +    result.assert_success()
    
    1339
    +    # Now reset the workspace and ensure that a warning is not given about the artifact
    
    1340
    +    # buildtree not being available
    
    1341
    +    result = cli.run(cwd=workspace_object.workspace_cmd, project=project, args=[
    
    1342
    +        'workspace', 'reset', element_name
    
    1343
    +    ])
    
    1344
    +    result.assert_success()
    
    1345
    +    assert "original buildtree artifact not available" not in result.output

  • tests/integration/workspace.py
    ... ... @@ -278,3 +278,39 @@ def test_incremental_configure_commands_run_only_once(cli, tmpdir, datafiles):
    278 278
         res = cli.run(project=project, args=['build', element_name])
    
    279 279
         res.assert_success()
    
    280 280
         assert not os.path.exists(os.path.join(workspace, 'prepared-again'))
    
    281
    +
    
    282
    +
    
    283
    +@pytest.mark.integration
    
    284
    +@pytest.mark.datafiles(DATA_DIR)
    
    285
    +@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    
    286
    +def test_workspace_contains_buildtree(cli, tmpdir, datafiles):
    
    287
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    288
    +    workspace = os.path.join(cli.directory, 'workspace')
    
    289
    +    element_name = 'autotools/amhello.bst'
    
    290
    +
    
    291
    +    # Ensure we're not using the shared artifact cache
    
    292
    +    cli.configure({
    
    293
    +        'artifactdir': os.path.join(str(tmpdir), 'artifacts')
    
    294
    +    })
    
    295
    +
    
    296
    +    # First open the workspace
    
    297
    +    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    298
    +    res.assert_success()
    
    299
    +
    
    300
    +    # Check that by default the buildtree wasn't staged as not yet available in the cache
    
    301
    +    assert not os.path.exists(os.path.join(workspace, 'src', 'hello'))
    
    302
    +
    
    303
    +    # Close the workspace, removing the dir
    
    304
    +    res = cli.run(project=project, args=['workspace', 'close', '--remove-dir', element_name])
    
    305
    +    res.assert_success()
    
    306
    +
    
    307
    +    # Build the element, so we have it cached along with the buildtreee
    
    308
    +    res = cli.run(project=project, args=['build', element_name])
    
    309
    +    res.assert_success()
    
    310
    +
    
    311
    +    # Open up the workspace, as the buildtree is cached by default it should open with the buildtree
    
    312
    +    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
    
    313
    +    res.assert_success()
    
    314
    +
    
    315
    +    # Check that the buildtree was staged, by asserting output of the build exists in the dir
    
    316
    +    assert os.path.exists(os.path.join(workspace, 'src', 'hello'))

  • tests/internals/utils.py deleted
    1
    -import os
    
    2
    -from unittest import mock
    
    3
    -
    
    4
    -from buildstream import _yaml
    
    5
    -
    
    6
    -from ..testutils.runcli import cli
    
    7
    -
    
    8
    -
    
    9
    -KiB = 1024
    
    10
    -MiB = (KiB * 1024)
    
    11
    -GiB = (MiB * 1024)
    
    12
    -TiB = (GiB * 1024)
    
    13
    -
    
    14
    -
    
    15
    -def test_parse_size_over_1024T(cli, tmpdir):
    
    16
    -    cli.configure({
    
    17
    -        'cache': {
    
    18
    -            'quota': 2048 * TiB
    
    19
    -        }
    
    20
    -    })
    
    21
    -    project = tmpdir.join("main")
    
    22
    -    os.makedirs(str(project))
    
    23
    -    _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
    
    24
    -
    
    25
    -    volume_space_patch = mock.patch(
    
    26
    -        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
    
    27
    -        autospec=True,
    
    28
    -        return_value=(1025 * TiB, 1025 * TiB)
    
    29
    -    )
    
    30
    -
    
    31
    -    with volume_space_patch:
    
    32
    -        result = cli.run(project, args=["build", "file.bst"])
    
    33
    -        failure_msg = 'Your system does not have enough available space to support the cache quota specified.'
    
    34
    -        assert failure_msg in result.stderr

  • tests/sources/remote.py
    ... ... @@ -136,18 +136,25 @@ def test_unique_key(cli, tmpdir, datafiles):
    136 136
         '''
    
    137 137
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    138 138
         generate_project(project, tmpdir)
    
    139
    -    assert cli.get_element_state(project, 'target.bst') == "fetch needed"
    
    140
    -    assert cli.get_element_state(project, 'target-custom.bst') == "fetch needed"
    
    141
    -    assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
    
    139
    +    states = cli.get_element_states(project, [
    
    140
    +        'target.bst', 'target-custom.bst', 'target-custom-executable.bst'
    
    141
    +    ])
    
    142
    +    assert states['target.bst'] == "fetch needed"
    
    143
    +    assert states['target-custom.bst'] == "fetch needed"
    
    144
    +    assert states['target-custom-executable.bst'] == "fetch needed"
    
    145
    +
    
    142 146
         # Try to fetch it
    
    143 147
         result = cli.run(project=project, args=[
    
    144 148
             'source', 'fetch', 'target.bst'
    
    145 149
         ])
    
    146 150
     
    
    147 151
         # We should download the file only once
    
    148
    -    assert cli.get_element_state(project, 'target.bst') == 'buildable'
    
    149
    -    assert cli.get_element_state(project, 'target-custom.bst') == 'buildable'
    
    150
    -    assert cli.get_element_state(project, 'target-custom-executable.bst') == 'buildable'
    
    152
    +    states = cli.get_element_states(project, [
    
    153
    +        'target.bst', 'target-custom.bst', 'target-custom-executable.bst'
    
    154
    +    ])
    
    155
    +    assert states['target.bst'] == 'buildable'
    
    156
    +    assert states['target-custom.bst'] == 'buildable'
    
    157
    +    assert states['target-custom-executable.bst'] == 'buildable'
    
    151 158
     
    
    152 159
         # But the cache key is different because the 'filename' is different.
    
    153 160
         assert cli.get_element_key(project, 'target.bst') != \
    

  • tests/testutils/runcli.py
    ... ... @@ -398,13 +398,12 @@ class Cli():
    398 398
         #
    
    399 399
         # Returns a dictionary with the element names as keys
    
    400 400
         #
    
    401
    -    def get_element_states(self, project, target, deps='all'):
    
    401
    +    def get_element_states(self, project, targets, deps='all'):
    
    402 402
             result = self.run(project=project, silent=True, args=[
    
    403 403
                 'show',
    
    404 404
                 '--deps', deps,
    
    405 405
                 '--format', '%{name}||%{state}',
    
    406
    -            target
    
    407
    -        ])
    
    406
    +        ] + targets)
    
    408 407
             result.assert_success()
    
    409 408
             lines = result.output.splitlines()
    
    410 409
             states = {}
    



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]