[Notes] [Git][BuildStream/buildstream][raoul/870-root-cache-dir] 14 commits: _frontend/app.py: Initialize logging before preflighting the artifact cache




Raoul Hidalgo Charman pushed to branch raoul/870-root-cache-dir at BuildStream / buildstream


30 changed files:

Changes:

  • buildstream/_artifactcache.py
    @@ -62,9 +62,6 @@ class ArtifactCache():
             self.project_remote_specs = {}

             self._required_elements = set()       # The elements required for this session
    -        self._cache_size = None               # The current cache size, sometimes it's an estimate
    -        self._cache_quota = None              # The cache quota
    -        self._cache_lower_threshold = None    # The target cache size for a cleanup
             self._remotes_setup = False           # Check to prevent double-setup of remotes

             # Per-project list of _CASRemote instances.
    @@ -216,11 +213,33 @@ class ArtifactCache():
         #
         # Clean the artifact cache as much as possible.
         #
    +    # Args:
    +    #    progress (callable): A callback to call when a ref is removed
    +    #
         # Returns:
         #    (int): The size of the cache after having cleaned up
         #
    -    def clean(self):
    +    def clean(self, progress=None):
             artifacts = self.list_artifacts()
    +        context = self.context
    +
    +        # Some accumulative statistics
    +        removed_ref_count = 0
    +        space_saved = 0
    +
    +        # Start off with an announcement with as much info as possible
    +        volume_size, volume_avail = self.cas._get_cache_volume_size()
    +        self._message(MessageType.STATUS, "Starting cache cleanup",
    +                      detail=("Elements required by the current build plan: {}\n" +
    +                              "User specified quota: {} ({})\n" +
    +                              "Cache usage: {}\n" +
    +                              "Cache volume: {} total, {} available")
    +                      .format(len(self._required_elements),
    +                              context.config_cache_quota,
    +                              utils._pretty_size(self._cache_quota_original, dec_places=2),
    +                              utils._pretty_size(self.cas.get_cache_size(), dec_places=2),
    +                              utils._pretty_size(volume_size, dec_places=2),
    +                              utils._pretty_size(volume_avail, dec_places=2)))

             # Build a set of the cache keys which are required
             # based on the required elements at cleanup time
    @@ -235,9 +254,10 @@ class ArtifactCache():
                 ])

             # Do a real computation of the cache size once, just in case
    -        self.compute_cache_size()
    +        usage = self.cas.compute_cache_size()
    +        self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))

    -        while self.get_cache_size() >= self._cache_lower_threshold:
    +        while self.cas.get_cache_size() >= self._cache_lower_threshold:
                 try:
                     to_remove = artifacts.pop(0)
                 except IndexError:
    @@ -245,11 +265,18 @@
                     # can't remove them, we have to abort the build.
                     #
                     # FIXME: Asking the user what to do may be neater
    +                #
                     default_conf = os.path.join(os.environ['XDG_CONFIG_HOME'],
                                                 'buildstream.conf')
    -                detail = ("There is not enough space to complete the build.\n"
    -                          "Please increase the cache-quota in {}."
    -                          .format(self.context.config_origin or default_conf))
    +                detail = ("Aborted after removing {} refs and saving {} disk space.\n"
    +                          "The remaining {} in the cache is required by the {} elements in your build plan\n\n"
    +                          "There is not enough space to complete the build.\n"
    +                          "Please increase the cache-quota in {} and/or make more disk space."
    +                          .format(removed_ref_count,
    +                                  utils._pretty_size(space_saved, dec_places=2),
    +                                  utils._pretty_size(self.cas.get_cache_size(), dec_places=2),
    +                                  len(self._required_elements),
    +                                  (context.config_origin or default_conf)))

                     if self.has_quota_exceeded():
                         raise ArtifactError("Cache too full. Aborting.",
    @@ -264,24 +291,34 @@ class ArtifactCache():
                     # Remove the actual artifact, if it's not required.
                     size = self.remove(to_remove)

    -                # Remove the size from the removed size
    -                self.set_cache_size(self._cache_size - size)
    +                removed_ref_count += 1
    +                space_saved += size

    -        # This should be O(1) if implemented correctly
    -        return self.get_cache_size()
    +                self._message(MessageType.STATUS,
    +                              "Freed {: <7} {}".format(
    +                                  utils._pretty_size(size, dec_places=2),
    +                                  to_remove))

    -    # compute_cache_size()
    -    #
    -    # Computes the real artifact cache size by calling
    -    # the abstract calculate_cache_size() method.
    -    #
    -    # Returns:
    -    #    (int): The size of the artifact cache.
    -    #
    -    def compute_cache_size(self):
    -        self._cache_size = self.cas.calculate_cache_size()
    +                # Remove the size from the removed size
    +                self.cas.set_cache_size(self.cas._cache_size - size)

    -        return self._cache_size
    +                # User callback
    +                #
    +                # Currently this process is fairly slow, but we should
    +                # think about throttling this progress() callback if this
    +                # becomes too intense.
    +                if progress:
    +                    progress()
    +
    +        # Informational message about the side effects of the cleanup
    +        self._message(MessageType.INFO, "Cleanup completed",
    +                      detail=("Removed {} refs and saved {} disk space.\n" +
    +                              "Cache usage is now: {}")
    +                      .format(removed_ref_count,
    +                              utils._pretty_size(space_saved, dec_places=2),
    +                              utils._pretty_size(self.cas.get_cache_size(), dec_places=2)))
    +
    +        return self.cas.get_cache_size()

         # add_artifact_size()
         #
    @@ -292,51 +329,10 @@ class ArtifactCache():
         #     artifact_size (int): The size to add.
         #
         def add_artifact_size(self, artifact_size):
    -        cache_size = self.get_cache_size()
    +        cache_size = self.cas.get_cache_size()
             cache_size += artifact_size

    -        self.set_cache_size(cache_size)
    -
    -    # get_cache_size()
    -    #
    -    # Fetches the cached size of the cache, this is sometimes
    -    # an estimate and periodically adjusted to the real size
    -    # when a cache size calculation job runs.
    -    #
    -    # When it is an estimate, the value is either correct, or
    -    # it is greater than the actual cache size.
    -    #
    -    # Returns:
    -    #     (int) An approximation of the artifact cache size.
    -    #
    -    def get_cache_size(self):
    -
    -        # If we don't currently have an estimate, figure out the real cache size.
    -        if self._cache_size is None:
    -            stored_size = self._read_cache_size()
    -            if stored_size is not None:
    -                self._cache_size = stored_size
    -            else:
    -                self.compute_cache_size()
    -
    -        return self._cache_size
    -
    -    # set_cache_size()
    -    #
    -    # Forcefully set the overall cache size.
    -    #
    -    # This is used to update the size in the main process after
    -    # having calculated in a cleanup or a cache size calculation job.
    -    #
    -    # Args:
    -    #     cache_size (int): The size to set.
    -    #
    -    def set_cache_size(self, cache_size):
    -
    -        assert cache_size is not None
    -
    -        self._cache_size = cache_size
    -        self._write_cache_size(self._cache_size)
    +        self.cas.set_cache_size(cache_size)

         # has_quota_exceeded()
         #
    @@ -346,7 +342,7 @@ class ArtifactCache():
         #    (bool): True of the quota is exceeded
         #
         def has_quota_exceeded(self):
    -        return self.get_cache_size() > self._cache_quota
    +        return self.cas.get_cache_size() > self._cache_quota

         # preflight():
         #
    @@ -459,8 +455,7 @@ class ArtifactCache():
         #                          `ArtifactCache.get_artifact_fullname`)
         #
         # Returns:
    -    #    (int|None) The amount of space pruned from the repository in
    -    #               Bytes, or None if defer_prune is True
    +    #    (int): The amount of space recovered in the cache, in bytes
         #
         def remove(self, ref):

    @@ -793,44 +788,6 @@ class ArtifactCache():
             with self.context.timed_activity("Initializing remote caches", silent_nested=True):
                 self.initialize_remotes(on_failure=remote_failed)

    -    # _write_cache_size()
    -    #
    -    # Writes the given size of the artifact to the cache's size file
    -    #
    -    # Args:
    -    #    size (int): The size of the artifact cache to record
    -    #
    -    def _write_cache_size(self, size):
    -        assert isinstance(size, int)
    -        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
    -        with utils.save_file_atomic(size_file_path, "w") as f:
    -            f.write(str(size))
    -
    -    # _read_cache_size()
    -    #
    -    # Reads and returns the size of the artifact cache that's stored in the
    -    # cache's size file
    -    #
    -    # Returns:
    -    #    (int): The size of the artifact cache, as recorded in the file
    -    #
    -    def _read_cache_size(self):
    -        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
    -
    -        if not os.path.exists(size_file_path):
    -            return None
    -
    -        with open(size_file_path, "r") as f:
    -            size = f.read()
    -
    -        try:
    -            num_size = int(size)
    -        except ValueError as e:
    -            raise ArtifactError("Size '{}' parsed from '{}' was not an integer".format(
    -                size, size_file_path)) from e
    -
    -        return num_size
    -
         # _calculate_cache_quota()
         #
         # Calculates and sets the cache quota and lower threshold based on the
    @@ -848,20 +805,16 @@
             else:
                 headroom = 2e9

    -        artifactdir_volume = self.context.artifactdir
    -        while not os.path.exists(artifactdir_volume):
    -            artifactdir_volume = os.path.dirname(artifactdir_volume)
    -
             try:
    -            cache_quota = utils._parse_size(self.context.config_cache_quota, artifactdir_volume)
    +            cache_quota = utils._parse_size(self.context.config_cache_quota,
    +                                            self.context.artifactdir)
             except utils.UtilError as e:
                 raise LoadError(LoadErrorReason.INVALID_DATA,
                                 "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
                                 "\nValid values are, for example: 800M 10G 1T 50%\n"
                                 .format(str(e))) from e
    -
    -        available_space, total_size = self._get_volume_space_info_for(artifactdir_volume)
    -        cache_size = self.get_cache_size()
    +        total_size, available_space = self.cas._get_cache_volume_size()
    +        cache_size = self.cas.get_cache_size()

             # Ensure system has enough storage for the cache_quota
             #
    @@ -900,23 +853,10 @@
             # if we end up writing more than 2G, but hey, this stuff is
             # already really fuzzy.
             #
    +        self._cache_quota_original = cache_quota
             self._cache_quota = cache_quota - headroom
             self._cache_lower_threshold = self._cache_quota / 2

    -    # _get_volume_space_info_for
    -    #
    -    # Get the available space and total space for the given volume
    -    #
    -    # Args:
    -    #     volume: volume for which to get the size
    -    #
    -    # Returns:
    -    #     A tuple containing first the availabe number of bytes on the requested
    -    #     volume, then the total number of bytes of the volume.
    -    def _get_volume_space_info_for(self, volume):
    -        stat = os.statvfs(volume)
    -        return stat.f_bsize * stat.f_bavail, stat.f_bsize * stat.f_blocks
    -

     # _configured_remote_artifact_cache_specs():
     #

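Note: the reworked clean() above takes an optional progress callable, invoked once per removed ref. A minimal sketch of how a caller might drive it (the run_cleanup() wrapper here is hypothetical; the real caller is CleanupJob further down):

    def run_cleanup(artifacts):
        # clean() invokes the callback once for every ref it removes
        removed = [0]

        def on_ref_removed():
            removed[0] += 1

        final_size = artifacts.clean(progress=on_ref_removed)
        return removed[0], final_size
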
  • buildstream/_cas/__init__.py
    @@ -17,5 +17,5 @@
     #  Authors:
     #        Tristan Van Berkom <tristan vanberkom codethink co uk>

    -from .cascache import CASCache
    +from .cascache import CASCache, CASCacheUsage
     from .casremote import CASRemote, CASRemoteSpec

  • buildstream/_cas/cascache.py
    @@ -36,20 +36,61 @@ from .._exceptions import CASCacheError
     from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate


    +CACHE_SIZE_FILE = "cache_size"
    +
    +
    +# CASCacheUsage
    +#
    +# A simple object to report the current CAS cache usage details.
    +#
    +# Note that this uses the user configured cache quota
    +# rather than the internal quota with protective headroom
    +# removed, to provide a more sensible value to display to
    +# the user.
    +#
    +# Args:
    +#    cas (CASCache): The CAS cache to get the status of
    +#
    +class CASCacheUsage():
    +
    +    def __init__(self, cas):
    +        self.quota_config = cas._config_cache_quota          # Configured quota
    +        self.quota_size = cas._cache_quota_original          # Resolved cache quota in bytes
    +        self.used_size = cas.get_cache_size()                # Size used by artifacts in bytes
    +        self.used_percent = 0                                # Percentage of the quota used
    +        if self.quota_size is not None:
    +            self.used_percent = int(self.used_size * 100 / self.quota_size)
    +
    +    # Formattable into a human readable string
    +    #
    +    def __str__(self):
    +        return "{} / {} ({}%)" \
    +            .format(utils._pretty_size(self.used_size, dec_places=1),
    +                    self.quota_config,
    +                    self.used_percent)
    +
    +
     # A CASCache manages a CAS repository as specified in the Remote Execution API.
     #
     # Args:
     #     path (str): The root directory for the CAS repository
    +#     cache_quota (int): User configured cache quota
     #
     class CASCache():

    -    def __init__(self, path):
    +    def __init__(self, path, cache_quota):
             self.casdir = os.path.join(path, 'cas')
             self.tmpdir = os.path.join(path, 'tmp')
             os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
             os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
             os.makedirs(self.tmpdir, exist_ok=True)

    +        self._config_cache_quota = cache_quota
    +        self._cache_size = None               # The current cache size, sometimes it's an estimate
    +        self._cache_quota = None              # The cache quota
    +        self._cache_quota_original = None     # The cache quota as specified by the user, in bytes
    +        self._cache_lower_threshold = None    # The target cache size for a cleanup
    +
         # preflight():
         #
         # Preflight check.
    @@ -587,6 +628,65 @@ class CASCache():
             reachable = set()
             self._reachable_refs_dir(reachable, tree, update_mtime=True)

    +    # compute_cache_size()
    +    #
    +    # Computes the real artifact cache size by calling
    +    # the abstract calculate_cache_size() method.
    +    #
    +    # Returns:
    +    #    (int): The size of the artifact cache.
    +    #
    +    def compute_cache_size(self):
    +        old_cache_size = self._cache_size
    +        new_cache_size = self.calculate_cache_size()
    +
    +        if old_cache_size != new_cache_size:
    +            self._cache_size = new_cache_size
    +
    +        return self._cache_size
    +
    +    # get_cache_size()
    +    #
    +    # Fetches the cached size of the cache, this is sometimes
    +    # an estimate and periodically adjusted to the real size
    +    # when a cache size calculation job runs.
    +    #
    +    # When it is an estimate, the value is either correct, or
    +    # it is greater than the actual cache size.
    +    #
    +    # Returns:
    +    #     (int) An approximation of the artifact cache size, in bytes.
    +    #
    +    def get_cache_size(self):
    +
    +        # If we don't currently have an estimate, figure out the real cache size.
    +        if self._cache_size is None:
    +            stored_size = self._read_cache_size()
    +            if stored_size is not None:
    +                self._cache_size = stored_size
    +            else:
    +                self._cache_size = self.compute_cache_size()
    +                # self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
    +
    +        return self._cache_size
    +
    +    # set_cache_size()
    +    #
    +    # Forcefully set the overall cache size.
    +    #
    +    # This is used to update the size in the main process after
    +    # having calculated in a cleanup or a cache size calculation job.
    +    #
    +    # Args:
    +    #     cache_size (int): The size to set.
    +    #
    +    def set_cache_size(self, cache_size):
    +
    +        assert cache_size is not None
    +
    +        self._cache_size = cache_size
    +        self._write_cache_size(self._cache_size)
    +
         ################################################
         #             Local Private Methods            #
         ################################################
    @@ -1015,6 +1115,60 @@ class CASCache():
             # Send final batch
             batch.send()

    +    # _read_cache_size()
    +    #
    +    # Reads and returns the size of the artifact cache that's stored in the
    +    # cache's size file
    +    #
    +    # Returns:
    +    #    (int): The size of the artifact cache, as recorded in the file
    +    #
    +    def _read_cache_size(self):
    +        size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
    +
    +        if not os.path.exists(size_file_path):
    +            return None
    +
    +        with open(size_file_path, "r") as f:
    +            size = f.read()
    +
    +        try:
    +            num_size = int(size)
    +        except ValueError as e:
    +            raise CASCacheError("Size '{}' parsed from '{}' was not an integer".format(
    +                size, size_file_path)) from e
    +
    +        return num_size
    +
    +    # _write_cache_size()
    +    #
    +    # Writes the given size of the artifact to the cache's size file
    +    #
    +    # Args:
    +    #    size (int): The size of the artifact cache to record
    +    #
    +    def _write_cache_size(self, size):
    +        assert isinstance(size, int)
    +        size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
    +        with utils.save_file_atomic(size_file_path, "w") as f:
    +            f.write(str(size))
    +
    +    # _get_cache_volume_size()
    +    #
    +    # Get the available space and total space for the volume on
    +    # which the artifact cache is located.
    +    #
    +    # Returns:
    +    #    (int): The total number of bytes on the volume
    +    #    (int): The number of available bytes on the volume
    +    #
    +    # NOTE: We use this stub to allow the test cases
    +    #       to override what an artifact cache thinks
    +    #       about its disk size and available bytes.
    +    #
    +    def _get_cache_volume_size(self):
    +        return utils._get_volume_size(self.casdir)
    +

     def _grouper(iterable, n):
         while True:

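Note: the new CASCacheUsage object is what the frontend formats into the cache usage report. A rough sketch of its behaviour under the definitions above (path and quota are placeholder values; until _calculate_cache_quota() has resolved _cache_quota_original, used_percent reads 0):

    from buildstream._cas import CASCache, CASCacheUsage

    cas = CASCache('/home/user/.cache/buildstream', '10G')
    usage = CASCacheUsage(cas)
    print(usage)   # e.g. "1.3G / 10G (13%)" once the quota is resolved
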
  • buildstream/_cas/casserver.py
    @@ -61,7 +61,7 @@ class ArtifactTooLargeException(Exception):
     def create_server(repo, *, enable_push,
                       max_head_size=int(10e9),
                       min_head_size=int(2e9)):
    -    cas = CASCache(os.path.abspath(repo))
    +    cas = CASCache(os.path.abspath(repo), max_head_size)

         # Use max_workers default from Python 3.5+
         max_workers = (os.cpu_count() or 1) * 5

  • buildstream/_context.py
    @@ -31,7 +31,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
     from ._message import Message, MessageType
     from ._profile import Topics, profile_start, profile_end
     from ._artifactcache import ArtifactCache
    -from ._cas import CASCache
    +from ._cas import CASCache, CASCacheUsage
     from ._workspaces import Workspaces, WorkspaceProjectCache
     from .plugin import _plugin_lookup
     from .sandbox import SandboxRemote
    @@ -58,12 +58,21 @@ class Context():
             # Filename indicating which configuration file was used, or None for the defaults
             self.config_origin = None

    +        # The directory under which other directories are based
    +        self.rootcachedir = None
    +
             # The directory where various sources are stored
             self.sourcedir = None

             # The directory where build sandboxes will be created
             self.builddir = None

    +        # The directory for CAS
    +        self.casdir = None
    +
    +        # The directory for temporary files
    +        self.tmpdir = None
    +
             # Default root location for workspaces
             self.workspacedir = None

    @@ -188,13 +197,30 @@
                 user_config = _yaml.load(config)
                 _yaml.composite(defaults, user_config)

    +        # Give deprecation warnings
    +        if defaults.get('builddir'):
    +            print("builddir is deprecated, use rootcachedir")
    +        else:
    +            defaults['builddir'] = os.path.join(defaults['rootcachedir'], 'build')
    +
    +        if defaults.get('artifactdir'):
    +            print("artifactdir is deprecated, use rootcachedir")
    +        else:
    +            defaults['artifactdir'] = os.path.join(defaults['rootcachedir'], 'artifacts')
    +
             _yaml.node_validate(defaults, [
    -            'sourcedir', 'builddir', 'artifactdir', 'logdir',
    +            'rootcachedir', 'sourcedir', 'builddir', 'artifactdir', 'logdir',
                 'scheduler', 'artifacts', 'logging', 'projects',
    -            'cache', 'prompt', 'workspacedir', 'remote-execution'
    +            'cache', 'prompt', 'workspacedir', 'remote-execution',
             ])

    -        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
    +        # add directories not set by users
    +        defaults['tmpdir'] = os.path.join(defaults['rootcachedir'], 'tmp')
    +        defaults['casdir'] = os.path.join(defaults['rootcachedir'], 'cas')
    +
    +        for directory in ['rootcachedir', 'sourcedir', 'builddir',
    +                          'artifactdir', 'logdir', 'workspacedir', 'casdir',
    +                          'tmpdir']:
                 # Allow the ~ tilde expansion and any environment variables in
                 # path specification in the config files.
                 #
    @@ -289,6 +315,16 @@

             return self._artifactcache

    +    # get_cache_usage()
    +    #
    +    # Fetches the current usage of the artifact cache
    +    #
    +    # Returns:
    +    #     (CASCacheUsage): The current status
    +    #
    +    def get_cache_usage(self):
    +        return CASCacheUsage(self.get_cascache())
    +
         # add_project():
         #
         # Add a project to the context.
    @@ -654,7 +690,7 @@

         def get_cascache(self):
             if self._cascache is None:
    -            self._cascache = CASCache(self.artifactdir)
    +            self._cascache = CASCache(self.rootcachedir, self.config_cache_quota)
             return self._cascache


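Note: directory resolution in Context now hangs everything off rootcachedir unless the deprecated keys are given. Approximately, in plain Python ('defaults' stands in for the loaded YAML node):

    import os

    defaults = {'rootcachedir': os.path.expanduser('~/.cache/buildstream')}
    # casdir and tmpdir are always derived; builddir and artifactdir only
    # when the user did not set the deprecated keys themselves.
    for key, subdir in [('builddir', 'build'), ('artifactdir', 'artifacts'),
                        ('casdir', 'cas'), ('tmpdir', 'tmp')]:
        defaults.setdefault(key, os.path.join(defaults['rootcachedir'], subdir))
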
  • buildstream/_frontend/app.py
    @@ -194,11 +194,6 @@ class App():
             except BstError as e:
                 self._error_exit(e, "Error instantiating platform")

    -        try:
    -            self.context.artifactcache.preflight()
    -        except BstError as e:
    -            self._error_exit(e, "Error instantiating artifact cache")
    -
             # Create the logger right before setting the message handler
             self.logger = LogLine(self.context,
                                   self._content_profile,
    @@ -211,6 +206,13 @@
             # Propagate pipeline feedback to the user
             self.context.set_message_handler(self._message_handler)

    +        # Preflight the artifact cache after initializing logging,
    +        # this can cause messages to be emitted.
    +        try:
    +            self.context.artifactcache.preflight()
    +        except BstError as e:
    +            self._error_exit(e, "Error instantiating artifact cache")
    +
             #
             # Load the Project
             #

  • buildstream/_frontend/cli.py
    @@ -11,7 +11,6 @@ from .._exceptions import BstError, LoadError, AppError
     from .._versions import BST_FORMAT_VERSION
     from .complete import main_bashcomplete, complete_path, CompleteUnhandled

    -
     ##################################################################
     #            Override of click's main entry point                #
     ##################################################################

  • buildstream/_frontend/status.py
    @@ -353,13 +353,17 @@ class _StatusHeader():
         def render(self, line_length, elapsed):
             project = self._context.get_toplevel_project()
             line_length = max(line_length, 80)
    -        size = 0
    -        text = ''

    +        #
    +        # Line 1: Session time, project name, session / total elements
    +        #
    +        #  ========= 00:00:00 project-name (143/387) =========
    +        #
             session = str(len(self._stream.session_elements))
             total = str(len(self._stream.total_elements))

    -        # Format and calculate size for target and overall time code
    +        size = 0
    +        text = ''
             size += len(total) + len(session) + 4  # Size for (N/N) with a leading space
             size += 8  # Size of time code
             size += len(project.name) + 1
    @@ -372,6 +376,12 @@
                     self._format_profile.fmt(')')

             line1 = self._centered(text, size, line_length, '=')
    +
    +        #
    +        # Line 2: Dynamic list of queue status reports
    +        #
    +        #  (Fetched:0 117 0)→ (Built:4 0 0)
    +        #
             size = 0
             text = ''

    @@ -389,10 +399,28 @@

             line2 = self._centered(text, size, line_length, ' ')

    -        size = 24
    -        text = self._format_profile.fmt("~~~~~ ") + \
    -            self._content_profile.fmt('Active Tasks') + \
    -            self._format_profile.fmt(" ~~~~~")
    +        #
    +        # Line 3: Cache usage percentage report
    +        #
    +        #  ~~~~~~ cache: 69% ~~~~~~
    +        #
    +        usage = self._context.get_cache_usage()
    +        usage_percent = '{}%'.format(usage.used_percent)
    +
    +        size = 21
    +        size += len(usage_percent)
    +        if usage.used_percent >= 95:
    +            formatted_usage_percent = self._error_profile.fmt(usage_percent)
    +        elif usage.used_percent >= 80:
    +            formatted_usage_percent = self._content_profile.fmt(usage_percent)
    +        else:
    +            formatted_usage_percent = self._success_profile.fmt(usage_percent)
    +
    +        text = self._format_profile.fmt("~~~~~~ ") + \
    +            self._content_profile.fmt('cache') + \
    +            self._format_profile.fmt(': ') + \
    +            formatted_usage_percent + \
    +            self._format_profile.fmt(' ~~~~~~')
             line3 = self._centered(text, size, line_length, ' ')

             return line1 + '\n' + line2 + '\n' + line3

  • buildstream/_frontend/widget.py
    @@ -452,6 +452,7 @@ class LogLine(Widget):
             values["Session Start"] = starttime.strftime('%A, %d-%m-%Y at %H:%M:%S')
             values["Project"] = "{} ({})".format(project.name, project.directory)
             values["Targets"] = ", ".join([t.name for t in stream.targets])
    +        values["Cache Usage"] = "{}".format(context.get_cache_usage())
             text += self._format_values(values)

             # User configurations

  • buildstream/_scheduler/jobs/cachesizejob.py
    @@ -25,14 +25,14 @@ class CacheSizeJob(Job):
             self._complete_cb = complete_cb

             context = self._scheduler.context
    -        self._artifacts = context.artifactcache
    +        self._cas = context.get_cascache()

         def child_process(self):
    -        return self._artifacts.compute_cache_size()
    +        return self._cas.compute_cache_size()

         def parent_complete(self, status, result):
             if status == JobStatus.OK:
    -            self._artifacts.set_cache_size(result)
    +            self._cas.set_cache_size(result)

             if self._complete_cb:
                 self._complete_cb(status, result)

  • buildstream/_scheduler/jobs/cleanupjob.py
    @@ -25,14 +25,27 @@ class CleanupJob(Job):
             self._complete_cb = complete_cb

             context = self._scheduler.context
    +        self._cas = context.get_cascache()
             self._artifacts = context.artifactcache

         def child_process(self):
    -        return self._artifacts.clean()
    +        def progress():
    +            self.send_message('update-cache-size',
    +                              self._cas.get_cache_size())
    +        return self._artifacts.clean(progress)
    +
    +    def handle_message(self, message_type, message):
    +        # Update the cache size in the main process as we go,
    +        # this provides better feedback in the UI.
    +        if message_type == 'update-cache-size':
    +            self._cas.set_cache_size(message)
    +            return True
    +
    +        return False

         def parent_complete(self, status, result):
             if status == JobStatus.OK:
    -            self._artifacts.set_cache_size(result)
    +            self._cas.set_cache_size(result)

             if self._complete_cb:
                 self._complete_cb(status, result)

  • buildstream/_scheduler/jobs/job.py
    @@ -58,10 +58,10 @@ class JobStatus():


     # Used to distinguish between status messages and return values
    -class Envelope():
    +class _Envelope():
         def __init__(self, message_type, message):
    -        self._message_type = message_type
    -        self._message = message
    +        self.message_type = message_type
    +        self.message = message


     # Process class that doesn't call waitpid on its own.
    @@ -117,6 +117,8 @@ class Job():
             self._logfile = logfile
             self._task_id = None

    +        print("job init")
    +
         # spawn()
         #
         # Spawns the job.
    @@ -275,10 +277,37 @@ class Job():
         def set_task_id(self, task_id):
             self._task_id = task_id

    +    # send_message()
    +    #
    +    # To be called from inside Job.child_process() implementations
    +    # to send messages to the main process during processing.
    +    #
    +    # These messages will be processed by the class's Job.handle_message()
    +    # implementation.
    +    #
    +    def send_message(self, message_type, message):
    +        self._queue.put(_Envelope(message_type, message))
    +
         #######################################################
         #                  Abstract Methods                   #
         #######################################################

    +    # handle_message()
    +    #
    +    # Handle a custom message. This will be called in the main process in
    +    # response to any messages sent to the main process using the
    +    # Job.send_message() API from inside a Job.child_process() implementation
    +    #
    +    # Args:
    +    #    message_type (str): A string to identify the message type
    +    #    message (any): A simple serializable object
    +    #
    +    # Returns:
    +    #    (bool): Should return a truthy value if message_type is handled.
    +    #
    +    def handle_message(self, message_type, message):
    +        return False
    +
         # parent_complete()
         #
         # This will be executed after the job finishes, and is expected to
    @@ -354,7 +383,6 @@ class Job():
         #    queue (multiprocessing.Queue): The message queue for IPC
         #
         def _child_action(self, queue):
    -
             # This avoids some SIGTSTP signals from grandchildren
             # getting propagated up to the master process
             os.setsid()
    @@ -416,7 +444,7 @@
                                      elapsed=elapsed, detail=e.detail,
                                      logfile=filename, sandbox=e.sandbox)

    -                self._queue.put(Envelope('child_data', self.child_process_data()))
    +                self._queue.put(_Envelope('child_data', self.child_process_data()))

                     # Report the exception to the parent (for internal testing purposes)
                     self._child_send_error(e)
    @@ -442,7 +470,7 @@

                 else:
                     # No exception occurred in the action
    -                self._queue.put(Envelope('child_data', self.child_process_data()))
    +                self._queue.put(_Envelope('child_data', self.child_process_data()))
                     self._child_send_result(result)

                     elapsed = datetime.datetime.now() - starttime
    @@ -469,7 +497,7 @@
                 domain = e.domain
                 reason = e.reason

    -        envelope = Envelope('error', {
    +        envelope = _Envelope('error', {
                 'domain': domain,
                 'reason': reason
             })
    @@ -487,7 +515,7 @@
         #
         def _child_send_result(self, result):
             if result is not None:
    -            envelope = Envelope('result', result)
    +            envelope = _Envelope('result', result)
                 self._queue.put(envelope)

         # _child_shutdown()
    @@ -524,7 +552,7 @@
             if message.message_type == MessageType.LOG:
                 return

    -        self._queue.put(Envelope('message', message))
    +        self._queue.put(_Envelope('message', message))

         # _parent_shutdown()
         #
    @@ -588,24 +616,28 @@
             if not self._listening:
                 return

    -        if envelope._message_type == 'message':
    +        if envelope.message_type == 'message':
                 # Propagate received messages from children
                 # back through the context.
    -            self._scheduler.context.message(envelope._message)
    -        elif envelope._message_type == 'error':
    +            self._scheduler.context.message(envelope.message)
    +        elif envelope.message_type == 'error':
                 # For regression tests only, save the last error domain / reason
                 # reported from a child task in the main process, this global state
                 # is currently managed in _exceptions.py
    -            set_last_task_error(envelope._message['domain'],
    -                                envelope._message['reason'])
    -        elif envelope._message_type == 'result':
    +            set_last_task_error(envelope.message['domain'],
    +                                envelope.message['reason'])
    +        elif envelope.message_type == 'result':
                 assert self._result is None
    -            self._result = envelope._message
    -        elif envelope._message_type == 'child_data':
    +            self._result = envelope.message
    +        elif envelope.message_type == 'child_data':
                 # If we retry a job, we assign a new value to this
    -            self.child_data = envelope._message
    -        else:
    -            raise Exception()
    +            self.child_data = envelope.message
    +
    +        # Try Job subclass specific messages now
    +        elif not self.handle_message(envelope.message_type,
    +                                     envelope.message):
    +            assert 0, "Unhandled message type '{}': {}" \
    +                .format(envelope.message_type, envelope.message)

         # _parent_process_queue()
         #

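Note: send_message()/handle_message() give Job subclasses a small IPC channel from the child process back to the scheduling process. A minimal sketch of the pattern, modeled on CleanupJob above (ExampleJob is hypothetical):

    class ExampleJob(Job):

        def child_process(self):
            # Runs in the child: push intermediate progress to the parent.
            for step in range(3):
                self.send_message('progress', step)
            return 'done'

        def handle_message(self, message_type, message):
            # Runs in the parent for envelope types the base class does
            # not recognize; return True when the message is handled.
            if message_type == 'progress':
                print('progress:', message)
                return True
            return False
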
  • buildstream/data/userconfig.yaml
    @@ -13,11 +13,8 @@
     # Location to store sources
     sourcedir: ${XDG_CACHE_HOME}/buildstream/sources

    -# Location to perform builds
    -builddir: ${XDG_CACHE_HOME}/buildstream/build
    -
    -# Location to store local binary artifacts
    -artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
    +# Root location for other directories in the cache
    +rootcachedir: ${XDG_CACHE_HOME}/buildstream

     # Location to store build logs
     logdir: ${XDG_CACHE_HOME}/buildstream/logs

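Note: with the old keys gone from the defaults, a user configuration only needs the root; the other cache directories hang off it. A minimal example (the path is illustrative):

    # ~/.config/buildstream.conf
    rootcachedir: /mnt/bigdisk/buildstream-cache
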
  • buildstream/utils.py
    @@ -43,6 +43,7 @@ from . import _signals
     from ._exceptions import BstError, ErrorDomain
     from ._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    +
     # The magic number for timestamps: 2011-11-11 11:11:11
     _magic_timestamp = calendar.timegm([2011, 11, 11, 11, 11, 11])

    @@ -633,6 +634,27 @@ def _get_dir_size(path):
         return get_size(path)


    +# _get_volume_size():
    +#
    +# Gets the overall usage and total size of a mounted filesystem in bytes.
    +#
    +# Args:
    +#    path (str): The path to check
    +#
    +# Returns:
    +#    (int): The total number of bytes on the volume
    +#    (int): The number of available bytes on the volume
    +#
    +def _get_volume_size(path):
    +    try:
    +        stat_ = os.statvfs(path)
    +    except OSError as e:
    +        raise UtilError("Failed to retrieve stats on volume for path '{}': {}"
    +                        .format(path, e)) from e
    +
    +    return stat_.f_bsize * stat_.f_blocks, stat_.f_bsize * stat_.f_bavail
    +
    +
     # _parse_size():
     #
     # Convert a string representing data size to a number of
    @@ -667,8 +689,7 @@ def _parse_size(size, volume):
             if num > 100:
                 raise UtilError("{}% is not a valid percentage value.".format(num))

    -        stat_ = os.statvfs(volume)
    -        disk_size = stat_.f_blocks * stat_.f_bsize
    +        disk_size, _ = _get_volume_size(volume)

             return disk_size * (num / 100)


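Note: _get_volume_size() wraps os.statvfs() and is now shared by the quota calculation and the percentage form of _parse_size(). Roughly, this is what a "50%" quota resolves to (the path is illustrative):

    from buildstream.utils import _get_volume_size

    total_bytes, avail_bytes = _get_volume_size('/home/user/.cache/buildstream')
    quota = total_bytes * (50 / 100)   # a "50%" quota in the user configuration
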
  • conftest.py
    @@ -53,15 +53,15 @@ def pytest_runtest_setup(item):
     class IntegrationCache():

         def __init__(self, cache):
    -        cache = os.path.abspath(cache)
    +        self.root = os.path.abspath(cache)

             # Use the same sources every time
    -        self.sources = os.path.join(cache, 'sources')
    +        self.sources = os.path.join(self.root, 'sources')

             # Create a temp directory for the duration of the test for
             # the artifacts directory
             try:
    -            self.artifacts = tempfile.mkdtemp(dir=cache, prefix='artifacts-')
    +            self.artifacts = tempfile.mkdtemp(dir=self.root, prefix='artifacts-')
             except OSError as e:
                 raise AssertionError("Unable to create test directory !") from e

    @@ -86,6 +86,10 @@ def integration_cache(request):
             shutil.rmtree(cache.artifacts)
         except FileNotFoundError:
             pass
    +    try:
    +        shutil.rmtree(os.path.join(cache.root, 'cas'))
    +    except FileNotFoundError:
    +        pass


     #################################################

  • doc/bst2html.py

    ... ... @@ -194,10 +194,9 @@ def workdir(source_cache=None):

             bst_config_file = os.path.join(tempdir, 'buildstream.conf')
             config = {
    +            'rootcachedir': tempdir,
                 'sourcedir': source_cache,
    -            'artifactdir': os.path.join(tempdir, 'artifacts'),
                 'logdir': os.path.join(tempdir, 'logs'),
    -            'builddir': os.path.join(tempdir, 'build'),
             }
             _yaml.dump(config, bst_config_file)

    ... ... @@ -411,12 +410,10 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
             # Encode and save the output if that was asked for
             output = _yaml.node_get(command, str, 'output', default_value=None)
             if output is not None:
    -
                 # Convert / Generate a nice <div>
                 converted = generate_html(command_out, directory, config_file,
                                           source_cache, tempdir, palette,
                                           command_str, command_fake_output is not None)
    -
                 # Save it
                 filename = os.path.join(desc_dir, output)
                 filename = os.path.realpath(filename)
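
The user-facing shape of this change: a single 'rootcachedir' entry replaces
the separate 'artifactdir' and 'builddir' settings, and the cache
subdirectories are derived from it. A sketch of the resulting configuration
dict, assuming the layout implied elsewhere in this branch (cas/, build/ and
tmp/ under the root):

    import os

    tempdir = '/tmp/bst-docs'   # illustrative path

    config = {
        'rootcachedir': tempdir,   # cas/, build/ and tmp/ live below this
        'sourcedir': os.path.join(tempdir, 'sources'),
        'logdir': os.path.join(tempdir, 'logs'),
    }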
    

  • doc/sessions/running-commands.run

    ... ... @@ -2,7 +2,7 @@
     commands:
     # Make it fetch first
     - directory: ../examples/running-commands
    -  command: fetch hello.bst
    +  command: source fetch hello.bst

     # Capture a show output
     - directory: ../examples/running-commands
    

  • tests/artifactcache/cache_size.py

    ... ... @@ -50,14 +50,15 @@ def test_cache_size_write(cli, tmpdir):

         # Artifact cache must be in a known place
         artifactdir = os.path.join(project_dir, "artifacts")
    -    cli.configure({"artifactdir": artifactdir})
    +    casdir = os.path.join(project_dir, "cas")
    +    cli.configure({"rootcachedir": project_dir})

         # Build, to populate the cache
         res = cli.run(project=project_dir, args=["build", "test.bst"])
         res.assert_success()

         # Inspect the artifact cache
    -    sizefile = os.path.join(artifactdir, CACHE_SIZE_FILE)
    +    sizefile = os.path.join(casdir, CACHE_SIZE_FILE)
         assert os.path.isfile(sizefile)
         with open(sizefile, "r") as f:
             size_data = f.read()

    ... ... @@ -80,7 +81,7 @@ def test_quota_over_1024T(cli, tmpdir):
         _yaml.dump({'name': 'main'}, str(project.join("project.conf")))

         volume_space_patch = mock.patch(
    -        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
    +        "buildstream._cas.CASCache._get_cache_volume_size",
             autospec=True,
             return_value=(1025 * TiB, 1025 * TiB)
         )
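
Note that both the patch target and the return shape change here: the volume
query now lives on CASCache, and the mocked tuple is (total, available), the
same order _get_volume_size() returns (the expiry tests below swap their
arguments accordingly). A minimal sketch of the pattern, with TiB assumed to
be the byte constant the test module defines:

    from unittest import mock

    TiB = 1024 ** 4   # assumed definition

    volume_space_patch = mock.patch(
        "buildstream._cas.CASCache._get_cache_volume_size",
        autospec=True,
        # (total, available), matching _get_volume_size() above
        return_value=(1025 * TiB, 1025 * TiB),
    )

    with volume_space_patch:
        pass  # code under test sees a 1025 TiB volume here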
    

  • tests/artifactcache/expiry.py

    ... ... @@ -358,13 +358,13 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas
             total_space = 10000

         volume_space_patch = mock.patch(
    -        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
    +        "buildstream._cas.CASCache._get_cache_volume_size",
             autospec=True,
    -        return_value=(free_space, total_space),
    +        return_value=(total_space, free_space),
         )

         cache_size_patch = mock.patch(
    -        "buildstream._artifactcache.ArtifactCache.get_cache_size",
    +        "buildstream._cas.CASCache.get_cache_size",
             autospec=True,
             return_value=0,
         )

    ... ... @@ -419,7 +419,7 @@ def test_extract_expiry(cli, datafiles, tmpdir):
         # Now we should have a directory for the cached target2.bst, which
         # replaced target.bst in the cache, we should not have a directory
         # for the target.bst
    -    refsdir = os.path.join(project, 'cache', 'artifacts', 'cas', 'refs', 'heads')
    +    refsdir = os.path.join(project, 'cache', 'cas', 'refs', 'heads')
         refsdirtest = os.path.join(refsdir, 'test')
         refsdirtarget = os.path.join(refsdirtest, 'target')
         refsdirtarget2 = os.path.join(refsdirtest, 'target2')
    

  • tests/artifactcache/junctions.py

    ... ... @@ -68,8 +68,8 @@ def test_push_pull(cli, tmpdir, datafiles):
             # Now we've pushed, delete the user's local artifact cache
             # directory and try to redownload it from the share
             #
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    -        shutil.rmtree(artifacts)
    +        cas = os.path.join(cli.directory, 'cas')
    +        shutil.rmtree(cas)

             # Assert that nothing is cached locally anymore
             state = cli.get_element_state(project, 'target.bst')
    

  • tests/artifactcache/pull.py

    ... ... @@ -56,7 +56,7 @@ def test_pull(cli, tmpdir, datafiles):
         # Set up an artifact cache.
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
             # Configure artifact share
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    +        cache_dir = os.path.join(str(tmpdir), 'cache')
             user_config_file = str(tmpdir.join('buildstream.conf'))
             user_config = {
                 'scheduler': {

    ... ... @@ -65,7 +65,8 @@ def test_pull(cli, tmpdir, datafiles):
                 'artifacts': {
                     'url': share.repo,
                     'push': True,
    -            }
    +            },
    +            'rootcachedir': cache_dir
             }

             # Write down the user configuration file

    ... ... @@ -92,7 +93,6 @@ def test_pull(cli, tmpdir, datafiles):
             # Fake minimal context
             context = Context()
             context.load(config=user_config_file)
    -        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
             context.set_message_handler(message_handler)

             # Load the project and CAS cache

    ... ... @@ -102,7 +102,10 @@ def test_pull(cli, tmpdir, datafiles):

             # Assert that the element's artifact is **not** cached
             element = project.load_elements(['target.bst'])[0]
    +        print(element)
             element_key = cli.get_element_key(project_dir, 'target.bst')
    +        print(context.casdir)
    +        print(cas.get_artifact_fullname(element, element_key))
             assert not cas.contains(element, element_key)

             queue = multiprocessing.Queue()

    ... ... @@ -110,7 +113,7 @@ def test_pull(cli, tmpdir, datafiles):
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_pull, queue, user_config_file, project_dir,
    -                                                artifact_dir, 'target.bst', element_key))
    +                                                cache_dir, 'target.bst', element_key))

             try:
                 # Keep SIGINT blocked in the child process

    ... ... @@ -127,12 +130,14 @@ def test_pull(cli, tmpdir, datafiles):
             assert cas.contains(element, element_key)


    -def _test_pull(user_config_file, project_dir, artifact_dir,
    +def _test_pull(user_config_file, project_dir, cache_dir,
                    element_name, element_key, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
    +    context.rootcachedir = cache_dir
    +    context.casdir = os.path.join(cache_dir, 'cas')
    +    context.tmpdir = os.path.join(cache_dir, 'tmp')
         context.set_message_handler(message_handler)

         # Load the project manually

    ... ... @@ -165,7 +170,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
         # Set up an artifact cache.
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
             # Configure artifact share
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
             user_config_file = str(tmpdir.join('buildstream.conf'))
             user_config = {
                 'scheduler': {

    ... ... @@ -174,7 +179,8 @@ def test_pull_tree(cli, tmpdir, datafiles):
                 'artifacts': {
                     'url': share.repo,
                     'push': True,
    -            }
    +            },
    +            'rootcachedir': rootcache_dir
             }

             # Write down the user configuration file

    ... ... @@ -195,7 +201,6 @@ def test_pull_tree(cli, tmpdir, datafiles):
             # Fake minimal context
             context = Context()
             context.load(config=user_config_file)
    -        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
             context.set_message_handler(message_handler)

             # Load the project and CAS cache

    ... ... @@ -218,7 +223,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_push_tree, queue, user_config_file, project_dir,
    -                                                artifact_dir, artifact_digest))
    +                                                artifact_digest))

             try:
                 # Keep SIGINT blocked in the child process

    ... ... @@ -246,7 +251,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
             # Use subprocess to avoid creation of gRPC threads in main BuildStream process
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_pull_tree, queue, user_config_file, project_dir,
    -                                                artifact_dir, tree_digest))
    +                                                tree_digest))

             try:
                 # Keep SIGINT blocked in the child process

    ... ... @@ -268,11 +273,10 @@ def test_pull_tree(cli, tmpdir, datafiles):
             assert os.path.exists(cas.objpath(directory_digest))


    -def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
    +def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
         context.set_message_handler(message_handler)

         # Load the project manually

    ... ... @@ -304,11 +308,10 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
             queue.put("No remote configured")


    -def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
    +def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
         context.set_message_handler(message_handler)

         # Load the project manually
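
These tests repeatedly run the gRPC-touching body in a child process and
report results back over a queue (the fork-safety constraint linked above).
A self-contained sketch of that queue-wrapper pattern; _queue_wrapper and
run_in_child here are illustrative stand-ins for the test-suite helpers, not
their exact code, and they assume the target reports exactly one item via
queue.put():

    import multiprocessing
    import signal

    def _queue_wrapper(target, queue, *args):
        # Run the test body in the child, funnelling any failure back
        # through the queue so the parent can assert on it after join()
        try:
            target(*args, queue=queue)
        except Exception as e:
            queue.put(str(e))

    def run_in_child(target, *args):
        queue = multiprocessing.Queue()
        process = multiprocessing.Process(target=_queue_wrapper,
                                          args=(target, queue) + args)
        # Keep SIGINT blocked (POSIX) while the child starts, so an
        # interrupt in the parent cannot land at an awkward moment
        signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGINT])
        try:
            process.start()
        finally:
            signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGINT])
        result = queue.get()
        process.join()
        return result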
    

  • tests/artifactcache/push.py

    ... ... @@ -51,7 +51,7 @@ def test_push(cli, tmpdir, datafiles):
         # Set up an artifact cache.
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
             # Configure artifact share
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
             user_config_file = str(tmpdir.join('buildstream.conf'))
             user_config = {
                 'scheduler': {

    ... ... @@ -60,7 +60,8 @@ def test_push(cli, tmpdir, datafiles):
                 'artifacts': {
                     'url': share.repo,
                     'push': True,
    -            }
    +            },
    +            'rootcachedir': rootcache_dir
             }

             # Write down the user configuration file

    ... ... @@ -69,7 +70,6 @@ def test_push(cli, tmpdir, datafiles):
             # Fake minimal context
             context = Context()
             context.load(config=user_config_file)
    -        context.artifactdir = artifact_dir
             context.set_message_handler(message_handler)

             # Load the project manually

    ... ... @@ -89,7 +89,7 @@ def test_push(cli, tmpdir, datafiles):
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_push, queue, user_config_file, project_dir,
    -                                                artifact_dir, 'target.bst', element_key))
    +                                                'target.bst', element_key))

             try:
                 # Keep SIGINT blocked in the child process

    ... ... @@ -106,12 +106,10 @@ def test_push(cli, tmpdir, datafiles):
             assert share.has_artifact('test', 'target.bst', element_key)


    -def _test_push(user_config_file, project_dir, artifact_dir,
    -               element_name, element_key, queue):
    +def _test_push(user_config_file, project_dir, element_name, element_key, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
         context.set_message_handler(message_handler)

         # Load the project manually

    ... ... @@ -152,7 +150,7 @@ def test_push_directory(cli, tmpdir, datafiles):
         # Set up an artifact cache.
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
             # Configure artifact share
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
             user_config_file = str(tmpdir.join('buildstream.conf'))
             user_config = {
                 'scheduler': {

    ... ... @@ -161,7 +159,8 @@ def test_push_directory(cli, tmpdir, datafiles):
                 'artifacts': {
                     'url': share.repo,
                     'push': True,
    -            }
    +            },
    +            'rootcachedir': rootcache_dir
             }

             # Write down the user configuration file

    ... ... @@ -170,7 +169,6 @@ def test_push_directory(cli, tmpdir, datafiles):
             # Fake minimal context
             context = Context()
             context.load(config=user_config_file)
    -        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
             context.set_message_handler(message_handler)

             # Load the project and CAS cache

    ... ... @@ -182,6 +180,7 @@ def test_push_directory(cli, tmpdir, datafiles):
             # Assert that the element's artifact is cached
             element = project.load_elements(['target.bst'])[0]
             element_key = cli.get_element_key(project_dir, 'target.bst')
    +        print(context.casdir)
             assert artifactcache.contains(element, element_key)

             # Manually setup the CAS remote

    ... ... @@ -198,7 +197,7 @@ def test_push_directory(cli, tmpdir, datafiles):
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_push_directory, queue, user_config_file,
    -                                                project_dir, artifact_dir, artifact_digest))
    +                                                project_dir, artifact_digest))

             try:
                 # Keep SIGINT blocked in the child process

    ... ... @@ -216,11 +215,10 @@ def test_push_directory(cli, tmpdir, datafiles):
             assert share.has_object(artifact_digest)


    -def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
    +def _test_push_directory(user_config_file, project_dir, artifact_digest, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
         context.set_message_handler(message_handler)

         # Load the project manually

    ... ... @@ -254,6 +252,7 @@ def test_push_message(cli, tmpdir, datafiles):
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
             # Configure artifact share
             artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
             user_config_file = str(tmpdir.join('buildstream.conf'))
             user_config = {
                 'scheduler': {

    ... ... @@ -262,7 +261,8 @@ def test_push_message(cli, tmpdir, datafiles):
                 'artifacts': {
                     'url': share.repo,
                     'push': True,
    -            }
    +            },
    +            'rootcachedir': rootcache_dir
             }

             # Write down the user configuration file

    ... ... @@ -273,7 +273,7 @@ def test_push_message(cli, tmpdir, datafiles):
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_push_message, queue, user_config_file,
    -                                                project_dir, artifact_dir))
    +                                                project_dir))

             try:
                 # Keep SIGINT blocked in the child process

    ... ... @@ -292,11 +292,10 @@ def test_push_message(cli, tmpdir, datafiles):
             assert share.has_object(message_digest)


    -def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
    +def _test_push_message(user_config_file, project_dir, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
         context.set_message_handler(message_handler)

         # Load the project manually
    

  • tests/frontend/pull.py

    ... ... @@ -62,8 +62,8 @@ def test_push_pull_all(cli, tmpdir, datafiles):
             # Now we've pushed, delete the user's local artifact cache
             # directory and try to redownload it from the share
             #
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    -        shutil.rmtree(artifacts)
    +        cas = os.path.join(cli.directory, 'cas')
    +        shutil.rmtree(cas)

             # Assert that nothing is cached locally anymore
             states = cli.get_element_states(project, all_elements)

    ... ... @@ -154,8 +154,8 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
             assert_shared(cli, share2, project, 'target.bst')

             # Delete the user's local artifact cache.
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    -        shutil.rmtree(artifacts)
    +        cas = os.path.join(cli.directory, 'cas')
    +        shutil.rmtree(cas)

             # Assert that the element is not cached anymore.
             assert cli.get_element_state(project, 'target.bst') != 'cached'

    ... ... @@ -208,8 +208,8 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
             # Now we've pushed, delete the user's local artifact cache
             # directory and try to redownload it from the good_share.
             #
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    -        shutil.rmtree(artifacts)
    +        cas = os.path.join(cli.directory, 'cas')
    +        shutil.rmtree(cas)

             result = cli.run(project=project, args=['artifact', 'pull', 'target.bst', '--remote',
                                                     good_share.repo])

    ... ... @@ -249,8 +249,8 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
             # Now we've pushed, delete the user's local artifact cache
             # directory and try to redownload it from the share
             #
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    -        shutil.rmtree(artifacts)
    +        cas = os.path.join(cli.directory, 'cas')
    +        shutil.rmtree(cas)

             # Assert that nothing is cached locally anymore
             for element_name in all_elements:

    ... ... @@ -299,8 +299,8 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
             # Now we've pushed, delete the user's local artifact cache
             # directory and try to redownload it from the share
             #
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    -        shutil.rmtree(artifacts)
    +        cas = os.path.join(cli.directory, 'cas')
    +        shutil.rmtree(cas)

             # Assert that nothing is cached locally anymore
             for element_name in all_elements:

    ... ... @@ -335,7 +335,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
             result.assert_success()
             assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'

    -        cache_dir = os.path.join(project, 'cache', 'artifacts')
    +        cache_dir = os.path.join(project, 'cache', 'cas')
             shutil.rmtree(cache_dir)

             assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'

    ... ... @@ -370,8 +370,8 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
             # Now we've pushed, delete the user's local artifact cache
             # directory and try to redownload it from the share
             #
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    -        shutil.rmtree(artifacts)
    +        cas = os.path.join(cli.directory, 'cas')
    +        shutil.rmtree(cas)

             # Assert that nothing is cached locally anymore
             for element_name in all_elements:
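
The edit repeated across these tests is the same: with the root cache dir
layout, wiping the local artifact cache means deleting the 'cas'
subdirectory rather than 'artifacts'. As a tiny helper (illustrative only,
not part of the suite):

    import os
    import shutil

    def wipe_local_artifacts(cli_directory):
        # Drops all locally cached artifacts under the new layout
        shutil.rmtree(os.path.join(cli_directory, 'cas'))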
    

  • tests/integration/build-tree.py

    ... ... @@ -158,10 +158,8 @@ def test_buildtree_options(cli, tmpdir, datafiles):
             assert cli.get_element_state(project, element_name) == 'cached'

             # Discard the cache
    -        cli.configure({
    -            'artifacts': {'url': share.repo, 'push': True},
    -            'artifactdir': os.path.join(cli.directory, 'artifacts2')
    -        })
    +        shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts')))
    +        shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
             assert cli.get_element_state(project, element_name) != 'cached'

             # Pull from cache, but do not include buildtrees.
    

  • tests/integration/cachedfail.py

    ... ... @@ -160,7 +160,6 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):

             # This element should have failed
             assert cli.get_element_state(project, 'element.bst') == 'failed'
    -        # This element should have been pushed to the remote
             assert share.has_artifact('test', 'element.bst', cli.get_element_key(project, 'element.bst'))

     
    

  • tests/integration/messages.py

    ... ... @@ -40,7 +40,7 @@ DATA_DIR = os.path.join(
     @pytest.mark.integration
     @pytest.mark.datafiles(DATA_DIR)
     @pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    -def test_disable_message_lines(cli, tmpdir, datafiles):
    +def test_disable_message_lines(cli, tmpdir, datafiles, integration_cache):
         project = os.path.join(datafiles.dirname, datafiles.basename)
         element_path = os.path.join(project, 'elements')
         element_name = 'message.bst'

    ... ... @@ -66,7 +66,7 @@ def test_disable_message_lines(cli, tmpdir, datafiles):
         assert 'echo "Silly message"' in result.stderr

         # Let's now build it again, but with --message-lines 0
    -    cli.remove_artifact_from_cache(project, element_name)
    +    cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
         result = cli.run(project=project, args=["--message-lines", "0",
                                                 "build", element_name])
         result.assert_success()

    ... ... @@ -76,7 +76,7 @@ def test_disable_message_lines(cli, tmpdir, datafiles):
     @pytest.mark.integration
     @pytest.mark.datafiles(DATA_DIR)
     @pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    -def test_disable_error_lines(cli, tmpdir, datafiles):
    +def test_disable_error_lines(cli, tmpdir, datafiles, integration_cache):
         project = os.path.join(datafiles.dirname, datafiles.basename)
         element_path = os.path.join(project, 'elements')
         element_name = 'message.bst'

    ... ... @@ -103,7 +103,7 @@ def test_disable_error_lines(cli, tmpdir, datafiles):
         assert "This is a syntax error" in result.stderr

         # Let's now build it again, but with --error-lines 0
    -    cli.remove_artifact_from_cache(project, element_name)
    +    cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
         result = cli.run(project=project, args=["--error-lines", "0",
                                                 "build", element_name])
         result.assert_main_error(ErrorDomain.STREAM, None)
    

  • tests/integration/pullbuildtrees.py

    ... ... @@ -19,9 +19,10 @@ DATA_DIR = os.path.join(
     # cleared as just forcefully removing the refpath leaves dangling objects.
     def default_state(cli, tmpdir, share):
         shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    +    shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
         cli.configure({
             'artifacts': {'url': share.repo, 'push': False},
    -        'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
    +        'rootcachedir': str(tmpdir),
             'cache': {'pull-buildtrees': False},
         })

    ... ... @@ -42,7 +43,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
             create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
             cli2.configure({
                 'artifacts': {'url': share1.repo, 'push': True},
    -            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
    +            'rootcachedir': str(tmpdir),
             })

             # Build autotools element, checked pushed, delete local
    

  • tests/integration/source-determinism.py

    ... ... @@ -94,9 +94,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca
                     return f.read()
             finally:
                 os.umask(old_umask)
    -            cache_dir = integration_cache.artifacts
    -            cli.remove_artifact_from_cache(project, element_name,
    -                                           cache_dir=cache_dir)
    +            cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)

         assert get_value_for_umask(0o022) == get_value_for_umask(0o077)

    ... ... @@ -156,8 +154,6 @@ def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
                 with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
                     return f.read()
             finally:
    -            cache_dir = integration_cache.artifacts
    -            cli.remove_artifact_from_cache(project, element_name,
    -                                           cache_dir=cache_dir)
    +            cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)

         assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)

  • tests/testutils/artifactshare.py

    ... ... @@ -49,7 +49,7 @@ class ArtifactShare():

             os.makedirs(self.repodir)

    -        self.cas = CASCache(self.repodir)
    +        self.cas = CASCache(self.repodir, max_head_size)

             self.total_space = total_space
             self.free_space = free_space
    

  • tests/testutils/runcli.py

    ... ... @@ -247,15 +247,13 @@ class Cli():
                                        *, cache_dir=None):
             # Read configuration to figure out where artifacts are stored
             if not cache_dir:
    -            default = os.path.join(project, 'cache', 'artifacts')
    -
    -            if self.config is not None:
    -                cache_dir = self.config.get('artifactdir', default)
    -            else:
    -                cache_dir = default
    +            cache_dir = os.path.join(project, 'cache')

             cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')

    +        # replace forward slashes
    +        element_name = element_name.replace('/', '-')
    +
             cache_dir = os.path.splitext(os.path.join(cache_dir, 'test', element_name))[0]
             shutil.rmtree(cache_dir)

    ... ... @@ -338,6 +336,7 @@ class Cli():
             exception = None
             exit_code = 0

    +
             # Temporarily redirect sys.stdin to /dev/null to ensure that
             # Popen doesn't attempt to read pytest's dummy stdin.
             old_stdin = sys.stdin

    ... ... @@ -347,6 +346,7 @@ class Cli():
                 capture = MultiCapture(out=True, err=True, in_=False, Capture=capture_kind)
                 capture.start_capturing()

    +
                 try:
                     cli.main(args=args or (), prog_name=cli.name, **extra)
                 except SystemExit as e:

    ... ... @@ -552,11 +552,21 @@ def cli_integration(tmpdir, integration_cache):
         # We want to cache sources for integration tests more permanently,
         # to avoid downloading the huge base-sdk repeatedly
         fixture.configure({
    +        'rootcachedir': integration_cache.root,
             'sourcedir': integration_cache.sources,
    -        'artifactdir': integration_cache.artifacts
         })

    -    return fixture
    +    yield fixture
    +
    +    # remove following folders if necessary
    +    try:
    +        shutil.rmtree(os.path.join(integration_cache.root, 'build'))
    +    except FileNotFoundError:
    +        pass
    +    try:
    +        shutil.rmtree(os.path.join(integration_cache.root, 'tmp'))
    +    except FileNotFoundError:
    +        pass


     @contextmanager

    ... ... @@ -596,10 +606,8 @@ def configured(directory, config=None):

         if not config.get('sourcedir', False):
             config['sourcedir'] = os.path.join(directory, 'sources')
    -    if not config.get('builddir', False):
    -        config['builddir'] = os.path.join(directory, 'build')
    -    if not config.get('artifactdir', False):
    -        config['artifactdir'] = os.path.join(directory, 'artifacts')
    +    if not config.get('rootcachedir', False):
    +        config['rootcachedir'] = directory
         if not config.get('logdir', False):
             config['logdir'] = os.path.join(directory, 'logs')
    605 613
     
    


