Raoul Hidalgo Charman pushed to branch raoul/870-root-cache-dir at BuildStream / buildstream

Commits:

- ea51938e by Raoul Hidalgo Charman at 2019-02-13T16:49:31Z
- eecd764f by Raoul Hidalgo Charman at 2019-02-13T16:49:36Z

18 changed files:

- buildstream/_artifactcache.py
- buildstream/_cas/__init__.py
- buildstream/_cas/cascache.py
- buildstream/_context.py
- buildstream/_frontend/status.py
- buildstream/_frontend/widget.py
- buildstream/_scheduler/jobs/cachesizejob.py
- buildstream/_scheduler/jobs/cleanupjob.py
- buildstream/element.py
- tests/artifactcache/cache_size.py
- tests/artifactcache/expiry.py
- tests/frontend/pull.py
- tests/integration/artifact.py
- tests/integration/pullbuildtrees.py
- tests/integration/shellbuildtrees.py
- tests/internals/context.py
- tests/internals/pluginloading.py
- tests/testutils/artifactshare.py
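
These two commits move all cache-size and quota bookkeeping out of ArtifactCache into a new CASQuota object (plus a CASCacheUsage report), both owned by the Context, and root every cache directory under a single cachedir. The following is only a rough sketch of how the new surface is meant to be consumed, pieced together from the diff below; it is not part of the commits themselves:

    # Illustrative only -- 'context' is assumed to be an already-loaded Context.
    casquota = context.get_casquota()          # lazily constructs a CASQuota

    estimate = casquota.get_cache_size()       # cached/estimated size in bytes
    real = casquota.compute_cache_size()       # real disk usage of the CAS

    usage = context.get_cache_usage()          # a CASCacheUsage object
    print("Cache Usage: {}".format(usage))     # "<used> / <configured quota> (<percent>%)"

    if context.artifactcache.full():           # now simply delegates to casquota.full()
        pass                                   # a cleanup job would be scheduled here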
 
Changes:
buildstream/_artifactcache.py:

@@ -22,7 +22,7 @@ import os
 from collections.abc import Mapping

 from .types import _KeyStrength
-from ._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
+from ._exceptions import ArtifactError, CASError
 from ._message import Message, MessageType
 from . import utils
 from . import _yaml
@@ -46,39 +46,6 @@ class ArtifactCacheSpec(CASRemoteSpec):
     pass


-# ArtifactCacheUsage
-#
-# A simple object to report the current artifact cache
-# usage details.
-#
-# Note that this uses the user configured cache quota
-# rather than the internal quota with protective headroom
-# removed, to provide a more sensible value to display to
-# the user.
-#
-# Args:
-#    artifacts (ArtifactCache): The artifact cache to get the status of
-#
-class ArtifactCacheUsage():
-
-    def __init__(self, artifacts):
-        context = artifacts.context
-        self.quota_config = context.config_cache_quota       # Configured quota
-        self.quota_size = artifacts._cache_quota_original    # Resolved cache quota in bytes
-        self.used_size = artifacts.get_cache_size()          # Size used by artifacts in bytes
-        self.used_percent = 0                                # Percentage of the quota used
-        if self.quota_size is not None:
-            self.used_percent = int(self.used_size * 100 / self.quota_size)
-
-    # Formattable into a human readable string
-    #
-    def __str__(self):
-        return "{} / {} ({}%)" \
-            .format(utils._pretty_size(self.used_size, dec_places=1),
-                    self.quota_config,
-                    self.used_percent)
-
-
 # An ArtifactCache manages artifacts.
 #
 # Args:
@@ -87,19 +54,17 @@ class ArtifactCacheUsage():
 class ArtifactCache():
     def __init__(self, context):
         self.context = context
-        self.extractdir = os.path.join(context.artifactdir, 'extract')
+        self.extractdir = context.extractdir

         self.cas = context.get_cascache()
+        self.casquota = context.get_casquota()
+        self.casquota._calculate_cache_quota()

         self.global_remote_specs = []
         self.project_remote_specs = {}

         self._required_elements = set()       # The elements required for this session
-        self._cache_size = None               # The current cache size, sometimes it's an estimate
-        self._cache_quota = None              # The cache quota
-        self._cache_quota_original = None     # The cache quota as specified by the user, in bytes
-        self._cache_quota_headroom = None     # The headroom in bytes before reaching the quota or full disk
-        self._cache_lower_threshold = None    # The target cache size for a cleanup
+
         self._remotes_setup = False           # Check to prevent double-setup of remotes

         # Per-project list of _CASRemote instances.
@@ -110,8 +75,6 @@ class ArtifactCache():

         os.makedirs(self.extractdir, exist_ok=True)

-        self._calculate_cache_quota()
-
     # setup_remotes():
     #
     # Sets up which remotes to use
@@ -235,7 +198,7 @@ class ArtifactCache():
         space_saved = 0

         # Start off with an announcement with as much info as possible
-        volume_size, volume_avail = self._get_cache_volume_size()
+        volume_size, volume_avail = self.casquota._get_cache_volume_size()
         self._message(MessageType.STATUS, "Starting cache cleanup",
                       detail=("Elements required by the current build plan: {}\n" +
                               "User specified quota: {} ({})\n" +
@@ -243,8 +206,8 @@ class ArtifactCache():
                               "Cache volume: {} total, {} available")
                       .format(len(self._required_elements),
                               context.config_cache_quota,
-                              utils._pretty_size(self._cache_quota_original, dec_places=2),
-                              utils._pretty_size(self.get_cache_size(), dec_places=2),
+                              utils._pretty_size(self.casquota._cache_quota, dec_places=2),
+                              utils._pretty_size(self.casquota.get_cache_size(), dec_places=2),
                               utils._pretty_size(volume_size, dec_places=2),
                               utils._pretty_size(volume_avail, dec_places=2)))

@@ -261,9 +224,10 @@ class ArtifactCache():
             ])

         # Do a real computation of the cache size once, just in case
-        self.compute_cache_size()
+        usage = self.casquota.compute_cache_size()
+        self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))

-        while self.get_cache_size() >= self._cache_lower_threshold:
+        while self.casquota.get_cache_size() >= self.casquota._cache_lower_threshold:
             try:
                 to_remove = artifacts.pop(0)
             except IndexError:
@@ -280,7 +244,7 @@ class ArtifactCache():
                           "Please increase the cache-quota in {} and/or make more disk space."
                           .format(removed_ref_count,
                                   utils._pretty_size(space_saved, dec_places=2),
-                                  utils._pretty_size(self.get_cache_size(), dec_places=2),
+                                  utils._pretty_size(self.casquota.get_cache_size(), dec_places=2),
                                   len(self._required_elements),
                                   (context.config_origin or default_conf)))

@@ -306,7 +270,7 @@ class ArtifactCache():
                                   to_remove))

                 # Remove the size from the removed size
-                self.set_cache_size(self._cache_size - size)
+                self.casquota.set_cache_size(self.casquota._cache_size - size)

                 # User callback
                 #
@@ -322,29 +286,12 @@ class ArtifactCache():
                               "Cache usage is now: {}")
                       .format(removed_ref_count,
                               utils._pretty_size(space_saved, dec_places=2),
-                              utils._pretty_size(self.get_cache_size(), dec_places=2)))
-
-        return self.get_cache_size()
-
-    # compute_cache_size()
-    #
-    # Computes the real artifact cache size by calling
-    # the abstract calculate_cache_size() method.
-    #
-    # Returns:
-    #    (int): The size of the artifact cache.
-    #
-    def compute_cache_size(self):
-        old_cache_size = self._cache_size
-        new_cache_size = self.cas.calculate_cache_size()
-
-        if old_cache_size != new_cache_size:
-            self._cache_size = new_cache_size
+                              utils._pretty_size(self.casquota.get_cache_size(), dec_places=2)))

-            usage = ArtifactCacheUsage(self)
-            self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
+        return self.casquota.get_cache_size()

-        return self._cache_size
+    def full(self):
+        return self.casquota.full()

     # add_artifact_size()
     #
@@ -355,71 +302,10 @@ class ArtifactCache():
     #     artifact_size (int): The size to add.
     #
     def add_artifact_size(self, artifact_size):
-        cache_size = self.get_cache_size()
+        cache_size = self.casquota.get_cache_size()
         cache_size += artifact_size

-        self.set_cache_size(cache_size)
-
-    # get_cache_size()
-    #
-    # Fetches the cached size of the cache, this is sometimes
-    # an estimate and periodically adjusted to the real size
-    # when a cache size calculation job runs.
-    #
-    # When it is an estimate, the value is either correct, or
-    # it is greater than the actual cache size.
-    #
-    # Returns:
-    #     (int) An approximation of the artifact cache size, in bytes.
-    #
-    def get_cache_size(self):
-
-        # If we don't currently have an estimate, figure out the real cache size.
-        if self._cache_size is None:
-            stored_size = self._read_cache_size()
-            if stored_size is not None:
-                self._cache_size = stored_size
-            else:
-                self.compute_cache_size()
-
-        return self._cache_size
-
-    # set_cache_size()
-    #
-    # Forcefully set the overall cache size.
-    #
-    # This is used to update the size in the main process after
-    # having calculated in a cleanup or a cache size calculation job.
-    #
-    # Args:
-    #     cache_size (int): The size to set.
-    #
-    def set_cache_size(self, cache_size):
-
-        assert cache_size is not None
-
-        self._cache_size = cache_size
-        self._write_cache_size(self._cache_size)
-
-    # full()
-    #
-    # Checks if the artifact cache is full, either
-    # because the user configured quota has been exceeded
-    # or because the underlying disk is almost full.
-    #
-    # Returns:
-    #    (bool): True if the artifact cache is full
-    #
-    def full(self):
-
-        if self.get_cache_size() > self._cache_quota:
-            return True
-
-        _, volume_avail = self._get_cache_volume_size()
-        if volume_avail < self._cache_quota_headroom:
-            return True
-
-        return False
+        self.casquota.set_cache_size(cache_size)

     # preflight():
     #
@@ -882,142 +768,6 @@ class ArtifactCache():
         with self.context.timed_activity("Initializing remote caches", silent_nested=True):
             self.initialize_remotes(on_failure=remote_failed)

-    # _write_cache_size()
-    #
-    # Writes the given size of the artifact to the cache's size file
-    #
-    # Args:
-    #    size (int): The size of the artifact cache to record
-    #
-    def _write_cache_size(self, size):
-        assert isinstance(size, int)
-        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
-        with utils.save_file_atomic(size_file_path, "w") as f:
-            f.write(str(size))
-
-    # _read_cache_size()
-    #
-    # Reads and returns the size of the artifact cache that's stored in the
-    # cache's size file
-    #
-    # Returns:
-    #    (int): The size of the artifact cache, as recorded in the file
-    #
-    def _read_cache_size(self):
-        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
-
-        if not os.path.exists(size_file_path):
-            return None
-
-        with open(size_file_path, "r") as f:
-            size = f.read()
-
-        try:
-            num_size = int(size)
-        except ValueError as e:
-            raise ArtifactError("Size '{}' parsed from '{}' was not an integer".format(
-                size, size_file_path)) from e
-
-        return num_size
-
-    # _calculate_cache_quota()
-    #
-    # Calculates and sets the cache quota and lower threshold based on the
-    # quota set in Context.
-    # It checks that the quota is both a valid _expression_, and that there is
-    # enough disk space to satisfy that quota
-    #
-    def _calculate_cache_quota(self):
-        # Headroom intended to give BuildStream a bit of leeway.
-        # This acts as the minimum size of cache_quota and also
-        # is taken from the user requested cache_quota.
-        #
-        if 'BST_TEST_SUITE' in os.environ:
-            self._cache_quota_headroom = 0
-        else:
-            self._cache_quota_headroom = 2e9
-
-        try:
-            cache_quota = utils._parse_size(self.context.config_cache_quota,
-                                            self.context.artifactdir)
-        except utils.UtilError as e:
-            raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
-                            "\nValid values are, for example: 800M 10G 1T 50%\n"
-                            .format(str(e))) from e
-
-        total_size, available_space = self._get_cache_volume_size()
-        cache_size = self.get_cache_size()
-
-        # Ensure system has enough storage for the cache_quota
-        #
-        # If cache_quota is none, set it to the maximum it could possibly be.
-        #
-        # Also check that cache_quota is at least as large as our headroom.
-        #
-        if cache_quota is None:  # Infinity, set to max system storage
-            cache_quota = cache_size + available_space
-        if cache_quota < self._cache_quota_headroom:  # Check minimum
-            raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
-                            "BuildStream requires a minimum cache quota of 2G.")
-        elif cache_quota > total_size:
-            # A quota greater than the total disk size is certianly an error
-            raise ArtifactError("Your system does not have enough available " +
-                                "space to support the cache quota specified.",
-                                detail=("You have specified a quota of {quota} total disk space.\n" +
-                                        "The filesystem containing {local_cache_path} only " +
-                                        "has {total_size} total disk space.")
-                                .format(
-                                    quota=self.context.config_cache_quota,
-                                    local_cache_path=self.context.artifactdir,
-                                    total_size=utils._pretty_size(total_size)),
-                                reason='insufficient-storage-for-quota')
-        elif cache_quota > cache_size + available_space:
-            # The quota does not fit in the available space, this is a warning
-            if '%' in self.context.config_cache_quota:
-                available = (available_space / total_size) * 100
-                available = '{}% of total disk space'.format(round(available, 1))
-            else:
-                available = utils._pretty_size(available_space)
-
-            self._message(MessageType.WARN,
-                          "Your system does not have enough available " +
-                          "space to support the cache quota specified.",
-                          detail=("You have specified a quota of {quota} total disk space.\n" +
-                                  "The filesystem containing {local_cache_path} only " +
-                                  "has {available_size} available.")
-                          .format(quota=self.context.config_cache_quota,
-                                  local_cache_path=self.context.artifactdir,
-                                  available_size=available))
-
-        # Place a slight headroom (2e9 (2GB) on the cache_quota) into
-        # cache_quota to try and avoid exceptions.
-        #
-        # Of course, we might still end up running out during a build
-        # if we end up writing more than 2G, but hey, this stuff is
-        # already really fuzzy.
-        #
-        self._cache_quota_original = cache_quota
-        self._cache_quota = cache_quota - self._cache_quota_headroom
-        self._cache_lower_threshold = self._cache_quota / 2
-
-    # _get_cache_volume_size()
-    #
-    # Get the available space and total space for the volume on
-    # which the artifact cache is located.
-    #
-    # Returns:
-    #    (int): The total number of bytes on the volume
-    #    (int): The number of available bytes on the volume
-    #
-    # NOTE: We use this stub to allow the test cases
-    #       to override what an artifact cache thinks
-    #       about it's disk size and available bytes.
-    #
-    def _get_cache_volume_size(self):
-        return utils._get_volume_size(self.context.artifactdir)
-

 # _configured_remote_artifact_cache_specs():
 #
buildstream/_cas/__init__.py:

@@ -17,5 +17,5 @@
 #  Authors:
 #        Tristan Van Berkom <tristan vanberkom codethink co uk>

-from .cascache import CASCache
+from .cascache import CASCache, CASQuota, CASCacheUsage
 from .casremote import CASRemote, CASRemoteSpec
buildstream/_cas/cascache.py:

@@ -32,17 +32,53 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from .._protos.buildstream.v2 import buildstream_pb2

 from .. import utils
-from .._exceptions import CASCacheError
+from .._exceptions import CASCacheError, LoadError, LoadErrorReason
+from .._message import Message, MessageType

 from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate

 _BUFFER_SIZE = 65536


+CACHE_SIZE_FILE = "cache_size"
+
+
+# CASCacheUsage
+#
+# A simple object to report the current CAS cache usage details.
+#
+# Note that this uses the user configured cache quota
+# rather than the internal quota with protective headroom
+# removed, to provide a more sensible value to display to
+# the user.
+#
+# Args:
+#    cas (CASQuota): The CAS cache to get the status of
+#
+class CASCacheUsage():
+
+    def __init__(self, casquota):
+        self.quota_config = casquota._config_cache_quota          # Configured quota
+        self.quota_size = casquota._cache_quota_original          # Resolved cache quota in bytes
+        self.used_size = casquota.get_cache_size()                # Size used by artifacts in bytes
+        self.used_percent = 0                                # Percentage of the quota used
+        if self.quota_size is not None:
+            self.used_percent = int(self.used_size * 100 / self.quota_size)
+
+    # Formattable into a human readable string
+    #
+    def __str__(self):
+        return "{} / {} ({}%)" \
+            .format(utils._pretty_size(self.used_size, dec_places=1),
+                    self.quota_config,
+                    self.used_percent)
+
+
 # A CASCache manages a CAS repository as specified in the Remote Execution API.
 #
 # Args:
 #     path (str): The root directory for the CAS repository
+#     cache_quota (int): User configured cache quota
 #
 class CASCache():

@@ -459,16 +495,6 @@ class CASCache():
         except FileNotFoundError as e:
             raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e

-    # calculate_cache_size()
-    #
-    # Return the real disk usage of the CAS cache.
-    #
-    # Returns:
-    #    (int): The size of the cache.
-    #
-    def calculate_cache_size(self):
-        return utils._get_dir_size(self.casdir)
-
     # list_refs():
     #
     # List refs in Least Recently Modified (LRM) order.
@@ -1043,6 +1069,240 @@ class CASCache():
         batch.send()


+class CASQuota:
+    def __init__(self, context):
+        self.cas = context.get_cascache()
+        self.casdir = self.cas.casdir
+        self._config_cache_quota = context.config_cache_quota
+        self._cache_size = None               # The current cache size, sometimes it's an estimate
+        self._cache_quota = None              # The cache quota
+        self._cache_quota_original = None     # The cache quota as specified by the user, in bytes
+        self._cache_quota_headroom = None
+        self._cache_lower_threshold = None    # The target cache size for a cleanup
+        self.available_space = None
+
+        self._message = context.message
+
+        self._calculate_cache_quota()
+
+    # compute_cache_size()
+    #
+    # Computes the real artifact cache size by calling
+    # the abstract calculate_cache_size() method.
+    #
+    # Returns:
+    #    (int): The size of the artifact cache.
+    #
+    def compute_cache_size(self):
+        old_cache_size = self._cache_size
+        new_cache_size = self.calculate_cache_size()
+
+        if old_cache_size != new_cache_size:
+            self._cache_size = new_cache_size
+
+        return self._cache_size
+
+    # calculate_cache_size()
+    #
+    # Return the real disk usage of the CAS cache.
+    #
+    # Returns:
+    #    (int): The size of the cache.
+    #
+    def calculate_cache_size(self):
+        return utils._get_dir_size(self.casdir)
+
+    # get_cache_size()
+    #
+    # Fetches the cached size of the cache, this is sometimes
+    # an estimate and periodically adjusted to the real size
+    # when a cache size calculation job runs.
+    #
+    # When it is an estimate, the value is either correct, or
+    # it is greater than the actual cache size.
+    #
+    # Returns:
+    #     (int) An approximation of the artifact cache size, in bytes.
+    #
+    def get_cache_size(self):
+
+        # If we don't currently have an estimate, figure out the real cache size.
+        if self._cache_size is None:
+            stored_size = self._read_cache_size()
+            if stored_size is not None:
+                self._cache_size = stored_size
+            else:
+                self._cache_size = self.compute_cache_size()
+
+        return self._cache_size
+
+    # set_cache_size()
+    #
+    # Forcefully set the overall cache size.
+    #
+    # This is used to update the size in the main process after
+    # having calculated in a cleanup or a cache size calculation job.
+    #
+    # Args:
+    #     cache_size (int): The size to set.
+    #
+    def set_cache_size(self, cache_size):
+
+        assert cache_size is not None
+
+        self._cache_size = cache_size
+        self._write_cache_size(self._cache_size)
+
+    # full()
+    #
+    # Checks if the artifact cache is full, either
+    # because the user configured quota has been exceeded
+    # or because the underlying disk is almost full.
+    #
+    # Returns:
+    #    (bool): True if the artifact cache is full
+    #
+    def full(self):
+
+        if self.get_cache_size() > self._cache_quota:
+            return True
+
+        _, volume_avail = self._get_cache_volume_size()
+        if volume_avail < self._cache_quota_headroom:
+            return True
+
+        return False
+
+    ################################################
+    #             Local Private Methods            #
+    ################################################
+
+    # _read_cache_size()
+    #
+    # Reads and returns the size of the artifact cache that's stored in the
+    # cache's size file
+    #
+    # Returns:
+    #    (int): The size of the artifact cache, as recorded in the file
+    #
+    def _read_cache_size(self):
+        size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
+
+        if not os.path.exists(size_file_path):
+            return None
+
+        with open(size_file_path, "r") as f:
+            size = f.read()
+
+        try:
+            num_size = int(size)
+        except ValueError as e:
+            raise CASCacheError("Size '{}' parsed from '{}' was not an integer".format(
+                size, size_file_path)) from e
+
+        return num_size
+
+    # _write_cache_size()
+    #
+    # Writes the given size of the artifact to the cache's size file
+    #
+    # Args:
+    #    size (int): The size of the artifact cache to record
+    #
+    def _write_cache_size(self, size):
+        assert isinstance(size, int)
+        size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
+        with utils.save_file_atomic(size_file_path, "w") as f:
+            f.write(str(size))
+
+    # _get_cache_volume_size()
+    #
+    # Get the available space and total space for the volume on
+    # which the artifact cache is located.
+    #
+    # Returns:
+    #    (int): The total number of bytes on the volume
+    #    (int): The number of available bytes on the volume
+    #
+    # NOTE: We use this stub to allow the test cases
+    #       to override what an artifact cache thinks
+    #       about it's disk size and available bytes.
+    #
+    def _get_cache_volume_size(self):
+        return utils._get_volume_size(self.casdir)
+
+    # _calculate_cache_quota()
+    #
+    # Calculates and sets the cache quota and lower threshold based on the
+    # quota set in Context.
+    # It checks that the quota is both a valid _expression_, and that there is
+    # enough disk space to satisfy that quota
+    #
+    def _calculate_cache_quota(self):
+        # Headroom intended to give BuildStream a bit of leeway.
+        # This acts as the minimum size of cache_quota and also
+        # is taken from the user requested cache_quota.
+        #
+        if 'BST_TEST_SUITE' in os.environ:
+            self._cache_quota_headroom = 0
+        else:
+            self._cache_quota_headroom = 2e9
+
+        total_size, available_space = self._get_cache_volume_size()
+        cache_size = self.get_cache_size()
+        self.available_space = available_space
+
+        # Ensure system has enough storage for the cache_quota
+        #
+        # If cache_quota is none, set it to the maximum it could possibly be.
+        #
+        # Also check that cache_quota is at least as large as our headroom.
+        #
+        cache_quota = self._config_cache_quota
+        if cache_quota is None:  # Infinity, set to max system storage
+            cache_quota = cache_size + available_space
+        if cache_quota < self._cache_quota_headroom:  # Check minimum
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
+                            "BuildStream requires a minimum cache quota of 2G.")
+        elif cache_quota > total_size:
+            # A quota greater than the total disk size is certianly an error
+            raise CASCacheError("Your system does not have enough available " +
+                                "space to support the cache quota specified.",
+                                detail=("You have specified a quota of {quota} total disk space.\n" +
+                                        "The filesystem containing {local_cache_path} only " +
+                                        "has {total_size} total disk space.")
+                                .format(
+                                    quota=self._config_cache_quota,
+                                    local_cache_path=self.casdir,
+                                    total_size=utils._pretty_size(total_size)),
+                                reason='insufficient-storage-for-quota')
+
+        elif cache_quota > cache_size + available_space:
+            self._message(Message(
+                None,
+                MessageType.WARN,
+                "Your system does not have enough available " +
+                "space to support the cache quota specified.",
+                detail=("You have specified a quota of {quota} total disk space.\n" +
+                        "The filesystem containing {local_cache_path} only " +
+                        "has {available_size} available.")
+                .format(quota=self._config_cache_quota,
+                        local_cache_path=self.casdir,
+                        available_size=self.available_space)))
+
+        # Place a slight headroom (2e9 (2GB) on the cache_quota) into
+        # cache_quota to try and avoid exceptions.
+        #
+        # Of course, we might still end up running out during a build
+        # if we end up writing more than 2G, but hey, this stuff is
+        # already really fuzzy.
+        #
+        self._cache_quota_original = cache_quota
+        self._cache_quota = cache_quota - self._cache_quota_headroom
+        self._cache_lower_threshold = self._cache_quota / 2
+
+
 def _grouper(iterable, n):
     while True:
         try:
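
The CASQuota class added above takes over, almost line for line, the bookkeeping that ArtifactCache used to do, with the size file and quota now anchored on the CAS directory. As a rough illustration of the intended lifecycle (a sketch based on the methods shown above, not code from the diff):

    # Sketch only.
    quota = CASQuota(context)          # resolves the quota; raises LoadError for an
                                       # invalid or too-small quota, CASCacheError when the
                                       # volume cannot hold it ('insufficient-storage-for-quota')

    size = quota.get_cache_size()      # estimate, seeded from <casdir>/cache_size
    size = quota.compute_cache_size()  # real size via utils._get_dir_size()
    quota.set_cache_size(size)         # persisted atomically back to the size file

    if quota.full():                   # quota exceeded, or volume below the headroom
        pass                           # the caller would run ArtifactCache.clean()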
buildstream/_context.py:

@@ -30,8 +30,8 @@ from . import _yaml
 from ._exceptions import LoadError, LoadErrorReason, BstError
 from ._message import Message, MessageType
 from ._profile import Topics, profile_start, profile_end
-from ._artifactcache import ArtifactCache, ArtifactCacheUsage
-from ._cas import CASCache
+from ._artifactcache import ArtifactCache
+from ._cas import CASCache, CASQuota, CASCacheUsage
 from ._workspaces import Workspaces, WorkspaceProjectCache
 from .plugin import _plugin_lookup
 from .sandbox import SandboxRemote
@@ -70,15 +70,15 @@ class Context():
         # The directory for CAS
         self.casdir = None

+        # Extract directory
+        self.extractdir = None
+
         # The directory for temporary files
         self.tmpdir = None

         # Default root location for workspaces
         self.workspacedir = None

-        # The local binary artifact cache directory
-        self.artifactdir = None
-
         # The locations from which to push and pull prebuilt artifacts
         self.artifact_cache_specs = None

@@ -155,6 +155,7 @@ class Context():
         self._log_handle = None
         self._log_filename = None
         self._cascache = None
+        self._casquota = None
         self._directory = directory

     # load()
@@ -198,18 +199,16 @@ class Context():
                             "builddir is obsolete, use cachedir")

         if defaults.get('artifactdir'):
-            print("artifactdir is deprecated, use cachedir")
-        else:
-            defaults['artifactdir'] = os.path.join(defaults['cachedir'], 'artifacts')
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "artifactdir is obsolete")

         _yaml.node_validate(defaults, [
-            'cachedir', 'sourcedir', 'builddir', 'artifactdir', 'logdir',
-            'scheduler', 'artifacts', 'logging', 'projects',
-            'cache', 'prompt', 'workspacedir', 'remote-execution',
+            'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler',
+            'artifacts', 'logging', 'projects', 'cache', 'prompt',
+            'workspacedir', 'remote-execution',
         ])

-        for directory in ['cachedir', 'sourcedir', 'artifactdir', 'logdir',
-                          'workspacedir']:
+        for directory in ['cachedir', 'sourcedir', 'logdir', 'workspacedir']:
             # Allow the ~ tilde expansion and any environment variables in
             # path specification in the config files.
             #
@@ -220,18 +219,32 @@ class Context():
             setattr(self, directory, path)

         # add directories not set by users
+        self.extractdir = os.path.join(self.cachedir, 'extract')
         self.tmpdir = os.path.join(self.cachedir, 'tmp')
         self.casdir = os.path.join(self.cachedir, 'cas')
         self.builddir = os.path.join(self.cachedir, 'build')

+        # Move old artifact cas to cas if it exists and create symlink
+        old_casdir = os.path.join(self.cachedir, 'artifacts', 'cas')
+        if os.path.exists(old_casdir) and not os.path.islink(old_casdir):
+            os.rename(old_casdir, self.casdir)
+            os.symlink(self.casdir, old_casdir)
+
         # Load quota configuration
-        # We need to find the first existing directory in the path of
-        # our artifactdir - the artifactdir may not have been created
-        # yet.
+        # We need to find the first existing directory in the path of our
+        # cachedir - the cachedir may not have been created yet.
         cache = _yaml.node_get(defaults, Mapping, 'cache')
         _yaml.node_validate(cache, ['quota', 'pull-buildtrees', 'cache-buildtrees'])

-        self.config_cache_quota = _yaml.node_get(cache, str, 'quota')
+        config_cache_quota = _yaml.node_get(cache, str, 'quota')
+        try:
+            self.config_cache_quota = utils._parse_size(config_cache_quota,
+                                                        self.casdir)
+        except utils.UtilError as e:
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
+                            "\nValid values are, for example: 800M 10G 1T 50%\n"
+                            .format(str(e))) from e

         # Load artifact share configuration
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
@@ -307,15 +320,15 @@ class Context():

         return self._artifactcache

-    # get_artifact_cache_usage()
+    # get_cache_usage()
     #
     # Fetches the current usage of the artifact cache
     #
     # Returns:
-    #     (ArtifactCacheUsage): The current status
+    #     (CASCacheUsage): The current status
     #
-    def get_artifact_cache_usage(self):
-        return ArtifactCacheUsage(self.artifactcache)
+    def get_cache_usage(self):
+        return CASCacheUsage(self.get_casquota())

     # add_project():
     #
@@ -688,6 +701,11 @@ class Context():
             self._cascache = CASCache(self.cachedir)
         return self._cascache

+    def get_casquota(self):
+        if self._casquota is None:
+            self._casquota = CASQuota(self)
+        return self._casquota
+

 # _node_get_option_str()
 #
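
Alongside the API changes, Context now derives every cache location from one root and rejects the old artifactdir key outright. Roughly, under the new layout (illustrative paths only, not taken verbatim from the diff):

    # Derived from a single user-configured cachedir:
    extractdir = os.path.join(cachedir, 'extract')   # was <artifactdir>/extract
    tmpdir     = os.path.join(cachedir, 'tmp')       # now also used when staging sources
    casdir     = os.path.join(cachedir, 'cas')       # CAS objects, refs and the cache_size file
    builddir   = os.path.join(cachedir, 'build')

    # One-time migration: an existing <cachedir>/artifacts/cas that is not already a
    # symlink is renamed to <cachedir>/cas, and a symlink is left at the old path.

The cache quota string is also parsed once at load time ('800M', '10G', '1T', '50%', ...), so config_cache_quota is now a byte count rather than the raw configuration string.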
buildstream/_frontend/status.py:

@@ -404,7 +404,7 @@ class _StatusHeader():
         #
         #  ~~~~~~ cache: 69% ~~~~~~
         #
-        usage = self._context.get_artifact_cache_usage()
+        usage = self._context.get_cache_usage()
         usage_percent = '{}%'.format(usage.used_percent)

         size = 21
buildstream/_frontend/widget.py:

@@ -486,7 +486,7 @@ class LogLine(Widget):
         values["Session Start"] = starttime.strftime('%A, %d-%m-%Y at %H:%M:%S')
         values["Project"] = "{} ({})".format(project.name, project.directory)
         values["Targets"] = ", ".join([t.name for t in stream.targets])
-        values["Cache Usage"] = "{}".format(context.get_artifact_cache_usage())
+        values["Cache Usage"] = "{}".format(context.get_cache_usage())
         text += self._format_values(values)

         # User configurations
@@ -495,10 +495,10 @@ class LogLine(Widget):
         values = OrderedDict()
         values["Configuration File"] = \
             "Default Configuration" if not context.config_origin else context.config_origin
+        values["Cache directory"] = context.cachedir
         values["Log Files"] = context.logdir
         values["Source Mirrors"] = context.sourcedir
         values["Build Area"] = context.builddir
-        values["Artifact Cache"] = context.artifactdir
         values["Strict Build Plan"] = "Yes" if context.get_strict() else "No"
         values["Maximum Fetch Tasks"] = context.sched_fetchers
         values["Maximum Build Tasks"] = context.sched_builders
buildstream/_scheduler/jobs/cachesizejob.py:

@@ -25,14 +25,14 @@ class CacheSizeJob(Job):
         self._complete_cb = complete_cb

         context = self._scheduler.context
-        self._artifacts = context.artifactcache
+        self._casquota = context.get_casquota()

     def child_process(self):
-        return self._artifacts.compute_cache_size()
+        return self._casquota.compute_cache_size()

     def parent_complete(self, status, result):
         if status == JobStatus.OK:
-            self._artifacts.set_cache_size(result)
+            self._casquota.set_cache_size(result)

         if self._complete_cb:
             self._complete_cb(status, result)
buildstream/_scheduler/jobs/cleanupjob.py:

@@ -25,27 +25,27 @@ class CleanupJob(Job):
         self._complete_cb = complete_cb

         context = self._scheduler.context
+        self._casquota = context.get_casquota()
         self._artifacts = context.artifactcache

     def child_process(self):
         def progress():
             self.send_message('update-cache-size',
-                              self._artifacts.get_cache_size())
+                              self._casquota.get_cache_size())
         return self._artifacts.clean(progress)

     def handle_message(self, message_type, message):
-
         # Update the cache size in the main process as we go,
         # this provides better feedback in the UI.
         if message_type == 'update-cache-size':
-            self._artifacts.set_cache_size(message)
+            self._casquota.set_cache_size(message)
             return True

         return False

     def parent_complete(self, status, result):
         if status == JobStatus.OK:
-            self._artifacts.set_cache_size(result)
+            self._casquota.set_cache_size(result)

         if self._complete_cb:
             self._complete_cb(status, result)
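
Both scheduler jobs now talk to the CASQuota held by the Context instead of the ArtifactCache, but the process split stays the same: the expensive work runs in a child process, and only the resulting number is applied in the main process, where set_cache_size() persists it. A condensed sketch of that round trip, restating the methods shown above rather than adding anything new:

    # Child side: clean() periodically calls progress(), which ships the
    # current estimate back to the parent.
    def progress():
        self.send_message('update-cache-size', self._casquota.get_cache_size())

    # Parent side: interim updates arrive via handle_message(), and the final
    # size returned by clean() is applied in parent_complete().
    if message_type == 'update-cache-size':
        self._casquota.set_cache_size(message)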
buildstream/element.py:

@@ -1435,7 +1435,7 @@ class Element(Plugin):
         # It's advantageous to have this temporary directory on
         # the same file system as the rest of our cache.
         with self.timed_activity("Staging sources", silent_nested=True), \
-            utils._tempdir(dir=context.artifactdir, prefix='staging-temp') as temp_staging_directory:
+            utils._tempdir(dir=context.tmpdir, prefix='staging-temp') as temp_staging_directory:

             import_dir = temp_staging_directory

tests/artifactcache/cache_size.py:

@@ -50,7 +50,6 @@ def test_cache_size_write(cli, tmpdir):
     create_project(project_dir)

     # Artifact cache must be in a known place
-    artifactdir = os.path.join(project_dir, "artifacts")
     casdir = os.path.join(project_dir, "cas")
     cli.configure({"cachedir": project_dir})

@@ -59,7 +58,7 @@ def test_cache_size_write(cli, tmpdir):
     res.assert_success()

     # Inspect the artifact cache
-    sizefile = os.path.join(artifactdir, CACHE_SIZE_FILE)
+    sizefile = os.path.join(casdir, CACHE_SIZE_FILE)
     assert os.path.isfile(sizefile)
     with open(sizefile, "r") as f:
         size_data = f.read()
@@ -82,11 +81,11 @@ def test_quota_over_1024T(cli, tmpdir):
     _yaml.dump({'name': 'main'}, str(project.join("project.conf")))

     volume_space_patch = mock.patch(
-        "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
+        "buildstream._cas.CASQuota._get_cache_volume_size",
         autospec=True,
         return_value=(1025 * TiB, 1025 * TiB)
     )

     with volume_space_patch:
         result = cli.run(project, args=["build", "file.bst"])
-        result.assert_main_error(ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')
+        result.assert_main_error(ErrorDomain.CAS, 'insufficient-storage-for-quota')
tests/artifactcache/expiry.py:

@@ -341,7 +341,7 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):
     ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),

     # Not enough space on disk even if you cleaned up
-    ("11K", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota'),
+    ("11K", ErrorDomain.CAS, 'insufficient-storage-for-quota'),

     # Not enough space for these caches
     ("7K", 'warning', 'Your system does not have enough available'),
@@ -355,7 +355,7 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas
     cli.configure({
         'cache': {
             'quota': quota,
-        }
+        },
     })

     # We patch how we get space information
@@ -373,13 +373,13 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas
         total_space = 10000

     volume_space_patch = mock.patch(
-        "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
+        "buildstream.utils._get_volume_size",
         autospec=True,
         return_value=(total_space, free_space),
     )

     cache_size_patch = mock.patch(
-        "buildstream._artifactcache.ArtifactCache.get_cache_size",
+        "buildstream._cas.CASQuota.get_cache_size",
         autospec=True,
         return_value=0,
     )
@@ -417,7 +417,7 @@ def test_extract_expiry(cli, datafiles, tmpdir):
     res.assert_success()

     # Get a snapshot of the extracts in advance
-    extractdir = os.path.join(project, 'cache', 'artifacts', 'extract', 'test', 'target')
+    extractdir = os.path.join(project, 'cache', 'extract', 'test', 'target')
     extracts = os.listdir(extractdir)
     assert(len(extracts) == 1)
     extract = os.path.join(extractdir, extracts[0])
tests/frontend/pull.py:

@@ -510,8 +510,8 @@ def test_pull_access_rights(caplog, cli, tmpdir, datafiles):

         shutil.rmtree(checkout)

-        artifacts = os.path.join(cli.directory, 'artifacts')
-        shutil.rmtree(artifacts)
+        casdir = os.path.join(cli.directory, 'cas')
+        shutil.rmtree(casdir)

         result = cli.run(project=project, args=['artifact', 'pull', 'compose-all.bst'])
         result.assert_success()
| ... | ... | @@ -87,7 +87,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): | 
| 87 | 87 | 
         create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
 | 
| 88 | 88 | 
         cli.configure({
 | 
| 89 | 89 | 
             'artifacts': {'url': share1.repo, 'push': True},
 | 
| 90 | 
-            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
 | 
|
| 90 | 
+            'cachedir': str(tmpdir)
 | 
|
| 91 | 91 | 
         })
 | 
| 92 | 92 | 
 | 
| 93 | 93 | 
         # Build autotools element with cache-buildtrees set via the
 | 
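The hunks that follow rebuild buildtree paths under the configured root cache directory rather than under a separate artifactdir. A short sketch of the layout the updated tests assume, with the directory names taken from the diff (the helper itself is illustrative, not BuildStream API):

    import os

    def local_cache_paths(cachedir):
        # Under the single root cache directory, CAS objects live in 'cas'
        # and extracted artifact trees under 'extract/<project>/<element>/<hash>'.
        return {
            'cas': os.path.join(cachedir, 'cas'),
            'extract': os.path.join(cachedir, 'extract'),
        }

    paths = local_cache_paths('/tmp/bst-cache')
    assert paths['extract'] == '/tmp/bst-cache/extract'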
| ... | ... | @@ -103,20 +103,22 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): | 
| 103 | 103 | 
         # to not cache buildtrees
 | 
| 104 | 104 | 
         cache_key = cli.get_element_key(project, element_name)
 | 
| 105 | 105 | 
         elementdigest = share1.has_artifact('test', element_name, cache_key)
 | 
| 106 | 
-        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
 | 
|
| 106 | 
+        buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
 | 
|
| 107 | 107 | 
                                     elementdigest.hash, 'buildtree')
 | 
| 108 | 108 | 
         assert os.path.isdir(buildtreedir)
 | 
| 109 | 109 | 
         assert not os.listdir(buildtreedir)
 | 
| 110 | 110 | 
 | 
| 111 | 111 | 
         # Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
 | 
| 112 | 112 | 
         # it was cached in share1 as expected with an empty buildtree dir
 | 
| 113 | 
-        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
 | 
|
| 113 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
 | 
|
| 114 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
 | 
|
| 114 | 115 | 
         assert cli.get_element_state(project, element_name) != 'cached'
 | 
| 115 | 116 | 
         result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
 | 
| 116 | 117 | 
         assert element_name in result.get_pulled_elements()
 | 
| 117 | 118 | 
         assert os.path.isdir(buildtreedir)
 | 
| 118 | 119 | 
         assert not os.listdir(buildtreedir)
 | 
| 119 | 
-        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
 | 
|
| 120 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
 | 
|
| 121 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
 | 
|
| 120 | 122 | 
 | 
| 121 | 123 | 
         # Assert that the default behaviour of pull, which does not include buildtrees, behaves
 | 
| 122 | 124 | 
         # as expected for the artifact in share1 that was purposely cached with an empty one. As such the
 | 
| ... | ... | @@ -125,13 +127,14 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): | 
| 125 | 127 | 
         result = cli.run(project=project, args=['artifact', 'pull', element_name])
 | 
| 126 | 128 | 
         assert element_name in result.get_pulled_elements()
 | 
| 127 | 129 | 
         assert not os.path.isdir(buildtreedir)
 | 
| 128 | 
-        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
 | 
|
| 130 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
 | 
|
| 131 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
 | 
|
| 129 | 132 | 
 | 
| 130 | 133 | 
         # Repeat building the artifacts, this time with the default behaviour of caching buildtrees,
 | 
| 131 | 134 | 
         # as such the buildtree dir should not be empty
 | 
| 132 | 135 | 
         cli.configure({
 | 
| 133 | 136 | 
             'artifacts': {'url': share2.repo, 'push': True},
 | 
| 134 | 
-            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
 | 
|
| 137 | 
+            'cachedir': str(tmpdir)
 | 
|
| 135 | 138 | 
         })
 | 
| 136 | 139 | 
         result = cli.run(project=project, args=['build', element_name])
 | 
| 137 | 140 | 
         assert result.exit_code == 0
 | 
| ... | ... | @@ -140,27 +143,29 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): | 
| 140 | 143 | 
 | 
| 141 | 144 | 
         # Cache key will be the same however the digest hash will have changed as expected, so reconstruct paths
 | 
| 142 | 145 | 
         elementdigest = share2.has_artifact('test', element_name, cache_key)
 | 
| 143 | 
-        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
 | 
|
| 146 | 
+        buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
 | 
|
| 144 | 147 | 
                                     elementdigest.hash, 'buildtree')
 | 
| 145 | 148 | 
         assert os.path.isdir(buildtreedir)
 | 
| 146 | 149 | 
         assert os.listdir(buildtreedir) is not None
 | 
| 147 | 150 | 
 | 
| 148 | 151 | 
         # Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
 | 
| 149 | 152 | 
         # that it was cached in share2 as expected with a populated buildtree dir
 | 
| 150 | 
-        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
 | 
|
| 153 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
 | 
|
| 154 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
 | 
|
| 151 | 155 | 
         assert cli.get_element_state(project, element_name) != 'cached'
 | 
| 152 | 156 | 
         result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
 | 
| 153 | 157 | 
         assert element_name in result.get_pulled_elements()
 | 
| 154 | 158 | 
         assert os.path.isdir(buildtreedir)
 | 
| 155 | 159 | 
         assert os.listdir(buildtreedir) is not None
 | 
| 156 | 
-        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
 | 
|
| 160 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
 | 
|
| 161 | 
+        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
 | 
|
| 157 | 162 | 
 | 
| 158 | 163 | 
         # Clarify that the user config option for cache-buildtrees works as the cli
 | 
| 159 | 164 | 
         # main option does. Point to share3 which does not have the artifacts cached to force
 | 
| 160 | 165 | 
         # a build
 | 
| 161 | 166 | 
         cli.configure({
 | 
| 162 | 167 | 
             'artifacts': {'url': share3.repo, 'push': True},
 | 
| 163 | 
-            'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
 | 
|
| 168 | 
+            # 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
 | 
|
| 164 | 169 | 
             'cache': {'cache-buildtrees': 'never'}
 | 
| 165 | 170 | 
         })
 | 
| 166 | 171 | 
         result = cli.run(project=project, args=['build', element_name])
 | 
| ... | ... | @@ -168,7 +173,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): | 
| 168 | 173 | 
         assert cli.get_element_state(project, element_name) == 'cached'
 | 
| 169 | 174 | 
         cache_key = cli.get_element_key(project, element_name)
 | 
| 170 | 175 | 
         elementdigest = share3.has_artifact('test', element_name, cache_key)
 | 
| 171 | 
-        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
 | 
|
| 176 | 
+        buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
 | 
|
| 172 | 177 | 
                                     elementdigest.hash, 'buildtree')
 | 
| 173 | 178 | 
         assert os.path.isdir(buildtreedir)
 | 
| 174 | 179 | 
         assert not os.listdir(buildtreedir)
 | 
| ... | ... | @@ -21,8 +21,8 @@ DATA_DIR = os.path.join( | 
| 21 | 21 | 
 # to false, which is the default user context. The cache has to be
 | 
| 22 | 22 | 
 # cleared as just forcefully removing the refpath leaves dangling objects.
 | 
| 23 | 23 | 
 def default_state(cli, tmpdir, share):
 | 
| 24 | 
-    shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
 | 
|
| 25 | 24 | 
     shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
 | 
| 25 | 
+    shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
 | 
|
| 26 | 26 | 
     cli.configure({
 | 
| 27 | 27 | 
         'artifacts': {'url': share.repo, 'push': False},
 | 
| 28 | 28 | 
         'cachedir': str(tmpdir),
 | 
| ... | ... | @@ -75,8 +75,9 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles): | 
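default_state() now has to clear both subtrees so that no dangling CAS objects survive a reset. The same pattern recurs throughout these tests; a small illustrative helper under the same layout assumptions (not part of the test suite):

    import os
    import shutil

    def wipe_local_cache(cachedir):
        # Remove both the CAS object store and the extracted artifact trees;
        # removing only one of them leaves the local cache inconsistent.
        for subdir in ('cas', 'extract'):
            path = os.path.join(cachedir, subdir)
            if os.path.isdir(path):
                shutil.rmtree(path)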
| 75 | 75 | 
         result = cli2.run(project=project, args=['artifact', 'pull', element_name])
 | 
| 76 | 76 | 
         assert element_name in result.get_pulled_elements()
 | 
| 77 | 77 | 
         elementdigest = share1.has_artifact('test', element_name, cli2.get_element_key(project, element_name))
 | 
| 78 | 
-        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
 | 
|
| 78 | 
+        buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
 | 
|
| 79 | 79 | 
                                     elementdigest.hash, 'buildtree')
 | 
| 80 | 
+        print("elementdigest: {}".format(elementdigest.hash))
 | 
|
| 80 | 81 | 
         assert not os.path.isdir(buildtreedir)
 | 
| 81 | 82 | 
         result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
 | 
| 82 | 83 | 
         assert element_name in result.get_pulled_elements()
 | 
| ... | ... | @@ -62,7 +62,7 @@ def test_buildtree_staged_warn_empty_cached(cli_integration, tmpdir, datafiles): | 
| 62 | 62 | 
     # Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
 | 
| 63 | 63 | 
     # caching an empty buildtree
 | 
| 64 | 64 | 
     cli_integration.configure({
 | 
| 65 | 
-        'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
 | 
|
| 65 | 
+        'cachedir': str(tmpdir)
 | 
|
| 66 | 66 | 
     })
 | 
| 67 | 67 | 
 | 
| 68 | 68 | 
     res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
 | 
| ... | ... | @@ -139,7 +139,7 @@ def test_buildtree_from_failure_option_never(cli_integration, tmpdir, datafiles) | 
| 139 | 139 | 
     # Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
 | 
| 140 | 140 | 
     # caching an empty buildtree
 | 
| 141 | 141 | 
     cli_integration.configure({
 | 
| 142 | 
-        'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
 | 
|
| 142 | 
+        'cachedir': str(tmpdir)
 | 
|
| 143 | 143 | 
     })
 | 
| 144 | 144 | 
 | 
| 145 | 145 | 
     res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
 | 
| ... | ... | @@ -163,7 +163,7 @@ def test_buildtree_from_failure_option_failure(cli_integration, tmpdir, datafile | 
| 163 | 163 | 
     # default behaviour (which is always) as the buildtree will explicitly have been
 | 
| 164 | 164 | 
     # cached with content.
 | 
| 165 | 165 | 
     cli_integration.configure({
 | 
| 166 | 
-        'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
 | 
|
| 166 | 
+        'cachedir': str(tmpdir)
 | 
|
| 167 | 167 | 
     })
 | 
| 168 | 168 | 
 | 
| 169 | 169 | 
     res = cli_integration.run(project=project, args=['--cache-buildtrees', 'failure', 'build', element_name])
 | 
| ... | ... | @@ -195,10 +195,7 @@ def test_buildtree_pulled(cli, tmpdir, datafiles): | 
| 195 | 195 | 
         assert cli.get_element_state(project, element_name) == 'cached'
 | 
| 196 | 196 | 
 | 
| 197 | 197 | 
         # Discard the cache
 | 
| 198 | 
-        cli.configure({
 | 
|
| 199 | 
-            'artifacts': {'url': share.repo, 'push': True},
 | 
|
| 200 | 
-            'artifactdir': os.path.join(cli.directory, 'artifacts2')
 | 
|
| 201 | 
-        })
 | 
|
| 198 | 
+        shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
 | 
|
| 202 | 199 | 
         assert cli.get_element_state(project, element_name) != 'cached'
 | 
| 203 | 200 | 
 | 
| 204 | 201 | 
         # Pull from cache, ensuring cli options is set to pull the buildtree
 | 
| ... | ... | @@ -231,7 +228,6 @@ def test_buildtree_options(cli, tmpdir, datafiles): | 
| 231 | 228 | 
         assert share.has_artifact('test', element_name, cli.get_element_key(project, element_name))
 | 
| 232 | 229 | 
 | 
| 233 | 230 | 
         # Discard the cache
 | 
| 234 | 
-        shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'artifacts')))
 | 
|
| 235 | 231 | 
         shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
 | 
| 236 | 232 | 
         assert cli.get_element_state(project, element_name) != 'cached'
 | 
| 237 | 233 | 
 | 
| ... | ... | @@ -43,7 +43,7 @@ def test_context_load(context_fixture): | 
| 43 | 43 | 
     context.load(config=os.devnull)
 | 
| 44 | 44 | 
     assert(context.sourcedir == os.path.join(cache_home, 'buildstream', 'sources'))
 | 
| 45 | 45 | 
     assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build'))
 | 
| 46 | 
-    assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts'))
 | 
|
| 46 | 
+    assert(context.cachedir == os.path.join(cache_home, 'buildstream'))
 | 
|
| 47 | 47 | 
     assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs'))
 | 
| 48 | 48 | 
 | 
| 49 | 49 | 
 | 
| ... | ... | @@ -57,7 +57,7 @@ def test_context_load_envvar(context_fixture): | 
| 57 | 57 | 
     context.load(config=os.devnull)
 | 
| 58 | 58 | 
     assert(context.sourcedir == os.path.join('/', 'some', 'path', 'buildstream', 'sources'))
 | 
| 59 | 59 | 
     assert(context.builddir == os.path.join('/', 'some', 'path', 'buildstream', 'build'))
 | 
| 60 | 
-    assert(context.artifactdir == os.path.join('/', 'some', 'path', 'buildstream', 'artifacts'))
 | 
|
| 60 | 
+    assert(context.cachedir == os.path.join('/', 'some', 'path', 'buildstream'))
 | 
|
| 61 | 61 | 
     assert(context.logdir == os.path.join('/', 'some', 'path', 'buildstream', 'logs'))
 | 
| 62 | 62 | 
 | 
| 63 | 63 | 
     # Reset the environment variable
 | 
| ... | ... | @@ -79,7 +79,7 @@ def test_context_load_user_config(context_fixture, datafiles): | 
| 79 | 79 | 
 | 
| 80 | 80 | 
     assert(context.sourcedir == os.path.expanduser('~/pony'))
 | 
| 81 | 81 | 
     assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build'))
 | 
| 82 | 
-    assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts'))
 | 
|
| 82 | 
+    assert(context.cachedir == os.path.join(cache_home, 'buildstream'))
 | 
|
| 83 | 83 | 
     assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs'))
 | 
| 84 | 84 | 
 | 
| 85 | 85 | 
 | 
| ... | ... | @@ -16,7 +16,7 @@ def create_pipeline(tmpdir, basedir, target): | 
| 16 | 16 | 
     context = Context()
 | 
| 17 | 17 | 
     context.load(config=os.devnull)
 | 
| 18 | 18 | 
     context.deploydir = os.path.join(str(tmpdir), 'deploy')
 | 
| 19 | 
-    context.artifactdir = os.path.join(str(tmpdir), 'artifact')
 | 
|
| 19 | 
+    context.casdir = os.path.join(str(tmpdir), 'cas')
 | 
|
| 20 | 20 | 
     project = Project(basedir, context)
 | 
| 21 | 21 | 
 | 
| 22 | 22 | 
     def dummy_handler(message, context):
 | 
| ... | ... | @@ -46,7 +46,6 @@ class ArtifactShare(): | 
| 46 | 46 | 
         # in tests as a remote artifact push/pull configuration
 | 
| 47 | 47 | 
         #
 | 
| 48 | 48 | 
         self.repodir = os.path.join(self.directory, 'repo')
 | 
| 49 | 
-  | 
|
| 50 | 49 | 
         os.makedirs(self.repodir)
 | 
| 51 | 50 | 
 | 
| 52 | 51 | 
         self.cas = CASCache(self.repodir)
 | 
| ... | ... | @@ -171,7 +170,9 @@ class ArtifactShare(): | 
| 171 | 170 | 
 | 
| 172 | 171 | 
     def _mock_statvfs(self, path):
 | 
| 173 | 172 | 
         repo_size = 0
 | 
| 174 | 
-        for root, _, files in os.walk(self.repodir):
 | 
|
| 173 | 
+        for root, dirs, files in os.walk(self.repodir):
 | 
|
| 174 | 
+            for dirname in dirs:
 | 
|
| 175 | 
+                repo_size += os.path.getsize(os.path.join(root, dirname))
 | 
|
| 175 | 176 | 
             for filename in files:
 | 
| 176 | 177 | 
                 repo_size += os.path.getsize(os.path.join(root, filename))
 | 
| 177 | 178 | 
 | 
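The final hunk makes the mocked statvfs account for directory entries as well as regular files when sizing the repository. A standalone sketch of the same calculation (a free function rather than the ArtifactShare method):

    import os

    def tree_size(path):
        # Sum the reported size of every directory entry and regular file
        # below 'path', mirroring the walk in _mock_statvfs above.
        total = 0
        for root, dirs, files in os.walk(path):
            for name in dirs + files:
                total += os.path.getsize(os.path.join(root, name))
        return total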
