Jürg Billeter pushed to branch raoul/870-root-cache-dir at BuildStream / buildstream
Commits:

- 3df140b2 by Dor Askayo at 2019-02-19T16:01:16Z
- fec626e2 by Dor Askayo at 2019-02-19T16:01:16Z
- 52c0c185 by Phil Dawson at 2019-02-19T17:04:58Z
- c7f76268 by Raoul Hidalgo Charman at 2019-02-19T17:05:17Z
- 4b62b936 by Raoul Hidalgo Charman at 2019-02-19T17:05:17Z
- 5e10e2e8 by Raoul Hidalgo Charman at 2019-02-19T17:05:17Z
29 changed files:
- .gitignore
- buildstream/_artifactcache.py
- buildstream/_cas/__init__.py
- buildstream/_cas/cascache.py
- buildstream/_context.py
- buildstream/_frontend/status.py
- buildstream/_frontend/widget.py
- buildstream/_scheduler/jobs/cachesizejob.py
- buildstream/_scheduler/jobs/cleanupjob.py
- buildstream/data/userconfig.yaml
- buildstream/element.py
- buildstream/plugintestutils/runcli.py
- conftest.py
- doc/bst2html.py
- doc/sessions/running-commands.run
- setup.cfg
- tests/artifactcache/cache_size.py
- tests/artifactcache/expiry.py
- tests/artifactcache/junctions.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
- tests/frontend/pull.py
- tests/integration/artifact.py
- tests/integration/pullbuildtrees.py
- tests/integration/shellbuildtrees.py
- tests/integration/source-determinism.py
- tests/internals/context.py
- tests/internals/pluginloading.py
- tests/testutils/artifactshare.py
Changes:
.gitignore

@@ -2,6 +2,9 @@
 buildstream/**/*.pyc
 tests/**/*.pyc
 
+# Build output directory
+build
+
 # Setuptools distribution folder.
 /dist/
 
buildstream/_artifactcache.py

@@ -22,12 +22,12 @@ import os
 from collections.abc import Mapping
 
 from .types import _KeyStrength
-from ._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
+from ._exceptions import ArtifactError, CASError
 from ._message import Message, MessageType
 from . import utils
 from . import _yaml
 
-from ._cas import CASRemote, CASRemoteSpec
+from ._cas import CASRemote, CASRemoteSpec, CASCacheUsage
 from .storage._casbaseddirectory import CasBasedDirectory
 
 
@@ -46,39 +46,6 @@ class ArtifactCacheSpec(CASRemoteSpec):
     pass
 
 
-# ArtifactCacheUsage
-#
-# A simple object to report the current artifact cache
-# usage details.
-#
-# Note that this uses the user configured cache quota
-# rather than the internal quota with protective headroom
-# removed, to provide a more sensible value to display to
-# the user.
-#
-# Args:
-#    artifacts (ArtifactCache): The artifact cache to get the status of
-#
-class ArtifactCacheUsage():
-
-    def __init__(self, artifacts):
-        context = artifacts.context
-        self.quota_config = context.config_cache_quota       # Configured quota
-        self.quota_size = artifacts._cache_quota_original    # Resolved cache quota in bytes
-        self.used_size = artifacts.get_cache_size()          # Size used by artifacts in bytes
-        self.used_percent = 0                                # Percentage of the quota used
-        if self.quota_size is not None:
-            self.used_percent = int(self.used_size * 100 / self.quota_size)
-
-    # Formattable into a human readable string
-    #
-    def __str__(self):
-        return "{} / {} ({}%)" \
-            .format(utils._pretty_size(self.used_size, dec_places=1),
-                    self.quota_config,
-                    self.used_percent)
-
-
 # An ArtifactCache manages artifacts.
 #
 # Args:
@@ -87,19 +54,17 @@ class ArtifactCacheUsage():
 class ArtifactCache():
     def __init__(self, context):
         self.context = context
-        self.extractdir = os.path.join(context.artifactdir, 'extract')
+        self.extractdir = context.extractdir
 
         self.cas = context.get_cascache()
+        self.casquota = context.get_casquota()
+        self.casquota._calculate_cache_quota()
 
         self.global_remote_specs = []
         self.project_remote_specs = {}
 
         self._required_elements = set()       # The elements required for this session
-        self._cache_size = None               # The current cache size, sometimes it's an estimate
-        self._cache_quota = None              # The cache quota
-        self._cache_quota_original = None     # The cache quota as specified by the user, in bytes
-        self._cache_quota_headroom = None     # The headroom in bytes before reaching the quota or full disk
-        self._cache_lower_threshold = None    # The target cache size for a cleanup
+
         self._remotes_setup = False           # Check to prevent double-setup of remotes
 
         # Per-project list of _CASRemote instances.
@@ -110,8 +75,6 @@ class ArtifactCache():
 
         os.makedirs(self.extractdir, exist_ok=True)
 
-        self._calculate_cache_quota()
-
     # setup_remotes():
     #
     # Sets up which remotes to use
@@ -235,7 +198,7 @@ class ArtifactCache():
         space_saved = 0
 
         # Start off with an announcement with as much info as possible
-        volume_size, volume_avail = self._get_cache_volume_size()
+        volume_size, volume_avail = self.casquota._get_cache_volume_size()
         self._message(MessageType.STATUS, "Starting cache cleanup",
                       detail=("Elements required by the current build plan: {}\n" +
                               "User specified quota: {} ({})\n" +
@@ -243,8 +206,8 @@ class ArtifactCache():
                               "Cache volume: {} total, {} available")
                       .format(len(self._required_elements),
                               context.config_cache_quota,
-                              utils._pretty_size(self._cache_quota_original, dec_places=2),
-                              utils._pretty_size(self.get_cache_size(), dec_places=2),
+                              utils._pretty_size(self.casquota._cache_quota, dec_places=2),
+                              utils._pretty_size(self.casquota.get_cache_size(), dec_places=2),
                               utils._pretty_size(volume_size, dec_places=2),
                               utils._pretty_size(volume_avail, dec_places=2)))
 
@@ -261,9 +224,11 @@ class ArtifactCache():
         ])
 
         # Do a real computation of the cache size once, just in case
-        self.compute_cache_size()
+        self.casquota.compute_cache_size()
+        usage = CASCacheUsage(self.casquota)
+        self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
 
-        while self.get_cache_size() >= self._cache_lower_threshold:
+        while self.casquota.get_cache_size() >= self.casquota._cache_lower_threshold:
             try:
                 to_remove = artifacts.pop(0)
             except IndexError:
@@ -280,7 +245,7 @@ class ArtifactCache():
                     "Please increase the cache-quota in {} and/or make more disk space."
                     .format(removed_ref_count,
                             utils._pretty_size(space_saved, dec_places=2),
-                            utils._pretty_size(self.get_cache_size(), dec_places=2),
+                            utils._pretty_size(self.casquota.get_cache_size(), dec_places=2),
                             len(self._required_elements),
                             (context.config_origin or default_conf)))
 
@@ -306,7 +271,7 @@ class ArtifactCache():
                                   to_remove))
 
                 # Remove the size from the removed size
-                self.set_cache_size(self._cache_size - size)
+                self.casquota.set_cache_size(self.casquota._cache_size - size)
 
                 # User callback
                 #
@@ -322,29 +287,12 @@ class ArtifactCache():
                       "Cache usage is now: {}")
                       .format(removed_ref_count,
                               utils._pretty_size(space_saved, dec_places=2),
-                              utils._pretty_size(self.get_cache_size(), dec_places=2)))
-
-        return self.get_cache_size()
-
-    # compute_cache_size()
-    #
-    # Computes the real artifact cache size by calling
-    # the abstract calculate_cache_size() method.
-    #
-    # Returns:
-    #    (int): The size of the artifact cache.
-    #
-    def compute_cache_size(self):
-        old_cache_size = self._cache_size
-        new_cache_size = self.cas.calculate_cache_size()
-
-        if old_cache_size != new_cache_size:
-            self._cache_size = new_cache_size
+                              utils._pretty_size(self.casquota.get_cache_size(), dec_places=2)))
 
-            usage = ArtifactCacheUsage(self)
-            self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
+        return self.casquota.get_cache_size()
 
-        return self._cache_size
+    def full(self):
+        return self.casquota.full()
 
     # add_artifact_size()
     #
@@ -355,71 +303,10 @@ class ArtifactCache():
     #     artifact_size (int): The size to add.
     #
     def add_artifact_size(self, artifact_size):
-        cache_size = self.get_cache_size()
+        cache_size = self.casquota.get_cache_size()
         cache_size += artifact_size
 
-        self.set_cache_size(cache_size)
-
-    # get_cache_size()
-    #
-    # Fetches the cached size of the cache, this is sometimes
-    # an estimate and periodically adjusted to the real size
-    # when a cache size calculation job runs.
-    #
-    # When it is an estimate, the value is either correct, or
-    # it is greater than the actual cache size.
-    #
-    # Returns:
-    #     (int) An approximation of the artifact cache size, in bytes.
-    #
-    def get_cache_size(self):
-
-        # If we don't currently have an estimate, figure out the real cache size.
-        if self._cache_size is None:
-            stored_size = self._read_cache_size()
-            if stored_size is not None:
-                self._cache_size = stored_size
-            else:
-                self.compute_cache_size()
-
-        return self._cache_size
-
-    # set_cache_size()
-    #
-    # Forcefully set the overall cache size.
-    #
-    # This is used to update the size in the main process after
-    # having calculated in a cleanup or a cache size calculation job.
-    #
-    # Args:
-    #     cache_size (int): The size to set.
-    #
-    def set_cache_size(self, cache_size):
-
-        assert cache_size is not None
-
-        self._cache_size = cache_size
-        self._write_cache_size(self._cache_size)
-
-    # full()
-    #
-    # Checks if the artifact cache is full, either
-    # because the user configured quota has been exceeded
-    # or because the underlying disk is almost full.
-    #
-    # Returns:
-    #    (bool): True if the artifact cache is full
-    #
-    def full(self):
-
-        if self.get_cache_size() > self._cache_quota:
-            return True
-
-        _, volume_avail = self._get_cache_volume_size()
-        if volume_avail < self._cache_quota_headroom:
-            return True
-
-        return False
+        self.casquota.set_cache_size(cache_size)
 
     # preflight():
     #
@@ -885,142 +772,6 @@ class ArtifactCache():
         with self.context.timed_activity("Initializing remote caches", silent_nested=True):
             self.initialize_remotes(on_failure=remote_failed)
 
-    # _write_cache_size()
-    #
-    # Writes the given size of the artifact to the cache's size file
-    #
-    # Args:
-    #     size (int): The size of the artifact cache to record
-    #
-    def _write_cache_size(self, size):
-        assert isinstance(size, int)
-        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
-        with utils.save_file_atomic(size_file_path, "w") as f:
-            f.write(str(size))
-
-    # _read_cache_size()
-    #
-    # Reads and returns the size of the artifact cache that's stored in the
-    # cache's size file
-    #
-    # Returns:
-    #     (int): The size of the artifact cache, as recorded in the file
-    #
-    def _read_cache_size(self):
-        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
-
-        if not os.path.exists(size_file_path):
-            return None
-
-        with open(size_file_path, "r") as f:
-            size = f.read()
-
-        try:
-            num_size = int(size)
-        except ValueError as e:
-            raise ArtifactError("Size '{}' parsed from '{}' was not an integer".format(
-                size, size_file_path)) from e
-
-        return num_size
-
-    # _calculate_cache_quota()
-    #
-    # Calculates and sets the cache quota and lower threshold based on the
-    # quota set in Context.
-    # It checks that the quota is both a valid _expression_, and that there is
-    # enough disk space to satisfy that quota
-    #
-    def _calculate_cache_quota(self):
-        # Headroom intended to give BuildStream a bit of leeway.
-        # This acts as the minimum size of cache_quota and also
-        # is taken from the user requested cache_quota.
-        #
-        if 'BST_TEST_SUITE' in os.environ:
-            self._cache_quota_headroom = 0
-        else:
-            self._cache_quota_headroom = 2e9
-
-        try:
-            cache_quota = utils._parse_size(self.context.config_cache_quota,
-                                            self.context.artifactdir)
-        except utils.UtilError as e:
-            raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
-                            "\nValid values are, for example: 800M 10G 1T 50%\n"
-                            .format(str(e))) from e
-
-        total_size, available_space = self._get_cache_volume_size()
-        cache_size = self.get_cache_size()
-
-        # Ensure system has enough storage for the cache_quota
-        #
-        # If cache_quota is none, set it to the maximum it could possibly be.
-        #
-        # Also check that cache_quota is at least as large as our headroom.
-        #
-        if cache_quota is None:  # Infinity, set to max system storage
-            cache_quota = cache_size + available_space
-        if cache_quota < self._cache_quota_headroom:  # Check minimum
-            raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
-                            "BuildStream requires a minimum cache quota of 2G.")
-        elif cache_quota > total_size:
-            # A quota greater than the total disk size is certianly an error
-            raise ArtifactError("Your system does not have enough available " +
-                                "space to support the cache quota specified.",
-                                detail=("You have specified a quota of {quota} total disk space.\n" +
-                                        "The filesystem containing {local_cache_path} only " +
-                                        "has {total_size} total disk space.")
-                                .format(
-                                    quota=self.context.config_cache_quota,
-                                    local_cache_path=self.context.artifactdir,
-                                    total_size=utils._pretty_size(total_size)),
-                                reason='insufficient-storage-for-quota')
-        elif cache_quota > cache_size + available_space:
-            # The quota does not fit in the available space, this is a warning
-            if '%' in self.context.config_cache_quota:
-                available = (available_space / total_size) * 100
-                available = '{}% of total disk space'.format(round(available, 1))
-            else:
-                available = utils._pretty_size(available_space)
-
-            self._message(MessageType.WARN,
-                          "Your system does not have enough available " +
-                          "space to support the cache quota specified.",
-                          detail=("You have specified a quota of {quota} total disk space.\n" +
-                                  "The filesystem containing {local_cache_path} only " +
-                                  "has {available_size} available.")
-                          .format(quota=self.context.config_cache_quota,
-                                  local_cache_path=self.context.artifactdir,
-                                  available_size=available))
-
-        # Place a slight headroom (2e9 (2GB) on the cache_quota) into
-        # cache_quota to try and avoid exceptions.
-        #
-        # Of course, we might still end up running out during a build
-        # if we end up writing more than 2G, but hey, this stuff is
-        # already really fuzzy.
-        #
-        self._cache_quota_original = cache_quota
-        self._cache_quota = cache_quota - self._cache_quota_headroom
-        self._cache_lower_threshold = self._cache_quota / 2
-
-    # _get_cache_volume_size()
-    #
-    # Get the available space and total space for the volume on
-    # which the artifact cache is located.
-    #
-    # Returns:
-    #    (int): The total number of bytes on the volume
-    #    (int): The number of available bytes on the volume
-    #
-    # NOTE: We use this stub to allow the test cases
-    #       to override what an artifact cache thinks
-    #       about it's disk size and available bytes.
-    #
-    def _get_cache_volume_size(self):
-        return utils._get_volume_size(self.context.artifactdir)
-
 
 # _configured_remote_artifact_cache_specs():
 #
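The net effect of these hunks is that ArtifactCache no longer owns any size or quota state; every query and update is forwarded to the shared CASQuota object. A minimal standalone sketch of that delegation pattern (illustrative names only, not BuildStream API):

# Sketch: quota bookkeeping lives in one object, the cache forwards to it.
class Quota:
    def __init__(self, quota_bytes, headroom=2_000_000_000):
        self.quota = quota_bytes - headroom   # usable quota after headroom
        self.size = 0                         # current (possibly estimated) size

    def add(self, nbytes):
        self.size += nbytes

    def full(self):
        return self.size > self.quota


class Cache:
    def __init__(self, quota):
        self.quota = quota                    # shared quota object

    def add_artifact_size(self, nbytes):
        self.quota.add(nbytes)                # forward, no local bookkeeping

    def full(self):
        return self.quota.full()              # forward


quota = Quota(quota_bytes=10_000_000_000)
cache = Cache(quota)
cache.add_artifact_size(3_000_000_000)
assert not cache.full()

Because the quota object is shared, other caches added later (the motivation for moving this out of ArtifactCache) can account against the same budget.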
buildstream/_cas/__init__.py

@@ -17,5 +17,5 @@
 # Authors:
 #        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
 
-from .cascache import CASCache
+from .cascache import CASCache, CASQuota, CASCacheUsage
 from .casremote import CASRemote, CASRemoteSpec
buildstream/_cas/cascache.py

@@ -32,17 +32,53 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from .._protos.buildstream.v2 import buildstream_pb2
 
 from .. import utils
-from .._exceptions import CASCacheError
+from .._exceptions import CASCacheError, LoadError, LoadErrorReason
+from .._message import Message, MessageType
 
 from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate
 
 _BUFFER_SIZE = 65536
 
 
+CACHE_SIZE_FILE = "cache_size"
+
+
+# CASCacheUsage
+#
+# A simple object to report the current CAS cache usage details.
+#
+# Note that this uses the user configured cache quota
+# rather than the internal quota with protective headroom
+# removed, to provide a more sensible value to display to
+# the user.
+#
+# Args:
+#    cas (CASQuota): The CAS cache to get the status of
+#
+class CASCacheUsage():
+
+    def __init__(self, casquota):
+        self.quota_config = casquota._config_cache_quota          # Configured quota
+        self.quota_size = casquota._cache_quota_original          # Resolved cache quota in bytes
+        self.used_size = casquota.get_cache_size()                # Size used by artifacts in bytes
+        self.used_percent = 0                                     # Percentage of the quota used
+        if self.quota_size is not None:
+            self.used_percent = int(self.used_size * 100 / self.quota_size)
+
+    # Formattable into a human readable string
+    #
+    def __str__(self):
+        return "{} / {} ({}%)" \
+            .format(utils._pretty_size(self.used_size, dec_places=1),
+                    self.quota_config,
+                    self.used_percent)
+
+
 # A CASCache manages a CAS repository as specified in the Remote Execution API.
 #
 # Args:
 #     path (str): The root directory for the CAS repository
+#     cache_quota (int): User configured cache quota
 #
 class CASCache():
 
@@ -459,16 +495,6 @@ class CASCache():
         except FileNotFoundError as e:
             raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
 
-    # calculate_cache_size()
-    #
-    # Return the real disk usage of the CAS cache.
-    #
-    # Returns:
-    #     (int): The size of the cache.
-    #
-    def calculate_cache_size(self):
-        return utils._get_dir_size(self.casdir)
-
     # list_refs():
     #
     # List refs in Least Recently Modified (LRM) order.
@@ -1043,6 +1069,248 @@ class CASCache():
             batch.send()
 
 
+class CASQuota:
+    def __init__(self, context):
+        self.cas = context.get_cascache()
+        self.casdir = self.cas.casdir
+        self._config_cache_quota = context.config_cache_quota
+        self._config_cache_quota_string = context.config_cache_quota_string
+        self._cache_size = None               # The current cache size, sometimes it's an estimate
+        self._cache_quota = None              # The cache quota
+        self._cache_quota_original = None     # The cache quota as specified by the user, in bytes
+        self._cache_quota_headroom = None     # The headroom in bytes before reaching the quota or full disk
+        self._cache_lower_threshold = None    # The target cache size for a cleanup
+        self.available_space = None
+
+        self._message = context.message
+
+        self._calculate_cache_quota()
+
+    # compute_cache_size()
+    #
+    # Computes the real artifact cache size by calling
+    # the abstract calculate_cache_size() method.
+    #
+    # Returns:
+    #    (int): The size of the artifact cache.
+    #
+    def compute_cache_size(self):
+        old_cache_size = self._cache_size
+        new_cache_size = self.calculate_cache_size()
+
+        if old_cache_size != new_cache_size:
+            self._cache_size = new_cache_size
+
+        return self._cache_size
+
+    # calculate_cache_size()
+    #
+    # Return the real disk usage of the CAS cache.
+    #
+    # Returns:
+    #     (int): The size of the cache.
+    #
+    def calculate_cache_size(self):
+        return utils._get_dir_size(self.casdir)
+
+    # get_cache_size()
+    #
+    # Fetches the cached size of the cache, this is sometimes
+    # an estimate and periodically adjusted to the real size
+    # when a cache size calculation job runs.
+    #
+    # When it is an estimate, the value is either correct, or
+    # it is greater than the actual cache size.
+    #
+    # Returns:
+    #     (int) An approximation of the artifact cache size, in bytes.
+    #
+    def get_cache_size(self):
+
+        # If we don't currently have an estimate, figure out the real cache size.
+        if self._cache_size is None:
+            stored_size = self._read_cache_size()
+            if stored_size is not None:
+                self._cache_size = stored_size
+            else:
+                self._cache_size = self.compute_cache_size()
+
+        return self._cache_size
+
+    # set_cache_size()
+    #
+    # Forcefully set the overall cache size.
+    #
+    # This is used to update the size in the main process after
+    # having calculated in a cleanup or a cache size calculation job.
+    #
+    # Args:
+    #     cache_size (int): The size to set.
+    #
+    def set_cache_size(self, cache_size):
+
+        assert cache_size is not None
+
+        self._cache_size = cache_size
+        self._write_cache_size(self._cache_size)
+
+    # full()
+    #
+    # Checks if the artifact cache is full, either
+    # because the user configured quota has been exceeded
+    # or because the underlying disk is almost full.
+    #
+    # Returns:
+    #    (bool): True if the artifact cache is full
+    #
+    def full(self):
+
+        if self.get_cache_size() > self._cache_quota:
+            return True
+
+        _, volume_avail = self._get_cache_volume_size()
+        if volume_avail < self._cache_quota_headroom:
+            return True
+
+        return False
+
+    ################################################
+    #             Local Private Methods            #
+    ################################################
+
+    # _read_cache_size()
+    #
+    # Reads and returns the size of the artifact cache that's stored in the
+    # cache's size file
+    #
+    # Returns:
+    #     (int): The size of the artifact cache, as recorded in the file
+    #
+    def _read_cache_size(self):
+        size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
+
+        if not os.path.exists(size_file_path):
+            return None
+
+        with open(size_file_path, "r") as f:
+            size = f.read()
+
+        try:
+            num_size = int(size)
+        except ValueError as e:
+            raise CASCacheError("Size '{}' parsed from '{}' was not an integer".format(
+                size, size_file_path)) from e
+
+        return num_size
+
+    # _write_cache_size()
+    #
+    # Writes the given size of the artifact to the cache's size file
+    #
+    # Args:
+    #     size (int): The size of the artifact cache to record
+    #
+    def _write_cache_size(self, size):
+        assert isinstance(size, int)
+        size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
+        with utils.save_file_atomic(size_file_path, "w") as f:
+            f.write(str(size))
+
+    # _get_cache_volume_size()
+    #
+    # Get the available space and total space for the volume on
+    # which the artifact cache is located.
+    #
+    # Returns:
+    #    (int): The total number of bytes on the volume
+    #    (int): The number of available bytes on the volume
+    #
+    # NOTE: We use this stub to allow the test cases
+    #       to override what an artifact cache thinks
+    #       about it's disk size and available bytes.
+    #
+    def _get_cache_volume_size(self):
+        return utils._get_volume_size(self.casdir)
+
+    # _calculate_cache_quota()
+    #
+    # Calculates and sets the cache quota and lower threshold based on the
+    # quota set in Context.
+    # It checks that the quota is both a valid _expression_, and that there is
+    # enough disk space to satisfy that quota
+    #
+    def _calculate_cache_quota(self):
+        # Headroom intended to give BuildStream a bit of leeway.
+        # This acts as the minimum size of cache_quota and also
+        # is taken from the user requested cache_quota.
+        #
+        if 'BST_TEST_SUITE' in os.environ:
+            self._cache_quota_headroom = 0
+        else:
+            self._cache_quota_headroom = 2e9
+
+        total_size, available_space = self._get_cache_volume_size()
+        cache_size = self.get_cache_size()
+        self.available_space = available_space
+
+        # Ensure system has enough storage for the cache_quota
+        #
+        # If cache_quota is none, set it to the maximum it could possibly be.
+        #
+        # Also check that cache_quota is at least as large as our headroom.
+        #
+        cache_quota = self._config_cache_quota
+        if cache_quota is None:  # Infinity, set to max system storage
+            cache_quota = cache_size + available_space
+        if cache_quota < self._cache_quota_headroom:  # Check minimum
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
+                            "BuildStream requires a minimum cache quota of 2G.")
+        elif cache_quota > total_size:
+            # A quota greater than the total disk size is certianly an error
+            raise CASCacheError("Your system does not have enough available " +
+                                "space to support the cache quota specified.",
+                                detail=("You have specified a quota of {quota} total disk space.\n" +
+                                        "The filesystem containing {local_cache_path} only " +
+                                        "has {total_size} total disk space.")
+                                .format(
+                                    quota=self._config_cache_quota,
+                                    local_cache_path=self.casdir,
+                                    total_size=utils._pretty_size(total_size)),
+                                reason='insufficient-storage-for-quota')
+
+        elif cache_quota > cache_size + available_space:
+            # The quota does not fit in the available space, this is a warning
+            if '%' in self._config_cache_quota_string:
+                available = (available_space / total_size) * 100
+                available = '{}% of total disk space'.format(round(available, 1))
+            else:
+                available = utils._pretty_size(available_space)
+
+            self._message(Message(
+                None,
+                MessageType.WARN,
+                "Your system does not have enough available " +
+                "space to support the cache quota specified.",
+                detail=("You have specified a quota of {quota} total disk space.\n" +
+                        "The filesystem containing {local_cache_path} only " +
+                        "has {available_size} available.")
+                .format(quota=self._config_cache_quota,
+                        local_cache_path=self.casdir,
+                        available_size=available)))
+
+        # Place a slight headroom (2e9 (2GB) on the cache_quota) into
+        # cache_quota to try and avoid exceptions.
+        #
+        # Of course, we might still end up running out during a build
+        # if we end up writing more than 2G, but hey, this stuff is
+        # already really fuzzy.
+        #
+        self._cache_quota_original = cache_quota
+        self._cache_quota = cache_quota - self._cache_quota_headroom
+        self._cache_lower_threshold = self._cache_quota / 2
+
+
 def _grouper(iterable, n):
     while True:
         try:
buildstream/_context.py

@@ -30,8 +30,8 @@ from . import _yaml
 from ._exceptions import LoadError, LoadErrorReason, BstError
 from ._message import Message, MessageType
 from ._profile import Topics, profile_start, profile_end
-from ._artifactcache import ArtifactCache, ArtifactCacheUsage
-from ._cas import CASCache
+from ._artifactcache import ArtifactCache
+from ._cas import CASCache, CASQuota, CASCacheUsage
 from ._workspaces import Workspaces, WorkspaceProjectCache
 from .plugin import _plugin_lookup
 from .sandbox import SandboxRemote
@@ -58,18 +58,27 @@ class Context():
         # Filename indicating which configuration file was used, or None for the defaults
         self.config_origin = None
 
+        # The directory under which other directories are based
+        self.cachedir = None
+
         # The directory where various sources are stored
         self.sourcedir = None
 
         # The directory where build sandboxes will be created
         self.builddir = None
 
+        # The directory for CAS
+        self.casdir = None
+
+        # Extract directory
+        self.extractdir = None
+
+        # The directory for temporary files
+        self.tmpdir = None
+
         # Default root location for workspaces
         self.workspacedir = None
 
-        # The local binary artifact cache directory
-        self.artifactdir = None
-
         # The locations from which to push and pull prebuilt artifacts
         self.artifact_cache_specs = None
 
@@ -118,6 +127,9 @@ class Context():
         # Size of the artifact cache in bytes
         self.config_cache_quota = None
 
+        # User specified cache quota, used for display messages
+        self.config_cache_quota_string = None
+
         # Whether or not to attempt to pull build trees globally
         self.pull_buildtrees = None
 
@@ -142,6 +154,7 @@ class Context():
         self._log_handle = None
         self._log_filename = None
         self._cascache = None
+        self._casquota = None
         self._directory = directory
 
     # load()
@@ -179,13 +192,22 @@ class Context():
             user_config = _yaml.load(config)
             _yaml.composite(defaults, user_config)
 
+        # Give obsoletion warnings
+        if defaults.get('builddir'):
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "builddir is obsolete, use cachedir")
+
+        if defaults.get('artifactdir'):
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "artifactdir is obsolete")
+
         _yaml.node_validate(defaults, [
-            'sourcedir', 'builddir', 'artifactdir', 'logdir',
-            'scheduler', 'artifacts', 'logging', 'projects',
-            'cache', 'prompt', 'workspacedir', 'remote-execution'
+            'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler',
+            'artifacts', 'logging', 'projects', 'cache', 'prompt',
+            'workspacedir', 'remote-execution',
         ])
 
-        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
+        for directory in ['cachedir', 'sourcedir', 'logdir', 'workspacedir']:
             # Allow the ~ tilde expansion and any environment variables in
             # path specification in the config files.
             #
@@ -195,14 +217,34 @@ class Context():
             path = os.path.normpath(path)
             setattr(self, directory, path)
 
+        # add directories not set by users
+        self.extractdir = os.path.join(self.cachedir, 'extract')
+        self.tmpdir = os.path.join(self.cachedir, 'tmp')
+        self.casdir = os.path.join(self.cachedir, 'cas')
+        self.builddir = os.path.join(self.cachedir, 'build')
+
+        # Move old artifact cas to cas if it exists and create symlink
+        old_casdir = os.path.join(self.cachedir, 'artifacts', 'cas')
+        if (os.path.exists(old_casdir) and not os.path.islink(old_casdir) and
+                not os.path.exists(self.casdir)):
+            os.rename(old_casdir, self.casdir)
+            os.symlink(self.casdir, old_casdir)
+
         # Load quota configuration
-        # We need to find the first existing directory in the path of
-        # our artifactdir - the artifactdir may not have been created
-        # yet.
+        # We need to find the first existing directory in the path of our
+        # cachedir - the cachedir may not have been created yet.
         cache = _yaml.node_get(defaults, Mapping, 'cache')
         _yaml.node_validate(cache, ['quota', 'pull-buildtrees', 'cache-buildtrees'])
 
-        self.config_cache_quota = _yaml.node_get(cache, str, 'quota')
+        self.config_cache_quota_string = _yaml.node_get(cache, str, 'quota')
+        try:
+            self.config_cache_quota = utils._parse_size(self.config_cache_quota_string,
+                                                        self.casdir)
+        except utils.UtilError as e:
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
+                            "\nValid values are, for example: 800M 10G 1T 50%\n"
+                            .format(str(e))) from e
 
         # Load artifact share configuration
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
@@ -262,15 +304,15 @@ class Context():
 
         return self._artifactcache
 
-    # get_artifact_cache_usage()
+    # get_cache_usage()
     #
     # Fetches the current usage of the artifact cache
     #
     # Returns:
-    #     (ArtifactCacheUsage): The current status
+    #     (CASCacheUsage): The current status
     #
-    def get_artifact_cache_usage(self):
-        return ArtifactCacheUsage(self.artifactcache)
+    def get_cache_usage(self):
+        return CASCacheUsage(self.get_casquota())
 
     # add_project():
    #
@@ -640,9 +682,14 @@ class Context():
 
     def get_cascache(self):
         if self._cascache is None:
-            self._cascache = CASCache(self.artifactdir)
+            self._cascache = CASCache(self.cachedir)
         return self._cascache
 
+    def get_casquota(self):
+        if self._casquota is None:
+            self._casquota = CASQuota(self)
+        return self._casquota
+
 
 # _node_get_option_str()
 #
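The migration step above is worth calling out: when an old-layout store exists under the artifacts directory and the new location is empty, it is moved once and a compatibility symlink is left behind. A standalone sketch of that same logic (mirroring the Context.load() hunk; the function name is ours, not BuildStream's):

import os

def migrate_casdir(cachedir):
    # One-time move of <cachedir>/artifacts/cas to <cachedir>/cas,
    # guarded so it never runs twice or clobbers an existing store.
    casdir = os.path.join(cachedir, 'cas')
    old_casdir = os.path.join(cachedir, 'artifacts', 'cas')
    if (os.path.exists(old_casdir) and not os.path.islink(old_casdir) and
            not os.path.exists(casdir)):
        os.rename(old_casdir, casdir)      # move the store to its new home
        os.symlink(casdir, old_casdir)     # keep old paths working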
buildstream/_frontend/status.py

@@ -404,7 +404,7 @@ class _StatusHeader():
         #
         #    ~~~~~~ cache: 69% ~~~~~~
         #
-        usage = self._context.get_artifact_cache_usage()
+        usage = self._context.get_cache_usage()
         usage_percent = '{}%'.format(usage.used_percent)
 
         size = 21
buildstream/_frontend/widget.py

@@ -486,7 +486,7 @@ class LogLine(Widget):
         values["Session Start"] = starttime.strftime('%A, %d-%m-%Y at %H:%M:%S')
         values["Project"] = "{} ({})".format(project.name, project.directory)
         values["Targets"] = ", ".join([t.name for t in stream.targets])
-        values["Cache Usage"] = "{}".format(context.get_artifact_cache_usage())
+        values["Cache Usage"] = "{}".format(context.get_cache_usage())
         text += self._format_values(values)
 
         # User configurations
@@ -495,10 +495,10 @@ class LogLine(Widget):
         values = OrderedDict()
         values["Configuration File"] = \
             "Default Configuration" if not context.config_origin else context.config_origin
+        values["Cache Directory"] = context.cachedir
         values["Log Files"] = context.logdir
         values["Source Mirrors"] = context.sourcedir
         values["Build Area"] = context.builddir
-        values["Artifact Cache"] = context.artifactdir
         values["Strict Build Plan"] = "Yes" if context.get_strict() else "No"
         values["Maximum Fetch Tasks"] = context.sched_fetchers
         values["Maximum Build Tasks"] = context.sched_builders
buildstream/_scheduler/jobs/cachesizejob.py

@@ -25,14 +25,14 @@ class CacheSizeJob(Job):
         self._complete_cb = complete_cb
 
         context = self._scheduler.context
-        self._artifacts = context.artifactcache
+        self._casquota = context.get_casquota()
 
     def child_process(self):
-        return self._artifacts.compute_cache_size()
+        return self._casquota.compute_cache_size()
 
     def parent_complete(self, status, result):
         if status == JobStatus.OK:
-            self._artifacts.set_cache_size(result)
+            self._casquota.set_cache_size(result)
 
         if self._complete_cb:
             self._complete_cb(status, result)
buildstream/_scheduler/jobs/cleanupjob.py

@@ -25,27 +25,27 @@ class CleanupJob(Job):
         self._complete_cb = complete_cb
 
         context = self._scheduler.context
+        self._casquota = context.get_casquota()
         self._artifacts = context.artifactcache
 
     def child_process(self):
         def progress():
             self.send_message('update-cache-size',
-                              self._artifacts.get_cache_size())
+                              self._casquota.get_cache_size())
         return self._artifacts.clean(progress)
 
     def handle_message(self, message_type, message):
-
         # Update the cache size in the main process as we go,
         # this provides better feedback in the UI.
         if message_type == 'update-cache-size':
-            self._artifacts.set_cache_size(message)
+            self._casquota.set_cache_size(message)
             return True
 
         return False
 
     def parent_complete(self, status, result):
         if status == JobStatus.OK:
-            self._artifacts.set_cache_size(result)
+            self._casquota.set_cache_size(result)
 
         if self._complete_cb:
             self._complete_cb(status, result)
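The cleanup job runs in a child process and streams interim size updates back so the UI stays current; the parent applies each update to the quota object. A minimal queue-based analogue of that flow (not the scheduler's actual IPC, just the shape of it):

import multiprocessing

def child(queue):
    # Pretend cleanup: report the shrinking cache size after each removal
    for size in (900, 600, 300):
        queue.put(('update-cache-size', size))
    queue.put(('done', 300))                  # final result

def main():
    queue = multiprocessing.Queue()
    proc = multiprocessing.Process(target=child, args=(queue,))
    proc.start()
    while True:
        message_type, payload = queue.get()
        if message_type == 'update-cache-size':
            print("cache size now", payload)  # parent would call set_cache_size() here
        else:
            break
    proc.join()

if __name__ == '__main__':
    main()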
buildstream/data/userconfig.yaml

@@ -13,11 +13,8 @@
 # Location to store sources
 sourcedir: ${XDG_CACHE_HOME}/buildstream/sources
 
-# Location to perform builds
-builddir: ${XDG_CACHE_HOME}/buildstream/build
-
-# Location to store local binary artifacts
-artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
+# Root location for other directories in the cache
+cachedir: ${XDG_CACHE_HOME}/buildstream
 
 # Location to store build logs
 logdir: ${XDG_CACHE_HOME}/buildstream/logs
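With the two directory keys collapsed into one, the subdirectory layout is no longer user-configurable; it is derived from the single cache root. A short sketch of the derived layout, grounded in the Context.load() hunk above (the dict is illustrative, not a BuildStream structure):

import os

cachedir = os.path.expandvars('${XDG_CACHE_HOME}/buildstream')  # default root
layout = {
    'extractdir': os.path.join(cachedir, 'extract'),  # extracted artifacts
    'tmpdir':     os.path.join(cachedir, 'tmp'),      # temporary files
    'casdir':     os.path.join(cachedir, 'cas'),      # the CAS store itself
    'builddir':   os.path.join(cachedir, 'build'),    # build sandboxes
}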
buildstream/element.py

@@ -1448,7 +1448,7 @@ class Element(Plugin):
         # It's advantageous to have this temporary directory on
         # the same file system as the rest of our cache.
         with self.timed_activity("Staging sources", silent_nested=True), \
-            utils._tempdir(dir=context.artifactdir, prefix='staging-temp') as temp_staging_directory:
+            utils._tempdir(dir=context.tmpdir, prefix='staging-temp') as temp_staging_directory:
 
             import_dir = temp_staging_directory
 
buildstream/plugintestutils/runcli.py

@@ -277,10 +277,10 @@ class Cli():
                              *, cache_dir=None):
         # Read configuration to figure out where artifacts are stored
         if not cache_dir:
-            default = os.path.join(project, 'cache', 'artifacts')
+            default = os.path.join(project, 'cache')
 
             if self.config is not None:
-                cache_dir = self.config.get('artifactdir', default)
+                cache_dir = self.config.get('cachedir', default)
             else:
                 cache_dir = default
 
@@ -582,11 +582,21 @@ def cli_integration(tmpdir, integration_cache):
     # We want to cache sources for integration tests more permanently,
     # to avoid downloading the huge base-sdk repeatedly
     fixture.configure({
+        'cachedir': integration_cache.cachedir,
         'sourcedir': integration_cache.sources,
-        'artifactdir': integration_cache.artifacts
     })
 
-    return fixture
+    yield fixture
+
+    # remove following folders if necessary
+    try:
+        shutil.rmtree(os.path.join(integration_cache.cachedir, 'build'))
+    except FileNotFoundError:
+        pass
+    try:
+        shutil.rmtree(os.path.join(integration_cache.cachedir, 'tmp'))
+    except FileNotFoundError:
+        pass
 
 
 @contextmanager
@@ -626,10 +636,8 @@ def configured(directory, config=None):
 
     if not config.get('sourcedir', False):
         config['sourcedir'] = os.path.join(directory, 'sources')
-    if not config.get('builddir', False):
-        config['builddir'] = os.path.join(directory, 'build')
-    if not config.get('artifactdir', False):
-        config['artifactdir'] = os.path.join(directory, 'artifacts')
+    if not config.get('cachedir', False):
+        config['cachedir'] = directory
     if not config.get('logdir', False):
         config['logdir'] = os.path.join(directory, 'logs')
 
conftest.py

@@ -53,16 +53,16 @@ def pytest_runtest_setup(item):
 class IntegrationCache():
 
     def __init__(self, cache):
-        cache = os.path.abspath(cache)
+        self.root = os.path.abspath(cache)
         os.makedirs(cache, exist_ok=True)
 
         # Use the same sources every time
-        self.sources = os.path.join(cache, 'sources')
+        self.sources = os.path.join(self.root, 'sources')
 
         # Create a temp directory for the duration of the test for
         # the artifacts directory
         try:
-            self.artifacts = tempfile.mkdtemp(dir=cache, prefix='artifacts-')
+            self.cachedir = tempfile.mkdtemp(dir=self.root, prefix='cache-')
         except OSError as e:
             raise AssertionError("Unable to create test directory !") from e
 
@@ -84,7 +84,11 @@ def integration_cache(request):
     # Clean up the artifacts after each test run - we only want to
     # cache sources between runs
     try:
-        shutil.rmtree(cache.artifacts)
+        shutil.rmtree(cache.cachedir)
+    except FileNotFoundError:
+        pass
+    try:
+        shutil.rmtree(os.path.join(cache.root, 'cas'))
     except FileNotFoundError:
         pass
 
doc/bst2html.py

@@ -194,10 +194,9 @@ def workdir(source_cache=None):
 
     bst_config_file = os.path.join(tempdir, 'buildstream.conf')
     config = {
+        'cachedir': tempdir,
         'sourcedir': source_cache,
-        'artifactdir': os.path.join(tempdir, 'artifacts'),
         'logdir': os.path.join(tempdir, 'logs'),
-        'builddir': os.path.join(tempdir, 'build'),
     }
     _yaml.dump(config, bst_config_file)
 
@@ -411,12 +410,10 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
     # Encode and save the output if that was asked for
     output = _yaml.node_get(command, str, 'output', default_value=None)
     if output is not None:
-
         # Convert / Generate a nice <div>
         converted = generate_html(command_out, directory, config_file,
                                   source_cache, tempdir, palette,
                                   command_str, command_fake_output is not None)
-
         # Save it
         filename = os.path.join(desc_dir, output)
         filename = os.path.realpath(filename)
doc/sessions/running-commands.run

@@ -2,7 +2,7 @@
 commands:
 # Make it fetch first
 - directory: ../examples/running-commands
-  command: fetch hello.bst
+  command: source fetch hello.bst
 
 # Capture a show output
 - directory: ../examples/running-commands
setup.cfg

@@ -20,4 +20,4 @@ env =
 [pycodestyle]
 max-line-length = 119
 ignore = E129,E125,W504,W605
-exclude = .git/**,.tox/**,doc/source/conf.py,buildstream/_fuse/fuse.py,buildstream/_protos/**/*py
+exclude = .git/**,.tox/**,.eggs/**,build/**,doc/source/conf.py,buildstream/_fuse/fuse.py,buildstream/_protos/**/*py,tmp/**
tests/artifactcache/cache_size.py

@@ -50,15 +50,15 @@ def test_cache_size_write(cli, tmpdir):
     create_project(project_dir)
 
     # Artifact cache must be in a known place
-    artifactdir = os.path.join(project_dir, "artifacts")
-    cli.configure({"artifactdir": artifactdir})
+    casdir = os.path.join(project_dir, "cas")
+    cli.configure({"cachedir": project_dir})
 
     # Build, to populate the cache
     res = cli.run(project=project_dir, args=["build", "test.bst"])
     res.assert_success()
 
     # Inspect the artifact cache
-    sizefile = os.path.join(artifactdir, CACHE_SIZE_FILE)
+    sizefile = os.path.join(casdir, CACHE_SIZE_FILE)
     assert os.path.isfile(sizefile)
     with open(sizefile, "r") as f:
         size_data = f.read()
@@ -81,11 +81,11 @@ def test_quota_over_1024T(cli, tmpdir):
     _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
 
     volume_space_patch = mock.patch(
-        "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
+        "buildstream._cas.CASQuota._get_cache_volume_size",
         autospec=True,
         return_value=(1025 * TiB, 1025 * TiB)
     )
 
     with volume_space_patch:
         result = cli.run(project, args=["build", "file.bst"])
-        result.assert_main_error(ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')
+        result.assert_main_error(ErrorDomain.CAS, 'insufficient-storage-for-quota')
|
| ... | ... | @@ -341,7 +341,7 @@ def test_never_delete_required_track(cli, datafiles, tmpdir): |
| 341 | 341 |
("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
|
| 342 | 342 |
|
| 343 | 343 |
# Not enough space on disk even if you cleaned up
|
| 344 |
- ("11K", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota'),
|
|
| 344 |
+ ("11K", ErrorDomain.CAS, 'insufficient-storage-for-quota'),
|
|
| 345 | 345 |
|
| 346 | 346 |
# Not enough space for these caches
|
| 347 | 347 |
("7K", 'warning', 'Your system does not have enough available'),
|
| ... | ... | @@ -355,7 +355,7 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas |
| 355 | 355 |
cli.configure({
|
| 356 | 356 |
'cache': {
|
| 357 | 357 |
'quota': quota,
|
| 358 |
- }
|
|
| 358 |
+ },
|
|
| 359 | 359 |
})
|
| 360 | 360 |
|
| 361 | 361 |
# We patch how we get space information
|
| ... | ... | @@ -373,13 +373,13 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas |
| 373 | 373 |
total_space = 10000
|
| 374 | 374 |
|
| 375 | 375 |
volume_space_patch = mock.patch(
|
| 376 |
- "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
|
|
| 376 |
+ "buildstream.utils._get_volume_size",
|
|
| 377 | 377 |
autospec=True,
|
| 378 | 378 |
return_value=(total_space, free_space),
|
| 379 | 379 |
)
|
| 380 | 380 |
|
| 381 | 381 |
cache_size_patch = mock.patch(
|
| 382 |
- "buildstream._artifactcache.ArtifactCache.get_cache_size",
|
|
| 382 |
+ "buildstream._cas.CASQuota.get_cache_size",
|
|
| 383 | 383 |
autospec=True,
|
| 384 | 384 |
return_value=0,
|
| 385 | 385 |
)
|
| ... | ... | @@ -417,7 +417,7 @@ def test_extract_expiry(cli, datafiles, tmpdir): |
| 417 | 417 |
res.assert_success()
|
| 418 | 418 |
|
| 419 | 419 |
# Get a snapshot of the extracts in advance
|
| 420 |
- extractdir = os.path.join(project, 'cache', 'artifacts', 'extract', 'test', 'target')
|
|
| 420 |
+ extractdir = os.path.join(project, 'cache', 'extract', 'test', 'target')
|
|
| 421 | 421 |
extracts = os.listdir(extractdir)
|
| 422 | 422 |
assert(len(extracts) == 1)
|
| 423 | 423 |
extract = os.path.join(extractdir, extracts[0])
|
| ... | ... | @@ -436,7 +436,7 @@ def test_extract_expiry(cli, datafiles, tmpdir): |
| 436 | 436 |
# Now we should have a directory for the cached target2.bst, which
|
| 437 | 437 |
# replaced target.bst in the cache, we should not have a directory
|
| 438 | 438 |
# for the target.bst
|
| 439 |
- refsdir = os.path.join(project, 'cache', 'artifacts', 'cas', 'refs', 'heads')
|
|
| 439 |
+ refsdir = os.path.join(project, 'cache', 'cas', 'refs', 'heads')
|
|
| 440 | 440 |
refsdirtest = os.path.join(refsdir, 'test')
|
| 441 | 441 |
refsdirtarget = os.path.join(refsdirtest, 'target')
|
| 442 | 442 |
refsdirtarget2 = os.path.join(refsdirtest, 'target2')
|
tests/artifactcache/junctions.py

@@ -70,8 +70,8 @@ def test_push_pull(cli, tmpdir, datafiles):
     # Now we've pushed, delete the user's local artifact cache
     # directory and try to redownload it from the share
     #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+    cas = os.path.join(cli.directory, 'cas')
+    shutil.rmtree(cas)
 
     # Assert that nothing is cached locally anymore
     state = cli.get_element_state(project, 'target.bst')
| ... | ... | @@ -57,7 +57,7 @@ def test_pull(cli, tmpdir, datafiles): |
| 57 | 57 |
# Set up an artifact cache.
|
| 58 | 58 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
| 59 | 59 |
# Configure artifact share
|
| 60 |
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
| 60 |
+ cache_dir = os.path.join(str(tmpdir), 'cache')
|
|
| 61 | 61 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
| 62 | 62 |
user_config = {
|
| 63 | 63 |
'scheduler': {
|
| ... | ... | @@ -66,7 +66,8 @@ def test_pull(cli, tmpdir, datafiles): |
| 66 | 66 |
'artifacts': {
|
| 67 | 67 |
'url': share.repo,
|
| 68 | 68 |
'push': True,
|
| 69 |
- }
|
|
| 69 |
+ },
|
|
| 70 |
+ 'cachedir': cache_dir
|
|
| 70 | 71 |
}
|
| 71 | 72 |
|
| 72 | 73 |
# Write down the user configuration file
|
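This is the user-visible shape of the change: the remote settings keep their structure, and a single top-level cachedir key replaces the old per-cache directory options. A minimal sketch of the resulting configuration (the URL and path are illustrative placeholders; scheduler settings are elided, as in the hunk above):

    cache_dir = '/some/cache/root'   # illustrative path
    user_config = {
        'artifacts': {
            'url': 'https://shares.example.com/repo',   # illustrative remote
            'push': True,
        },
        'cachedir': cache_dir,   # one root; cas/ and tmp/ are derived below it
    }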
| ... | ... | @@ -93,7 +94,6 @@ def test_pull(cli, tmpdir, datafiles): |
| 93 | 94 |
# Fake minimal context
|
| 94 | 95 |
context = Context()
|
| 95 | 96 |
context.load(config=user_config_file)
|
| 96 |
- context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
| 97 | 97 |
context.set_message_handler(message_handler)
|
| 98 | 98 |
|
| 99 | 99 |
# Load the project and CAS cache
|
| ... | ... | @@ -111,7 +111,7 @@ def test_pull(cli, tmpdir, datafiles): |
| 111 | 111 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
| 112 | 112 |
process = multiprocessing.Process(target=_queue_wrapper,
|
| 113 | 113 |
args=(_test_pull, queue, user_config_file, project_dir,
|
| 114 |
- artifact_dir, 'target.bst', element_key))
|
|
| 114 |
+ cache_dir, 'target.bst', element_key))
|
|
| 115 | 115 |
|
| 116 | 116 |
try:
|
| 117 | 117 |
# Keep SIGINT blocked in the child process
|
| ... | ... | @@ -128,12 +128,14 @@ def test_pull(cli, tmpdir, datafiles): |
| 128 | 128 |
assert cas.contains(element, element_key)
|
| 129 | 129 |
|
| 130 | 130 |
|
| 131 |
-def _test_pull(user_config_file, project_dir, artifact_dir,
|
|
| 131 |
+def _test_pull(user_config_file, project_dir, cache_dir,
|
|
| 132 | 132 |
element_name, element_key, queue):
|
| 133 | 133 |
# Fake minimal context
|
| 134 | 134 |
context = Context()
|
| 135 | 135 |
context.load(config=user_config_file)
|
| 136 |
- context.artifactdir = artifact_dir
|
|
| 136 |
+ context.cachedir = cache_dir
|
|
| 137 |
+ context.casdir = os.path.join(cache_dir, 'cas')
|
|
| 138 |
+ context.tmpdir = os.path.join(cache_dir, 'tmp')
|
|
| 137 | 139 |
context.set_message_handler(message_handler)
|
| 138 | 140 |
|
| 139 | 141 |
# Load the project manually
|
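With one root directory, tests that fake a minimal Context derive every sub-path from it rather than receiving separate directories. A small helper in that spirit (the helper name is hypothetical; the attribute names are the ones set above):

    import os

    def point_context_at(context, cache_dir):
        # Mirror what _test_pull does by hand: one root, with the
        # CAS store and temporary directory derived from it.
        context.cachedir = cache_dir
        context.casdir = os.path.join(cache_dir, 'cas')
        context.tmpdir = os.path.join(cache_dir, 'tmp')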
| ... | ... | @@ -166,7 +168,7 @@ def test_pull_tree(cli, tmpdir, datafiles): |
| 166 | 168 |
# Set up an artifact cache.
|
| 167 | 169 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
| 168 | 170 |
# Configure artifact share
|
| 169 |
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
| 171 |
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
|
|
| 170 | 172 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
| 171 | 173 |
user_config = {
|
| 172 | 174 |
'scheduler': {
|
| ... | ... | @@ -175,7 +177,8 @@ def test_pull_tree(cli, tmpdir, datafiles): |
| 175 | 177 |
'artifacts': {
|
| 176 | 178 |
'url': share.repo,
|
| 177 | 179 |
'push': True,
|
| 178 |
- }
|
|
| 180 |
+ },
|
|
| 181 |
+ 'cachedir': rootcache_dir
|
|
| 179 | 182 |
}
|
| 180 | 183 |
|
| 181 | 184 |
# Write down the user configuration file
|
| ... | ... | @@ -196,7 +199,6 @@ def test_pull_tree(cli, tmpdir, datafiles): |
| 196 | 199 |
# Fake minimal context
|
| 197 | 200 |
context = Context()
|
| 198 | 201 |
context.load(config=user_config_file)
|
| 199 |
- context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
| 200 | 202 |
context.set_message_handler(message_handler)
|
| 201 | 203 |
|
| 202 | 204 |
# Load the project and CAS cache
|
| ... | ... | @@ -219,7 +221,7 @@ def test_pull_tree(cli, tmpdir, datafiles): |
| 219 | 221 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
| 220 | 222 |
process = multiprocessing.Process(target=_queue_wrapper,
|
| 221 | 223 |
args=(_test_push_tree, queue, user_config_file, project_dir,
|
| 222 |
- artifact_dir, artifact_digest))
|
|
| 224 |
+ artifact_digest))
|
|
| 223 | 225 |
|
| 224 | 226 |
try:
|
| 225 | 227 |
# Keep SIGINT blocked in the child process
|
| ... | ... | @@ -247,7 +249,7 @@ def test_pull_tree(cli, tmpdir, datafiles): |
| 247 | 249 |
# Use subprocess to avoid creation of gRPC threads in main BuildStream process
|
| 248 | 250 |
process = multiprocessing.Process(target=_queue_wrapper,
|
| 249 | 251 |
args=(_test_pull_tree, queue, user_config_file, project_dir,
|
| 250 |
- artifact_dir, tree_digest))
|
|
| 252 |
+ tree_digest))
|
|
| 251 | 253 |
|
| 252 | 254 |
try:
|
| 253 | 255 |
# Keep SIGINT blocked in the child process
|
| ... | ... | @@ -269,11 +271,10 @@ def test_pull_tree(cli, tmpdir, datafiles): |
| 269 | 271 |
assert os.path.exists(cas.objpath(directory_digest))
|
| 270 | 272 |
|
| 271 | 273 |
|
| 272 |
-def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
|
|
| 274 |
+def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
|
|
| 273 | 275 |
# Fake minimal context
|
| 274 | 276 |
context = Context()
|
| 275 | 277 |
context.load(config=user_config_file)
|
| 276 |
- context.artifactdir = artifact_dir
|
|
| 277 | 278 |
context.set_message_handler(message_handler)
|
| 278 | 279 |
|
| 279 | 280 |
# Load the project manually
|
| ... | ... | @@ -305,11 +306,10 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest |
| 305 | 306 |
queue.put("No remote configured")
|
| 306 | 307 |
|
| 307 | 308 |
|
| 308 |
-def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
|
|
| 309 |
+def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
|
|
| 309 | 310 |
# Fake minimal context
|
| 310 | 311 |
context = Context()
|
| 311 | 312 |
context.load(config=user_config_file)
|
| 312 |
- context.artifactdir = artifact_dir
|
|
| 313 | 313 |
context.set_message_handler(message_handler)
|
| 314 | 314 |
|
| 315 | 315 |
# Load the project manually
|
| ... | ... | @@ -51,7 +51,7 @@ def test_push(cli, tmpdir, datafiles): |
| 51 | 51 |
# Set up an artifact cache.
|
| 52 | 52 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
| 53 | 53 |
# Configure artifact share
|
| 54 |
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
| 54 |
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
|
|
| 55 | 55 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
| 56 | 56 |
user_config = {
|
| 57 | 57 |
'scheduler': {
|
| ... | ... | @@ -60,7 +60,8 @@ def test_push(cli, tmpdir, datafiles): |
| 60 | 60 |
'artifacts': {
|
| 61 | 61 |
'url': share.repo,
|
| 62 | 62 |
'push': True,
|
| 63 |
- }
|
|
| 63 |
+ },
|
|
| 64 |
+ 'cachedir': rootcache_dir
|
|
| 64 | 65 |
}
|
| 65 | 66 |
|
| 66 | 67 |
# Write down the user configuration file
|
| ... | ... | @@ -69,7 +70,6 @@ def test_push(cli, tmpdir, datafiles): |
| 69 | 70 |
# Fake minimal context
|
| 70 | 71 |
context = Context()
|
| 71 | 72 |
context.load(config=user_config_file)
|
| 72 |
- context.artifactdir = artifact_dir
|
|
| 73 | 73 |
context.set_message_handler(message_handler)
|
| 74 | 74 |
|
| 75 | 75 |
# Load the project manually
|
| ... | ... | @@ -89,7 +89,7 @@ def test_push(cli, tmpdir, datafiles): |
| 89 | 89 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
| 90 | 90 |
process = multiprocessing.Process(target=_queue_wrapper,
|
| 91 | 91 |
args=(_test_push, queue, user_config_file, project_dir,
|
| 92 |
- artifact_dir, 'target.bst', element_key))
|
|
| 92 |
+ 'target.bst', element_key))
|
|
| 93 | 93 |
|
| 94 | 94 |
try:
|
| 95 | 95 |
# Keep SIGINT blocked in the child process
|
| ... | ... | @@ -106,12 +106,10 @@ def test_push(cli, tmpdir, datafiles): |
| 106 | 106 |
assert share.has_artifact('test', 'target.bst', element_key)
|
| 107 | 107 |
|
| 108 | 108 |
|
| 109 |
-def _test_push(user_config_file, project_dir, artifact_dir,
|
|
| 110 |
- element_name, element_key, queue):
|
|
| 109 |
+def _test_push(user_config_file, project_dir, element_name, element_key, queue):
|
|
| 111 | 110 |
# Fake minimal context
|
| 112 | 111 |
context = Context()
|
| 113 | 112 |
context.load(config=user_config_file)
|
| 114 |
- context.artifactdir = artifact_dir
|
|
| 115 | 113 |
context.set_message_handler(message_handler)
|
| 116 | 114 |
|
| 117 | 115 |
# Load the project manually
|
| ... | ... | @@ -152,7 +150,7 @@ def test_push_directory(cli, tmpdir, datafiles): |
| 152 | 150 |
# Set up an artifact cache.
|
| 153 | 151 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
| 154 | 152 |
# Configure artifact share
|
| 155 |
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
| 153 |
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
|
|
| 156 | 154 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
| 157 | 155 |
user_config = {
|
| 158 | 156 |
'scheduler': {
|
| ... | ... | @@ -161,7 +159,8 @@ def test_push_directory(cli, tmpdir, datafiles): |
| 161 | 159 |
'artifacts': {
|
| 162 | 160 |
'url': share.repo,
|
| 163 | 161 |
'push': True,
|
| 164 |
- }
|
|
| 162 |
+ },
|
|
| 163 |
+ 'cachedir': rootcache_dir
|
|
| 165 | 164 |
}
|
| 166 | 165 |
|
| 167 | 166 |
# Write down the user configuration file
|
| ... | ... | @@ -170,7 +169,6 @@ def test_push_directory(cli, tmpdir, datafiles): |
| 170 | 169 |
# Fake minimal context
|
| 171 | 170 |
context = Context()
|
| 172 | 171 |
context.load(config=user_config_file)
|
| 173 |
- context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
| 174 | 172 |
context.set_message_handler(message_handler)
|
| 175 | 173 |
|
| 176 | 174 |
# Load the project and CAS cache
|
| ... | ... | @@ -198,7 +196,7 @@ def test_push_directory(cli, tmpdir, datafiles): |
| 198 | 196 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
| 199 | 197 |
process = multiprocessing.Process(target=_queue_wrapper,
|
| 200 | 198 |
args=(_test_push_directory, queue, user_config_file,
|
| 201 |
- project_dir, artifact_dir, artifact_digest))
|
|
| 199 |
+ project_dir, artifact_digest))
|
|
| 202 | 200 |
|
| 203 | 201 |
try:
|
| 204 | 202 |
# Keep SIGINT blocked in the child process
|
| ... | ... | @@ -216,11 +214,10 @@ def test_push_directory(cli, tmpdir, datafiles): |
| 216 | 214 |
assert share.has_object(artifact_digest)
|
| 217 | 215 |
|
| 218 | 216 |
|
| 219 |
-def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
|
|
| 217 |
+def _test_push_directory(user_config_file, project_dir, artifact_digest, queue):
|
|
| 220 | 218 |
# Fake minimal context
|
| 221 | 219 |
context = Context()
|
| 222 | 220 |
context.load(config=user_config_file)
|
| 223 |
- context.artifactdir = artifact_dir
|
|
| 224 | 221 |
context.set_message_handler(message_handler)
|
| 225 | 222 |
|
| 226 | 223 |
# Load the project manually
|
| ... | ... | @@ -254,6 +251,7 @@ def test_push_message(cli, tmpdir, datafiles): |
| 254 | 251 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
| 255 | 252 |
# Configure artifact share
|
| 256 | 253 |
artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
| 254 |
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
|
|
| 257 | 255 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
| 258 | 256 |
user_config = {
|
| 259 | 257 |
'scheduler': {
|
| ... | ... | @@ -262,7 +260,8 @@ def test_push_message(cli, tmpdir, datafiles): |
| 262 | 260 |
'artifacts': {
|
| 263 | 261 |
'url': share.repo,
|
| 264 | 262 |
'push': True,
|
| 265 |
- }
|
|
| 263 |
+ },
|
|
| 264 |
+ 'cachedir': rootcache_dir
|
|
| 266 | 265 |
}
|
| 267 | 266 |
|
| 268 | 267 |
# Write down the user configuration file
|
| ... | ... | @@ -273,7 +272,7 @@ def test_push_message(cli, tmpdir, datafiles): |
| 273 | 272 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
| 274 | 273 |
process = multiprocessing.Process(target=_queue_wrapper,
|
| 275 | 274 |
args=(_test_push_message, queue, user_config_file,
|
| 276 |
- project_dir, artifact_dir))
|
|
| 275 |
+ project_dir))
|
|
| 277 | 276 |
|
| 278 | 277 |
try:
|
| 279 | 278 |
# Keep SIGINT blocked in the child process
|
| ... | ... | @@ -292,11 +291,10 @@ def test_push_message(cli, tmpdir, datafiles): |
| 292 | 291 |
assert share.has_object(message_digest)
|
| 293 | 292 |
|
| 294 | 293 |
|
| 295 |
-def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
|
|
| 294 |
+def _test_push_message(user_config_file, project_dir, queue):
|
|
| 296 | 295 |
# Fake minimal context
|
| 297 | 296 |
context = Context()
|
| 298 | 297 |
context.load(config=user_config_file)
|
| 299 |
- context.artifactdir = artifact_dir
|
|
| 300 | 298 |
context.set_message_handler(message_handler)
|
| 301 | 299 |
|
| 302 | 300 |
# Load the project manually
|
| ... | ... | @@ -64,8 +64,8 @@ def test_push_pull_all(cli, tmpdir, datafiles): |
| 64 | 64 |
# Now we've pushed, delete the user's local artifact cache
|
| 65 | 65 |
# directory and try to redownload it from the share
|
| 66 | 66 |
#
|
| 67 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
| 68 |
- shutil.rmtree(artifacts)
|
|
| 67 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
| 68 |
+ shutil.rmtree(cas)
|
|
| 69 | 69 |
|
| 70 | 70 |
# Assert that nothing is cached locally anymore
|
| 71 | 71 |
states = cli.get_element_states(project, all_elements)
|
| ... | ... | @@ -114,7 +114,7 @@ def test_push_pull_default_targets(cli, tmpdir, datafiles): |
| 114 | 114 |
# Now we've pushed, delete the user's local artifact cache
|
| 115 | 115 |
# directory and try to redownload it from the share
|
| 116 | 116 |
#
|
| 117 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
| 117 |
+ artifacts = os.path.join(cli.directory, 'cas')
|
|
| 118 | 118 |
shutil.rmtree(artifacts)
|
| 119 | 119 |
|
| 120 | 120 |
# Assert that nothing is cached locally anymore
|
| ... | ... | @@ -156,8 +156,8 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles): |
| 156 | 156 |
assert_shared(cli, share2, project, 'target.bst')
|
| 157 | 157 |
|
| 158 | 158 |
# Delete the user's local artifact cache.
|
| 159 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
| 160 |
- shutil.rmtree(artifacts)
|
|
| 159 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
| 160 |
+ shutil.rmtree(cas)
|
|
| 161 | 161 |
|
| 162 | 162 |
# Assert that the element is not cached anymore.
|
| 163 | 163 |
assert cli.get_element_state(project, 'target.bst') != 'cached'
|
| ... | ... | @@ -210,8 +210,8 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles): |
| 210 | 210 |
# Now we've pushed, delete the user's local artifact cache
|
| 211 | 211 |
# directory and try to redownload it from the good_share.
|
| 212 | 212 |
#
|
| 213 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
| 214 |
- shutil.rmtree(artifacts)
|
|
| 213 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
| 214 |
+ shutil.rmtree(cas)
|
|
| 215 | 215 |
|
| 216 | 216 |
result = cli.run(project=project, args=['artifact', 'pull', 'target.bst', '--remote',
|
| 217 | 217 |
good_share.repo])
|
| ... | ... | @@ -251,8 +251,8 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles): |
| 251 | 251 |
# Now we've pushed, delete the user's local artifact cache
|
| 252 | 252 |
# directory and try to redownload it from the share
|
| 253 | 253 |
#
|
| 254 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
| 255 |
- shutil.rmtree(artifacts)
|
|
| 254 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
| 255 |
+ shutil.rmtree(cas)
|
|
| 256 | 256 |
|
| 257 | 257 |
# Assert that nothing is cached locally anymore
|
| 258 | 258 |
for element_name in all_elements:
|
| ... | ... | @@ -301,8 +301,8 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles): |
| 301 | 301 |
# Now we've pushed, delete the user's local artifact cache
|
| 302 | 302 |
# directory and try to redownload it from the share
|
| 303 | 303 |
#
|
| 304 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
| 305 |
- shutil.rmtree(artifacts)
|
|
| 304 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
| 305 |
+ shutil.rmtree(cas)
|
|
| 306 | 306 |
|
| 307 | 307 |
# Assert that nothing is cached locally anymore
|
| 308 | 308 |
for element_name in all_elements:
|
| ... | ... | @@ -337,7 +337,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles): |
| 337 | 337 |
result.assert_success()
|
| 338 | 338 |
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
|
| 339 | 339 |
|
| 340 |
- cache_dir = os.path.join(project, 'cache', 'artifacts')
|
|
| 340 |
+ cache_dir = os.path.join(project, 'cache', 'cas')
|
|
| 341 | 341 |
shutil.rmtree(cache_dir)
|
| 342 | 342 |
|
| 343 | 343 |
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
|
| ... | ... | @@ -372,8 +372,8 @@ def test_pull_missing_blob(cli, tmpdir, datafiles): |
| 372 | 372 |
# Now we've pushed, delete the user's local artifact cache
|
| 373 | 373 |
# directory and try to redownload it from the share
|
| 374 | 374 |
#
|
| 375 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
| 376 |
- shutil.rmtree(artifacts)
|
|
| 375 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
| 376 |
+ shutil.rmtree(cas)
|
|
| 377 | 377 |
|
| 378 | 378 |
# Assert that nothing is cached locally anymore
|
| 379 | 379 |
for element_name in all_elements:
|
| ... | ... | @@ -510,8 +510,8 @@ def test_pull_access_rights(caplog, cli, tmpdir, datafiles): |
| 510 | 510 |
|
| 511 | 511 |
shutil.rmtree(checkout)
|
| 512 | 512 |
|
| 513 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
| 514 |
- shutil.rmtree(artifacts)
|
|
| 513 |
+ casdir = os.path.join(cli.directory, 'cas')
|
|
| 514 |
+ shutil.rmtree(casdir)
|
|
| 515 | 515 |
|
| 516 | 516 |
result = cli.run(project=project, args=['artifact', 'pull', 'compose-all.bst'])
|
| 517 | 517 |
result.assert_success()
|
| ... | ... | @@ -53,7 +53,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): |
| 53 | 53 |
create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
|
| 54 | 54 |
cli.configure({
|
| 55 | 55 |
'artifacts': {'url': share1.repo, 'push': True},
|
| 56 |
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
| 56 |
+ 'cachedir': str(tmpdir)
|
|
| 57 | 57 |
})
|
| 58 | 58 |
|
| 59 | 59 |
# Build autotools element with cache-buildtrees set via the
|
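The buildtree tests follow the same pattern throughout: hand BuildStream the tmpdir root via cachedir and let it create cas/ and extract/ underneath, which also changes what a cache wipe looks like. A sketch using this test's fixtures (cli, share1 and tmpdir come from the test above):

    import os
    import shutil

    cli.configure({
        'artifacts': {'url': share1.repo, 'push': True},
        'cachedir': str(tmpdir),   # cas/ and extract/ are created below this
    })

    # Discarding the local cache now means removing both stores:
    shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
    shutil.rmtree(os.path.join(str(tmpdir), 'extract'))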
| ... | ... | @@ -69,20 +69,22 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): |
| 69 | 69 |
# to not cache buildtrees
|
| 70 | 70 |
cache_key = cli.get_element_key(project, element_name)
|
| 71 | 71 |
elementdigest = share1.has_artifact('test', element_name, cache_key)
|
| 72 |
- buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
| 72 |
+ buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
|
|
| 73 | 73 |
elementdigest.hash, 'buildtree')
|
| 74 | 74 |
assert os.path.isdir(buildtreedir)
|
| 75 | 75 |
assert not os.listdir(buildtreedir)
|
| 76 | 76 |
|
| 77 | 77 |
# Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
|
| 78 | 78 |
# it was cached in share1 as expected with an empty buildtree dir
|
| 79 |
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 79 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
|
|
| 80 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
|
|
| 80 | 81 |
assert cli.get_element_state(project, element_name) != 'cached'
|
| 81 | 82 |
result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
|
| 82 | 83 |
assert element_name in result.get_pulled_elements()
|
| 83 | 84 |
assert os.path.isdir(buildtreedir)
|
| 84 | 85 |
assert not os.listdir(buildtreedir)
|
| 85 |
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 86 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
|
|
| 87 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
|
|
| 86 | 88 |
|
| 87 | 89 |
# Assert that the default behaviour of pull, which does not include buildtrees, behaves as
|
| 88 | 90 |
# expected with the artifact in share1 that was purposely cached with an empty one. As such the
|
| ... | ... | @@ -91,13 +93,14 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): |
| 91 | 93 |
result = cli.run(project=project, args=['artifact', 'pull', element_name])
|
| 92 | 94 |
assert element_name in result.get_pulled_elements()
|
| 93 | 95 |
assert not os.path.isdir(buildtreedir)
|
| 94 |
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 96 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
|
|
| 97 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
|
|
| 95 | 98 |
|
| 96 | 99 |
# Repeat building the artifacts, this time with the default behaviour of caching buildtrees,
|
| 97 | 100 |
# as such the buildtree dir should not be empty
|
| 98 | 101 |
cli.configure({
|
| 99 | 102 |
'artifacts': {'url': share2.repo, 'push': True},
|
| 100 |
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
| 103 |
+ 'cachedir': str(tmpdir)
|
|
| 101 | 104 |
})
|
| 102 | 105 |
result = cli.run(project=project, args=['build', element_name])
|
| 103 | 106 |
assert result.exit_code == 0
|
| ... | ... | @@ -106,27 +109,29 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): |
| 106 | 109 |
|
| 107 | 110 |
# Cache key will be the same; however, the digest hash will have changed as expected, so reconstruct paths
|
| 108 | 111 |
elementdigest = share2.has_artifact('test', element_name, cache_key)
|
| 109 |
- buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
| 112 |
+ buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
|
|
| 110 | 113 |
elementdigest.hash, 'buildtree')
|
| 111 | 114 |
assert os.path.isdir(buildtreedir)
|
| 112 | 115 |
assert os.listdir(buildtreedir)
|
| 113 | 116 |
|
| 114 | 117 |
# Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
|
| 115 | 118 |
# it was cached in share2 as expected with a populated buildtree dir
|
| 116 |
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 119 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
|
|
| 120 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
|
|
| 117 | 121 |
assert cli.get_element_state(project, element_name) != 'cached'
|
| 118 | 122 |
result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
|
| 119 | 123 |
assert element_name in result.get_pulled_elements()
|
| 120 | 124 |
assert os.path.isdir(buildtreedir)
|
| 121 | 125 |
assert os.listdir(buildtreedir)
|
| 122 |
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 126 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
|
|
| 127 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
|
|
| 123 | 128 |
|
| 124 | 129 |
# Clarify that the user config option for cache-buildtrees works as the cli
|
| 125 | 130 |
# main option does. Point to share3, which does not have the artifacts cached, to force
|
| 126 | 131 |
# a build
|
| 127 | 132 |
cli.configure({
|
| 128 | 133 |
'artifacts': {'url': share3.repo, 'push': True},
|
| 129 |
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
|
|
| 134 |
+ 'cachedir': str(tmpdir),
|
|
| 130 | 135 |
'cache': {'cache-buildtrees': 'never'}
|
| 131 | 136 |
})
|
| 132 | 137 |
result = cli.run(project=project, args=['build', element_name])
|
| ... | ... | @@ -134,7 +139,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles): |
| 134 | 139 |
assert cli.get_element_state(project, element_name) == 'cached'
|
| 135 | 140 |
cache_key = cli.get_element_key(project, element_name)
|
| 136 | 141 |
elementdigest = share3.has_artifact('test', element_name, cache_key)
|
| 137 |
- buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
| 142 |
+ buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
|
|
| 138 | 143 |
elementdigest.hash, 'buildtree')
|
| 139 | 144 |
assert os.path.isdir(buildtreedir)
|
| 140 | 145 |
assert not os.listdir(buildtreedir)
|
| ... | ... | @@ -21,10 +21,11 @@ DATA_DIR = os.path.join( |
| 21 | 21 |
# to false, which is the default user context. The cache has to be
|
| 22 | 22 |
# cleared as just forcefully removing the refpath leaves dangling objects.
|
| 23 | 23 |
def default_state(cli, tmpdir, share):
|
| 24 |
- shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 24 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
|
|
| 25 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
|
|
| 25 | 26 |
cli.configure({
|
| 26 | 27 |
'artifacts': {'url': share.repo, 'push': False},
|
| 27 |
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
|
|
| 28 |
+ 'cachedir': str(tmpdir),
|
|
| 28 | 29 |
'cache': {'pull-buildtrees': False},
|
| 29 | 30 |
})
|
| 30 | 31 |
|
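Because the refs and the extracted staging live in separate trees under the root, a full reset has to clear both, exactly as default_state does above. The same idea as a reusable sketch (helper name hypothetical):

    import os
    import shutil

    def reset_cache(root):
        # Remove both stores; removing only the refs would leave
        # dangling objects behind (see the comment above).
        for sub in ('cas', 'extract'):
            path = os.path.join(root, sub)
            if os.path.isdir(path):
                shutil.rmtree(path)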
| ... | ... | @@ -45,7 +46,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles): |
| 45 | 46 |
create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
|
| 46 | 47 |
cli2.configure({
|
| 47 | 48 |
'artifacts': {'url': share1.repo, 'push': True},
|
| 48 |
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
| 49 |
+ 'cachedir': str(tmpdir),
|
|
| 49 | 50 |
})
|
| 50 | 51 |
|
| 51 | 52 |
# Build autotools element, check it is pushed, delete local
|
| ... | ... | @@ -74,7 +75,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles): |
| 74 | 75 |
result = cli2.run(project=project, args=['artifact', 'pull', element_name])
|
| 75 | 76 |
assert element_name in result.get_pulled_elements()
|
| 76 | 77 |
elementdigest = share1.has_artifact('test', element_name, cli2.get_element_key(project, element_name))
|
| 77 |
- buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
| 78 |
+ buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
|
|
| 78 | 79 |
elementdigest.hash, 'buildtree')
|
| 79 | 80 |
assert not os.path.isdir(buildtreedir)
|
| 80 | 81 |
result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
|
| ... | ... | @@ -62,7 +62,7 @@ def test_buildtree_staged_warn_empty_cached(cli_integration, tmpdir, datafiles): |
| 62 | 62 |
# Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
|
| 63 | 63 |
# caching an empty buildtree
|
| 64 | 64 |
cli_integration.configure({
|
| 65 |
- 'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 65 |
+ 'cachedir': str(tmpdir)
|
|
| 66 | 66 |
})
|
| 67 | 67 |
|
| 68 | 68 |
res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
|
| ... | ... | @@ -139,7 +139,7 @@ def test_buildtree_from_failure_option_never(cli_integration, tmpdir, datafiles) |
| 139 | 139 |
# Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
|
| 140 | 140 |
# caching an empty buildtree
|
| 141 | 141 |
cli_integration.configure({
|
| 142 |
- 'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 142 |
+ 'cachedir': str(tmpdir)
|
|
| 143 | 143 |
})
|
| 144 | 144 |
|
| 145 | 145 |
res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
|
| ... | ... | @@ -163,7 +163,7 @@ def test_buildtree_from_failure_option_failure(cli_integration, tmpdir, datafile |
| 163 | 163 |
# default behaviour (which is always) as the buildtree will explicitly have been
|
| 164 | 164 |
# cached with content.
|
| 165 | 165 |
cli_integration.configure({
|
| 166 |
- 'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 166 |
+ 'cachedir': str(tmpdir)
|
|
| 167 | 167 |
})
|
| 168 | 168 |
|
| 169 | 169 |
res = cli_integration.run(project=project, args=['--cache-buildtrees', 'failure', 'build', element_name])
|
| ... | ... | @@ -195,10 +195,7 @@ def test_buildtree_pulled(cli, tmpdir, datafiles): |
| 195 | 195 |
assert cli.get_element_state(project, element_name) == 'cached'
|
| 196 | 196 |
|
| 197 | 197 |
# Discard the cache
|
| 198 |
- cli.configure({
|
|
| 199 |
- 'artifacts': {'url': share.repo, 'push': True},
|
|
| 200 |
- 'artifactdir': os.path.join(cli.directory, 'artifacts2')
|
|
| 201 |
- })
|
|
| 198 |
+ shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
|
|
| 202 | 199 |
assert cli.get_element_state(project, element_name) != 'cached'
|
| 203 | 200 |
|
| 204 | 201 |
# Pull from cache, ensuring the cli option is set to pull the buildtree
|
| ... | ... | @@ -231,10 +228,7 @@ def test_buildtree_options(cli, tmpdir, datafiles): |
| 231 | 228 |
assert share.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
| 232 | 229 |
|
| 233 | 230 |
# Discard the cache
|
| 234 |
- cli.configure({
|
|
| 235 |
- 'artifacts': {'url': share.repo, 'push': True},
|
|
| 236 |
- 'artifactdir': os.path.join(cli.directory, 'artifacts2')
|
|
| 237 |
- })
|
|
| 231 |
+ shutil.rmtree(str(os.path.join(str(tmpdir), 'cache', 'cas')))
|
|
| 238 | 232 |
assert cli.get_element_state(project, element_name) != 'cached'
|
| 239 | 233 |
|
| 240 | 234 |
# Pull from cache, but do not include buildtrees.
|
| ... | ... | @@ -274,7 +268,7 @@ def test_buildtree_options(cli, tmpdir, datafiles): |
| 274 | 268 |
])
|
| 275 | 269 |
assert 'Attempting to fetch missing artifact buildtree' in res.stderr
|
| 276 | 270 |
assert 'Hi' in res.output
|
| 277 |
- shutil.rmtree(os.path.join(os.path.join(cli.directory, 'artifacts2')))
|
|
| 271 |
+ shutil.rmtree(os.path.join(os.path.join(str(tmpdir), 'cache', 'cas')))
|
|
| 278 | 272 |
assert cli.get_element_state(project, element_name) != 'cached'
|
| 279 | 273 |
|
| 280 | 274 |
# Check it's not loading the shell at all with always set for the buildtree, when the
|
| ... | ... | @@ -31,7 +31,7 @@ def create_test_directory(*path, mode=0o644): |
| 31 | 31 |
@pytest.mark.datafiles(DATA_DIR)
|
| 32 | 32 |
@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS] + ['local'])
|
| 33 | 33 |
@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
|
| 34 |
-def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_cache):
|
|
| 34 |
+def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
|
|
| 35 | 35 |
project = str(datafiles)
|
| 36 | 36 |
element_name = 'list.bst'
|
| 37 | 37 |
element_path = os.path.join(project, 'elements', element_name)
|
| ... | ... | @@ -94,9 +94,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca |
| 94 | 94 |
return f.read()
|
| 95 | 95 |
finally:
|
| 96 | 96 |
os.umask(old_umask)
|
| 97 |
- cache_dir = integration_cache.artifacts
|
|
| 98 |
- cli.remove_artifact_from_cache(project, element_name,
|
|
| 99 |
- cache_dir=cache_dir)
|
|
| 97 |
+ cli.remove_artifact_from_cache(project, element_name)
|
|
| 100 | 98 |
|
| 101 | 99 |
assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
|
| 102 | 100 |
|
| ... | ... | @@ -104,7 +102,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca |
| 104 | 102 |
@pytest.mark.integration
|
| 105 | 103 |
@pytest.mark.datafiles(DATA_DIR)
|
| 106 | 104 |
@pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
|
| 107 |
-def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
|
|
| 105 |
+def test_deterministic_source_local(cli, tmpdir, datafiles):
|
|
| 108 | 106 |
"""Only user rights should be considered for local source.
|
| 109 | 107 |
"""
|
| 110 | 108 |
project = str(datafiles)
|
| ... | ... | @@ -156,8 +154,6 @@ def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache): |
| 156 | 154 |
with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
|
| 157 | 155 |
return f.read()
|
| 158 | 156 |
finally:
|
| 159 |
- cache_dir = integration_cache.artifacts
|
|
| 160 |
- cli.remove_artifact_from_cache(project, element_name,
|
|
| 161 |
- cache_dir=cache_dir)
|
|
| 157 |
+ cli.remove_artifact_from_cache(project, element_name)
|
|
| 162 | 158 |
|
| 163 | 159 |
assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)
|
| ... | ... | @@ -43,7 +43,7 @@ def test_context_load(context_fixture): |
| 43 | 43 |
context.load(config=os.devnull)
|
| 44 | 44 |
assert(context.sourcedir == os.path.join(cache_home, 'buildstream', 'sources'))
|
| 45 | 45 |
assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build'))
|
| 46 |
- assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts'))
|
|
| 46 |
+ assert(context.cachedir == os.path.join(cache_home, 'buildstream'))
|
|
| 47 | 47 |
assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs'))
|
| 48 | 48 |
|
| 49 | 49 |
|
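For reference, the default these assertions encode resolves from the XDG cache home, with the cache root now sitting directly at buildstream/ instead of buildstream/artifacts. A sketch (assuming the standard ~/.cache fallback):

    import os

    cache_home = os.environ.get('XDG_CACHE_HOME') or os.path.expanduser('~/.cache')
    cachedir = os.path.join(cache_home, 'buildstream')   # was .../buildstream/artifacts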
| ... | ... | @@ -57,7 +57,7 @@ def test_context_load_envvar(context_fixture): |
| 57 | 57 |
context.load(config=os.devnull)
|
| 58 | 58 |
assert(context.sourcedir == os.path.join('/', 'some', 'path', 'buildstream', 'sources'))
|
| 59 | 59 |
assert(context.builddir == os.path.join('/', 'some', 'path', 'buildstream', 'build'))
|
| 60 |
- assert(context.artifactdir == os.path.join('/', 'some', 'path', 'buildstream', 'artifacts'))
|
|
| 60 |
+ assert(context.cachedir == os.path.join('/', 'some', 'path', 'buildstream'))
|
|
| 61 | 61 |
assert(context.logdir == os.path.join('/', 'some', 'path', 'buildstream', 'logs'))
|
| 62 | 62 |
|
| 63 | 63 |
# Reset the environment variable
|
| ... | ... | @@ -79,7 +79,7 @@ def test_context_load_user_config(context_fixture, datafiles): |
| 79 | 79 |
|
| 80 | 80 |
assert(context.sourcedir == os.path.expanduser('~/pony'))
|
| 81 | 81 |
assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build'))
|
| 82 |
- assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts'))
|
|
| 82 |
+ assert(context.cachedir == os.path.join(cache_home, 'buildstream'))
|
|
| 83 | 83 |
assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs'))
|
| 84 | 84 |
|
| 85 | 85 |
|
| ... | ... | @@ -16,7 +16,7 @@ def create_pipeline(tmpdir, basedir, target): |
| 16 | 16 |
context = Context()
|
| 17 | 17 |
context.load(config=os.devnull)
|
| 18 | 18 |
context.deploydir = os.path.join(str(tmpdir), 'deploy')
|
| 19 |
- context.artifactdir = os.path.join(str(tmpdir), 'artifact')
|
|
| 19 |
+ context.casdir = os.path.join(str(tmpdir), 'cas')
|
|
| 20 | 20 |
project = Project(basedir, context)
|
| 21 | 21 |
|
| 22 | 22 |
def dummy_handler(message, context):
|
| ... | ... | @@ -46,7 +46,6 @@ class ArtifactShare(): |
| 46 | 46 |
# in tests as a remote artifact push/pull configuration
|
| 47 | 47 |
#
|
| 48 | 48 |
self.repodir = os.path.join(self.directory, 'repo')
|
| 49 |
- |
|
| 50 | 49 |
os.makedirs(self.repodir)
|
| 51 | 50 |
|
| 52 | 51 |
self.cas = CASCache(self.repodir)
|
