Raoul Hidalgo Charman pushed to branch raoul/870-root-cache-dir at BuildStream / buildstream
Commits:
- 9989c304 by Raoul Hidalgo Charman at 2019-02-07T11:12:02Z
- 24d2a9bf by Raoul Hidalgo Charman at 2019-02-07T11:12:49Z
28 changed files:
- buildstream/_artifactcache.py
- buildstream/_cas/__init__.py
- buildstream/_cas/cascache.py
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_frontend/status.py
- buildstream/_frontend/widget.py
- buildstream/_scheduler/jobs/cachesizejob.py
- buildstream/_scheduler/jobs/cleanupjob.py
- buildstream/_scheduler/jobs/job.py
- buildstream/data/userconfig.yaml
- buildstream/utils.py
- conftest.py
- doc/bst2html.py
- doc/sessions/running-commands.run
- tests/artifactcache/cache_size.py
- tests/artifactcache/expiry.py
- tests/artifactcache/junctions.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
- tests/frontend/pull.py
- tests/integration/build-tree.py
- tests/integration/cachedfail.py
- tests/integration/messages.py
- tests/integration/pullbuildtrees.py
- tests/integration/source-determinism.py
- tests/testutils/artifactshare.py
- tests/testutils/runcli.py
Changes:
... | ... | @@ -23,7 +23,7 @@ import string |
23 | 23 |
from collections.abc import Mapping
|
24 | 24 |
|
25 | 25 |
from .types import _KeyStrength
|
26 |
-from ._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
|
|
26 |
+from ._exceptions import ArtifactError, CASError
|
|
27 | 27 |
from ._message import Message, MessageType
|
28 | 28 |
from . import utils
|
29 | 29 |
from . import _yaml
|
... | ... | @@ -46,39 +46,6 @@ class ArtifactCacheSpec(CASRemoteSpec): |
46 | 46 |
pass
|
47 | 47 |
|
48 | 48 |
|
49 |
-# ArtifactCacheUsage
|
|
50 |
-#
|
|
51 |
-# A simple object to report the current artifact cache
|
|
52 |
-# usage details.
|
|
53 |
-#
|
|
54 |
-# Note that this uses the user configured cache quota
|
|
55 |
-# rather than the internal quota with protective headroom
|
|
56 |
-# removed, to provide a more sensible value to display to
|
|
57 |
-# the user.
|
|
58 |
-#
|
|
59 |
-# Args:
|
|
60 |
-# artifacts (ArtifactCache): The artifact cache to get the status of
|
|
61 |
-#
|
|
62 |
-class ArtifactCacheUsage():
|
|
63 |
- |
|
64 |
- def __init__(self, artifacts):
|
|
65 |
- context = artifacts.context
|
|
66 |
- self.quota_config = context.config_cache_quota # Configured quota
|
|
67 |
- self.quota_size = artifacts._cache_quota_original # Resolved cache quota in bytes
|
|
68 |
- self.used_size = artifacts.get_cache_size() # Size used by artifacts in bytes
|
|
69 |
- self.used_percent = 0 # Percentage of the quota used
|
|
70 |
- if self.quota_size is not None:
|
|
71 |
- self.used_percent = int(self.used_size * 100 / self.quota_size)
|
|
72 |
- |
|
73 |
- # Formattable into a human readable string
|
|
74 |
- #
|
|
75 |
- def __str__(self):
|
|
76 |
- return "{} / {} ({}%)" \
|
|
77 |
- .format(utils._pretty_size(self.used_size, dec_places=1),
|
|
78 |
- self.quota_config,
|
|
79 |
- self.used_percent)
|
|
80 |
- |
|
81 |
- |
|
82 | 49 |
# An ArtifactCache manages artifacts.
|
83 | 50 |
#
|
84 | 51 |
# Args:
|
... | ... | @@ -91,15 +58,23 @@ class ArtifactCache(): |
91 | 58 |
|
92 | 59 |
self.cas = context.get_cascache()
|
93 | 60 |
|
61 |
+ # check to see if the cache quota exceeds space available
|
|
62 |
+ if self.cas.cache_quota_exceeded:
|
|
63 |
+ self._message(MessageType.WARN,
|
|
64 |
+ "Your system does not have enough available " +
|
|
65 |
+ "space to support the cache quota specified.",
|
|
66 |
+ detail=("You have specified a quota of {quota} total disk space.\n" +
|
|
67 |
+ "The filesystem containing {local_cache_path} only " +
|
|
68 |
+ "has {available_size} available.")
|
|
69 |
+ .format(quota=context.config_cache_quota,
|
|
70 |
+ local_cache_path=context.cachedir,
|
|
71 |
+ available_size=self.cas.available_space))
|
|
72 |
+ |
|
94 | 73 |
self.global_remote_specs = []
|
95 | 74 |
self.project_remote_specs = {}
|
96 | 75 |
|
97 | 76 |
self._required_elements = set() # The elements required for this session
|
98 |
- self._cache_size = None # The current cache size, sometimes it's an estimate
|
|
99 |
- self._cache_quota = None # The cache quota
|
|
100 |
- self._cache_quota_original = None # The cache quota as specified by the user, in bytes
|
|
101 |
- self._cache_quota_headroom = None # The headroom in bytes before reaching the quota or full disk
|
|
102 |
- self._cache_lower_threshold = None # The target cache size for a cleanup
|
|
77 |
+ |
|
103 | 78 |
self._remotes_setup = False # Check to prevent double-setup of remotes
|
104 | 79 |
|
105 | 80 |
# Per-project list of _CASRemote instances.
|
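The hunk above is the visible half of a handshake: CASCache (which can no longer emit messages) only records the condition at construction time, and ArtifactCache, which does have a Context, reports it. A minimal sketch of that flow, with hypothetical Fake* stand-ins rather than the real classes:

class FakeCASCache:
    def __init__(self, quota, cache_size, available_space):
        self.available_space = available_space
        # Set once at construction, mirroring _calculate_cache_quota()
        self.cache_quota_exceeded = quota > cache_size + available_space

class FakeArtifactCache:
    def __init__(self, cas):
        if cas.cache_quota_exceeded:
            print("WARN: quota exceeds the {} bytes available".format(
                cas.available_space))

FakeArtifactCache(FakeCASCache(quota=10e9, cache_size=1e9, available_space=2e9))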
... | ... | @@ -110,8 +85,6 @@ class ArtifactCache(): |
110 | 85 |
|
111 | 86 |
os.makedirs(self.extractdir, exist_ok=True)
|
112 | 87 |
|
113 |
- self._calculate_cache_quota()
|
|
114 |
- |
|
115 | 88 |
# get_artifact_fullname()
|
116 | 89 |
#
|
117 | 90 |
# Generate a full name for an artifact, including the
|
... | ... | @@ -266,7 +239,7 @@ class ArtifactCache(): |
266 | 239 |
space_saved = 0
|
267 | 240 |
|
268 | 241 |
# Start off with an announcement with as much info as possible
|
269 |
- volume_size, volume_avail = self._get_cache_volume_size()
|
|
242 |
+ volume_size, volume_avail = self.cas._get_cache_volume_size()
|
|
270 | 243 |
self._message(MessageType.STATUS, "Starting cache cleanup",
|
271 | 244 |
detail=("Elements required by the current build plan: {}\n" +
|
272 | 245 |
"User specified quota: {} ({})\n" +
|
... | ... | @@ -274,8 +247,8 @@ class ArtifactCache(): |
274 | 247 |
"Cache volume: {} total, {} available")
|
275 | 248 |
.format(len(self._required_elements),
|
276 | 249 |
context.config_cache_quota,
|
277 |
- utils._pretty_size(self._cache_quota_original, dec_places=2),
|
|
278 |
- utils._pretty_size(self.get_cache_size(), dec_places=2),
|
|
250 |
+ utils._pretty_size(self.cas._cache_quota, dec_places=2),
|
|
251 |
+ utils._pretty_size(self.cas.get_cache_size(), dec_places=2),
|
|
279 | 252 |
utils._pretty_size(volume_size, dec_places=2),
|
280 | 253 |
utils._pretty_size(volume_avail, dec_places=2)))
|
281 | 254 |
|
... | ... | @@ -292,9 +265,10 @@ class ArtifactCache(): |
292 | 265 |
])
|
293 | 266 |
|
294 | 267 |
# Do a real computation of the cache size once, just in case
|
295 |
- self.compute_cache_size()
|
|
268 |
+ usage = self.cas.compute_cache_size()
|
|
269 |
+ self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
|
|
296 | 270 |
|
297 |
- while self.get_cache_size() >= self._cache_lower_threshold:
|
|
271 |
+ while self.cas.get_cache_size() >= self.cas._cache_lower_threshold:
|
|
298 | 272 |
try:
|
299 | 273 |
to_remove = artifacts.pop(0)
|
300 | 274 |
except IndexError:
|
... | ... | @@ -311,7 +285,7 @@ class ArtifactCache(): |
311 | 285 |
"Please increase the cache-quota in {} and/or make more disk space."
|
312 | 286 |
.format(removed_ref_count,
|
313 | 287 |
utils._pretty_size(space_saved, dec_places=2),
|
314 |
- utils._pretty_size(self.get_cache_size(), dec_places=2),
|
|
288 |
+ utils._pretty_size(self.cas.get_cache_size(), dec_places=2),
|
|
315 | 289 |
len(self._required_elements),
|
316 | 290 |
(context.config_origin or default_conf)))
|
317 | 291 |
|
... | ... | @@ -337,7 +311,7 @@ class ArtifactCache(): |
337 | 311 |
to_remove))
|
338 | 312 |
|
339 | 313 |
# Remove the size from the removed size
|
340 |
- self.set_cache_size(self._cache_size - size)
|
|
314 |
+ self.cas.set_cache_size(self.cas._cache_size - size)
|
|
341 | 315 |
|
342 | 316 |
# User callback
|
343 | 317 |
#
|
... | ... | @@ -353,29 +327,12 @@ class ArtifactCache(): |
353 | 327 |
"Cache usage is now: {}")
|
354 | 328 |
.format(removed_ref_count,
|
355 | 329 |
utils._pretty_size(space_saved, dec_places=2),
|
356 |
- utils._pretty_size(self.get_cache_size(), dec_places=2)))
|
|
357 |
- |
|
358 |
- return self.get_cache_size()
|
|
359 |
- |
|
360 |
- # compute_cache_size()
|
|
361 |
- #
|
|
362 |
- # Computes the real artifact cache size by calling
|
|
363 |
- # the abstract calculate_cache_size() method.
|
|
364 |
- #
|
|
365 |
- # Returns:
|
|
366 |
- # (int): The size of the artifact cache.
|
|
367 |
- #
|
|
368 |
- def compute_cache_size(self):
|
|
369 |
- old_cache_size = self._cache_size
|
|
370 |
- new_cache_size = self.cas.calculate_cache_size()
|
|
330 |
+ utils._pretty_size(self.cas.get_cache_size(), dec_places=2)))
|
|
371 | 331 |
|
372 |
- if old_cache_size != new_cache_size:
|
|
373 |
- self._cache_size = new_cache_size
|
|
332 |
+ return self.cas.get_cache_size()
|
|
374 | 333 |
|
375 |
- usage = ArtifactCacheUsage(self)
|
|
376 |
- self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
|
|
377 |
- |
|
378 |
- return self._cache_size
|
|
334 |
+ def full(self):
|
|
335 |
+ return self.cas.full()
|
|
379 | 336 |
|
380 | 337 |
# add_artifact_size()
|
381 | 338 |
#
|
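After this hunk, the size accounting that remains on ArtifactCache is a thin delegation layer over CASCache. A rough sketch of the resulting shape (hypothetical class name, not part of the patch):

class ArtifactCacheSketch:
    def __init__(self, cas):
        self.cas = cas          # the CASCache now owns all size state

    def full(self):
        return self.cas.full()  # quota and disk checks live in CASCache

    def add_artifact_size(self, artifact_size):
        # Mirrors add_artifact_size() below: bump the cached estimate
        self.cas.set_cache_size(self.cas.get_cache_size() + artifact_size)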
... | ... | @@ -386,71 +343,10 @@ class ArtifactCache(): |
386 | 343 |
# artifact_size (int): The size to add.
|
387 | 344 |
#
|
388 | 345 |
def add_artifact_size(self, artifact_size):
|
389 |
- cache_size = self.get_cache_size()
|
|
346 |
+ cache_size = self.cas.get_cache_size()
|
|
390 | 347 |
cache_size += artifact_size
|
391 | 348 |
|
392 |
- self.set_cache_size(cache_size)
|
|
393 |
- |
|
394 |
- # get_cache_size()
|
|
395 |
- #
|
|
396 |
- # Fetches the cached size of the cache, this is sometimes
|
|
397 |
- # an estimate and periodically adjusted to the real size
|
|
398 |
- # when a cache size calculation job runs.
|
|
399 |
- #
|
|
400 |
- # When it is an estimate, the value is either correct, or
|
|
401 |
- # it is greater than the actual cache size.
|
|
402 |
- #
|
|
403 |
- # Returns:
|
|
404 |
- # (int) An approximation of the artifact cache size, in bytes.
|
|
405 |
- #
|
|
406 |
- def get_cache_size(self):
|
|
407 |
- |
|
408 |
- # If we don't currently have an estimate, figure out the real cache size.
|
|
409 |
- if self._cache_size is None:
|
|
410 |
- stored_size = self._read_cache_size()
|
|
411 |
- if stored_size is not None:
|
|
412 |
- self._cache_size = stored_size
|
|
413 |
- else:
|
|
414 |
- self.compute_cache_size()
|
|
415 |
- |
|
416 |
- return self._cache_size
|
|
417 |
- |
|
418 |
- # set_cache_size()
|
|
419 |
- #
|
|
420 |
- # Forcefully set the overall cache size.
|
|
421 |
- #
|
|
422 |
- # This is used to update the size in the main process after
|
|
423 |
- # having calculated in a cleanup or a cache size calculation job.
|
|
424 |
- #
|
|
425 |
- # Args:
|
|
426 |
- # cache_size (int): The size to set.
|
|
427 |
- #
|
|
428 |
- def set_cache_size(self, cache_size):
|
|
429 |
- |
|
430 |
- assert cache_size is not None
|
|
431 |
- |
|
432 |
- self._cache_size = cache_size
|
|
433 |
- self._write_cache_size(self._cache_size)
|
|
434 |
- |
|
435 |
- # full()
|
|
436 |
- #
|
|
437 |
- # Checks if the artifact cache is full, either
|
|
438 |
- # because the user configured quota has been exceeded
|
|
439 |
- # or because the underlying disk is almost full.
|
|
440 |
- #
|
|
441 |
- # Returns:
|
|
442 |
- # (bool): True if the artifact cache is full
|
|
443 |
- #
|
|
444 |
- def full(self):
|
|
445 |
- |
|
446 |
- if self.get_cache_size() > self._cache_quota:
|
|
447 |
- return True
|
|
448 |
- |
|
449 |
- _, volume_avail = self._get_cache_volume_size()
|
|
450 |
- if volume_avail < self._cache_quota_headroom:
|
|
451 |
- return True
|
|
452 |
- |
|
453 |
- return False
|
|
349 |
+ self.cas.set_cache_size(cache_size)
|
|
454 | 350 |
|
455 | 351 |
# preflight():
|
456 | 352 |
#
|
... | ... | @@ -896,142 +792,6 @@ class ArtifactCache(): |
896 | 792 |
with self.context.timed_activity("Initializing remote caches", silent_nested=True):
|
897 | 793 |
self.initialize_remotes(on_failure=remote_failed)
|
898 | 794 |
|
899 |
- # _write_cache_size()
|
|
900 |
- #
|
|
901 |
- # Writes the given size of the artifact to the cache's size file
|
|
902 |
- #
|
|
903 |
- # Args:
|
|
904 |
- # size (int): The size of the artifact cache to record
|
|
905 |
- #
|
|
906 |
- def _write_cache_size(self, size):
|
|
907 |
- assert isinstance(size, int)
|
|
908 |
- size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
|
|
909 |
- with utils.save_file_atomic(size_file_path, "w") as f:
|
|
910 |
- f.write(str(size))
|
|
911 |
- |
|
912 |
- # _read_cache_size()
|
|
913 |
- #
|
|
914 |
- # Reads and returns the size of the artifact cache that's stored in the
|
|
915 |
- # cache's size file
|
|
916 |
- #
|
|
917 |
- # Returns:
|
|
918 |
- # (int): The size of the artifact cache, as recorded in the file
|
|
919 |
- #
|
|
920 |
- def _read_cache_size(self):
|
|
921 |
- size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
|
|
922 |
- |
|
923 |
- if not os.path.exists(size_file_path):
|
|
924 |
- return None
|
|
925 |
- |
|
926 |
- with open(size_file_path, "r") as f:
|
|
927 |
- size = f.read()
|
|
928 |
- |
|
929 |
- try:
|
|
930 |
- num_size = int(size)
|
|
931 |
- except ValueError as e:
|
|
932 |
- raise ArtifactError("Size '{}' parsed from '{}' was not an integer".format(
|
|
933 |
- size, size_file_path)) from e
|
|
934 |
- |
|
935 |
- return num_size
|
|
936 |
- |
|
937 |
- # _calculate_cache_quota()
|
|
938 |
- #
|
|
939 |
- # Calculates and sets the cache quota and lower threshold based on the
|
|
940 |
- # quota set in Context.
|
|
941 |
- # It checks that the quota is both a valid expression, and that there is
|
|
942 |
- # enough disk space to satisfy that quota
|
|
943 |
- #
|
|
944 |
- def _calculate_cache_quota(self):
|
|
945 |
- # Headroom intended to give BuildStream a bit of leeway.
|
|
946 |
- # This acts as the minimum size of cache_quota and also
|
|
947 |
- # is taken from the user requested cache_quota.
|
|
948 |
- #
|
|
949 |
- if 'BST_TEST_SUITE' in os.environ:
|
|
950 |
- self._cache_quota_headroom = 0
|
|
951 |
- else:
|
|
952 |
- self._cache_quota_headroom = 2e9
|
|
953 |
- |
|
954 |
- try:
|
|
955 |
- cache_quota = utils._parse_size(self.context.config_cache_quota,
|
|
956 |
- self.context.artifactdir)
|
|
957 |
- except utils.UtilError as e:
|
|
958 |
- raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
959 |
- "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
|
|
960 |
- "\nValid values are, for example: 800M 10G 1T 50%\n"
|
|
961 |
- .format(str(e))) from e
|
|
962 |
- |
|
963 |
- total_size, available_space = self._get_cache_volume_size()
|
|
964 |
- cache_size = self.get_cache_size()
|
|
965 |
- |
|
966 |
- # Ensure system has enough storage for the cache_quota
|
|
967 |
- #
|
|
968 |
- # If cache_quota is none, set it to the maximum it could possibly be.
|
|
969 |
- #
|
|
970 |
- # Also check that cache_quota is at least as large as our headroom.
|
|
971 |
- #
|
|
972 |
- if cache_quota is None: # Infinity, set to max system storage
|
|
973 |
- cache_quota = cache_size + available_space
|
|
974 |
- if cache_quota < self._cache_quota_headroom: # Check minimum
|
|
975 |
- raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
976 |
- "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
|
|
977 |
- "BuildStream requires a minimum cache quota of 2G.")
|
|
978 |
- elif cache_quota > total_size:
|
|
979 |
- # A quota greater than the total disk size is certainly an error
|
|
980 |
- raise ArtifactError("Your system does not have enough available " +
|
|
981 |
- "space to support the cache quota specified.",
|
|
982 |
- detail=("You have specified a quota of {quota} total disk space.\n" +
|
|
983 |
- "The filesystem containing {local_cache_path} only " +
|
|
984 |
- "has {total_size} total disk space.")
|
|
985 |
- .format(
|
|
986 |
- quota=self.context.config_cache_quota,
|
|
987 |
- local_cache_path=self.context.artifactdir,
|
|
988 |
- total_size=utils._pretty_size(total_size)),
|
|
989 |
- reason='insufficient-storage-for-quota')
|
|
990 |
- elif cache_quota > cache_size + available_space:
|
|
991 |
- # The quota does not fit in the available space, this is a warning
|
|
992 |
- if '%' in self.context.config_cache_quota:
|
|
993 |
- available = (available_space / total_size) * 100
|
|
994 |
- available = '{}% of total disk space'.format(round(available, 1))
|
|
995 |
- else:
|
|
996 |
- available = utils._pretty_size(available_space)
|
|
997 |
- |
|
998 |
- self._message(MessageType.WARN,
|
|
999 |
- "Your system does not have enough available " +
|
|
1000 |
- "space to support the cache quota specified.",
|
|
1001 |
- detail=("You have specified a quota of {quota} total disk space.\n" +
|
|
1002 |
- "The filesystem containing {local_cache_path} only " +
|
|
1003 |
- "has {available_size} available.")
|
|
1004 |
- .format(quota=self.context.config_cache_quota,
|
|
1005 |
- local_cache_path=self.context.artifactdir,
|
|
1006 |
- available_size=available))
|
|
1007 |
- |
|
1008 |
- # Place a slight headroom (2e9 (2GB)) into
|
|
1009 |
- # cache_quota to try and avoid exceptions.
|
|
1010 |
- #
|
|
1011 |
- # Of course, we might still end up running out during a build
|
|
1012 |
- # if we end up writing more than 2G, but hey, this stuff is
|
|
1013 |
- # already really fuzzy.
|
|
1014 |
- #
|
|
1015 |
- self._cache_quota_original = cache_quota
|
|
1016 |
- self._cache_quota = cache_quota - self._cache_quota_headroom
|
|
1017 |
- self._cache_lower_threshold = self._cache_quota / 2
|
|
1018 |
- |
|
1019 |
- # _get_cache_volume_size()
|
|
1020 |
- #
|
|
1021 |
- # Get the available space and total space for the volume on
|
|
1022 |
- # which the artifact cache is located.
|
|
1023 |
- #
|
|
1024 |
- # Returns:
|
|
1025 |
- # (int): The total number of bytes on the volume
|
|
1026 |
- # (int): The number of available bytes on the volume
|
|
1027 |
- #
|
|
1028 |
- # NOTE: We use this stub to allow the test cases
|
|
1029 |
- # to override what an artifact cache thinks
|
|
1030 |
- # about its disk size and available bytes.
|
|
1031 |
- #
|
|
1032 |
- def _get_cache_volume_size(self):
|
|
1033 |
- return utils._get_volume_size(self.context.artifactdir)
|
|
1034 |
- |
|
1035 | 795 |
|
1036 | 796 |
# _configured_remote_artifact_cache_specs():
|
1037 | 797 |
#
|
... | ... | @@ -17,5 +17,5 @@ |
17 | 17 |
# Authors:
|
18 | 18 |
# Tristan Van Berkom <tristan vanberkom codethink co uk>
|
19 | 19 |
|
20 |
-from .cascache import CASCache
|
|
20 |
+from .cascache import CASCache, CASCacheUsage
|
|
21 | 21 |
from .casremote import CASRemote, CASRemoteSpec
|
... | ... | @@ -31,25 +31,73 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 |
31 | 31 |
from .._protos.buildstream.v2 import buildstream_pb2
|
32 | 32 |
|
33 | 33 |
from .. import utils
|
34 |
-from .._exceptions import CASCacheError
|
|
34 |
+from .._exceptions import CASCacheError, LoadError, LoadErrorReason
|
|
35 | 35 |
|
36 | 36 |
from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate
|
37 | 37 |
|
38 | 38 |
|
39 |
+CACHE_SIZE_FILE = "cache_size"
|
|
40 |
+ |
|
41 |
+ |
|
42 |
+# CASCacheUsage
|
|
43 |
+#
|
|
44 |
+# A simple object to report the current CAS cache usage details.
|
|
45 |
+#
|
|
46 |
+# Note that this uses the user configured cache quota
|
|
47 |
+# rather than the internal quota with protective headroom
|
|
48 |
+# removed, to provide a more sensible value to display to
|
|
49 |
+# the user.
|
|
50 |
+#
|
|
51 |
+# Args:
|
|
52 |
+# cas (CASCache): The CAS cache to get the status of
|
|
53 |
+#
|
|
54 |
+class CASCacheUsage():
|
|
55 |
+ |
|
56 |
+ def __init__(self, cas):
|
|
57 |
+ self.quota_config = cas._config_cache_quota # Configured quota
|
|
58 |
+ self.quota_size = cas._cache_quota_original # Resolved cache quota in bytes
|
|
59 |
+ self.used_size = cas.get_cache_size() # Size used by artifacts in bytes
|
|
60 |
+ self.used_percent = 0 # Percentage of the quota used
|
|
61 |
+ if self.quota_size is not None:
|
|
62 |
+ self.used_percent = int(self.used_size * 100 / self.quota_size)
|
|
63 |
+ |
|
64 |
+ # Formattable into a human readable string
|
|
65 |
+ #
|
|
66 |
+ def __str__(self):
|
|
67 |
+ return "{} / {} ({}%)" \
|
|
68 |
+ .format(utils._pretty_size(self.used_size, dec_places=1),
|
|
69 |
+ self.quota_config,
|
|
70 |
+ self.used_percent)
|
|
71 |
+ |
|
72 |
+ |
|
39 | 73 |
# A CASCache manages a CAS repository as specified in the Remote Execution API.
|
40 | 74 |
#
|
41 | 75 |
# Args:
|
42 | 76 |
# path (str): The root directory for the CAS repository
|
77 |
+# cache_quota (int): User configured cache quota
|
|
43 | 78 |
#
|
44 | 79 |
class CASCache():
|
45 | 80 |
|
46 |
- def __init__(self, path):
|
|
81 |
+ def __init__(self, path, cache_quota=None):
|
|
47 | 82 |
self.casdir = os.path.join(path, 'cas')
|
48 | 83 |
self.tmpdir = os.path.join(path, 'tmp')
|
49 | 84 |
os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
|
50 | 85 |
os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
|
51 | 86 |
os.makedirs(self.tmpdir, exist_ok=True)
|
52 | 87 |
|
88 |
+ self._config_cache_quota = cache_quota
|
|
89 |
+ self._cache_size = None # The current cache size, sometimes it's an estimate
|
|
90 |
+ self._cache_quota = None # The cache quota
|
|
91 |
+ self._cache_quota_original = None # The cache quota as specified by the user, in bytes
|
|
92 |
+ self._cache_quota_headroom = None
|
|
93 |
+ self._cache_lower_threshold = None # The target cache size for a cleanup
|
|
94 |
+ |
|
95 |
+ # Flag used for when cache quota is larger than available space
|
|
96 |
+ self.cache_quota_exceeded = False
|
|
97 |
+ self.available_space = None
|
|
98 |
+ |
|
99 |
+ self._calculate_cache_quota()
|
|
100 |
+ |
|
53 | 101 |
# preflight():
|
54 | 102 |
#
|
55 | 103 |
# Preflight check.
|
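For illustration, CASCacheUsage above reduces to this arithmetic; a minimal sketch with made-up numbers, using plain formatting in place of utils._pretty_size():

used_size = 3_200_000_000      # bytes currently used by the cache
quota_size = 10_000_000_000    # resolved quota (_cache_quota_original)
quota_config = "10G"           # the quota string exactly as configured
used_percent = int(used_size * 100 / quota_size)   # -> 32
print("{:.1f}G / {} ({}%)".format(used_size / 1e9, quota_config, used_percent))
# prints: 3.2G / 10G (32%)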
... | ... | @@ -587,6 +635,84 @@ class CASCache(): |
587 | 635 |
reachable = set()
|
588 | 636 |
self._reachable_refs_dir(reachable, tree, update_mtime=True)
|
589 | 637 |
|
638 |
+ # compute_cache_size()
|
|
639 |
+ #
|
|
640 |
+ # Computes the real artifact cache size by calling
|
|
641 |
+ # the abstract calculate_cache_size() method.
|
|
642 |
+ #
|
|
643 |
+ # Returns:
|
|
644 |
+ # (int): The size of the artifact cache.
|
|
645 |
+ #
|
|
646 |
+ def compute_cache_size(self):
|
|
647 |
+ old_cache_size = self._cache_size
|
|
648 |
+ new_cache_size = self.calculate_cache_size()
|
|
649 |
+ |
|
650 |
+ if old_cache_size != new_cache_size:
|
|
651 |
+ self._cache_size = new_cache_size
|
|
652 |
+ |
|
653 |
+ return self._cache_size
|
|
654 |
+ |
|
655 |
+ # get_cache_size()
|
|
656 |
+ #
|
|
657 |
+ # Fetches the cached size of the cache, this is sometimes
|
|
658 |
+ # an estimate and periodically adjusted to the real size
|
|
659 |
+ # when a cache size calculation job runs.
|
|
660 |
+ #
|
|
661 |
+ # When it is an estimate, the value is either correct, or
|
|
662 |
+ # it is greater than the actual cache size.
|
|
663 |
+ #
|
|
664 |
+ # Returns:
|
|
665 |
+ # (int) An approximation of the artifact cache size, in bytes.
|
|
666 |
+ #
|
|
667 |
+ def get_cache_size(self):
|
|
668 |
+ |
|
669 |
+ # If we don't currently have an estimate, figure out the real cache size.
|
|
670 |
+ if self._cache_size is None:
|
|
671 |
+ stored_size = self._read_cache_size()
|
|
672 |
+ if stored_size is not None:
|
|
673 |
+ self._cache_size = stored_size
|
|
674 |
+ else:
|
|
675 |
+ self._cache_size = self.compute_cache_size()
|
|
676 |
+ |
|
677 |
+ return self._cache_size
|
|
678 |
+ |
|
679 |
+ # set_cache_size()
|
|
680 |
+ #
|
|
681 |
+ # Forcefully set the overall cache size.
|
|
682 |
+ #
|
|
683 |
+ # This is used to update the size in the main process after
|
|
684 |
+ # having calculated in a cleanup or a cache size calculation job.
|
|
685 |
+ #
|
|
686 |
+ # Args:
|
|
687 |
+ # cache_size (int): The size to set.
|
|
688 |
+ #
|
|
689 |
+ def set_cache_size(self, cache_size):
|
|
690 |
+ |
|
691 |
+ assert cache_size is not None
|
|
692 |
+ |
|
693 |
+ self._cache_size = cache_size
|
|
694 |
+ self._write_cache_size(self._cache_size)
|
|
695 |
+ |
|
696 |
+ # full()
|
|
697 |
+ #
|
|
698 |
+ # Checks if the artifact cache is full, either
|
|
699 |
+ # because the user configured quota has been exceeded
|
|
700 |
+ # or because the underlying disk is almost full.
|
|
701 |
+ #
|
|
702 |
+ # Returns:
|
|
703 |
+ # (bool): True if the artifact cache is full
|
|
704 |
+ #
|
|
705 |
+ def full(self):
|
|
706 |
+ |
|
707 |
+ if self.get_cache_size() > self._cache_quota:
|
|
708 |
+ return True
|
|
709 |
+ |
|
710 |
+ _, volume_avail = self._get_cache_volume_size()
|
|
711 |
+ if volume_avail < self._cache_quota_headroom:
|
|
712 |
+ return True
|
|
713 |
+ |
|
714 |
+ return False
|
|
715 |
+ |
|
590 | 716 |
################################################
|
591 | 717 |
# Local Private Methods #
|
592 | 718 |
################################################
|
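The full() check added above combines two independent conditions; a self-contained sketch of the same logic under assumed byte values:

def is_full(cache_size, cache_quota, volume_avail, headroom):
    # Full if the (possibly estimated) cache size passes the internal
    # quota, or if the volume's free space dips under the headroom.
    if cache_size > cache_quota:
        return True
    return volume_avail < headroom

assert is_full(9e9, 8e9, 50e9, 2e9)       # over quota
assert is_full(1e9, 8e9, 1e9, 2e9)        # disk nearly full
assert not is_full(1e9, 8e9, 50e9, 2e9)   # neither condition holds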
... | ... | @@ -1015,6 +1141,122 @@ class CASCache(): |
1015 | 1141 |
# Send final batch
|
1016 | 1142 |
batch.send()
|
1017 | 1143 |
|
1144 |
+ # _read_cache_size()
|
|
1145 |
+ #
|
|
1146 |
+ # Reads and returns the size of the artifact cache that's stored in the
|
|
1147 |
+ # cache's size file
|
|
1148 |
+ #
|
|
1149 |
+ # Returns:
|
|
1150 |
+ # (int): The size of the artifact cache, as recorded in the file
|
|
1151 |
+ #
|
|
1152 |
+ def _read_cache_size(self):
|
|
1153 |
+ size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
|
|
1154 |
+ |
|
1155 |
+ if not os.path.exists(size_file_path):
|
|
1156 |
+ return None
|
|
1157 |
+ |
|
1158 |
+ with open(size_file_path, "r") as f:
|
|
1159 |
+ size = f.read()
|
|
1160 |
+ |
|
1161 |
+ try:
|
|
1162 |
+ num_size = int(size)
|
|
1163 |
+ except ValueError as e:
|
|
1164 |
+ raise CASCacheError("Size '{}' parsed from '{}' was not an integer".format(
|
|
1165 |
+ size, size_file_path)) from e
|
|
1166 |
+ |
|
1167 |
+ return num_size
|
|
1168 |
+ |
|
1169 |
+ # _write_cache_size()
|
|
1170 |
+ #
|
|
1171 |
+ # Writes the given size of the artifact to the cache's size file
|
|
1172 |
+ #
|
|
1173 |
+ # Args:
|
|
1174 |
+ # size (int): The size of the artifact cache to record
|
|
1175 |
+ #
|
|
1176 |
+ def _write_cache_size(self, size):
|
|
1177 |
+ assert isinstance(size, int)
|
|
1178 |
+ size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
|
|
1179 |
+ with utils.save_file_atomic(size_file_path, "w") as f:
|
|
1180 |
+ f.write(str(size))
|
|
1181 |
+ |
|
1182 |
+ # _get_cache_volume_size()
|
|
1183 |
+ #
|
|
1184 |
+ # Get the available space and total space for the volume on
|
|
1185 |
+ # which the artifact cache is located.
|
|
1186 |
+ #
|
|
1187 |
+ # Returns:
|
|
1188 |
+ # (int): The total number of bytes on the volume
|
|
1189 |
+ # (int): The number of available bytes on the volume
|
|
1190 |
+ #
|
|
1191 |
+ # NOTE: We use this stub to allow the test cases
|
|
1192 |
+ # to override what an artifact cache thinks
|
|
1193 |
+ # about its disk size and available bytes.
|
|
1194 |
+ #
|
|
1195 |
+ def _get_cache_volume_size(self):
|
|
1196 |
+ return utils._get_volume_size(self.casdir)
|
|
1197 |
+ |
|
1198 |
+ # _calculate_cache_quota()
|
|
1199 |
+ #
|
|
1200 |
+ # Calculates and sets the cache quota and lower threshold based on the
|
|
1201 |
+ # quota set in Context.
|
|
1202 |
+ # It checks that the quota is both a valid expression, and that there is
|
|
1203 |
+ # enough disk space to satisfy that quota
|
|
1204 |
+ #
|
|
1205 |
+ def _calculate_cache_quota(self):
|
|
1206 |
+ # Headroom intended to give BuildStream a bit of leeway.
|
|
1207 |
+ # This acts as the minimum size of cache_quota and also
|
|
1208 |
+ # is taken from the user requested cache_quota.
|
|
1209 |
+ #
|
|
1210 |
+ if 'BST_TEST_SUITE' in os.environ:
|
|
1211 |
+ self._cache_quota_headroom = 0
|
|
1212 |
+ else:
|
|
1213 |
+ self._cache_quota_headroom = 2e9
|
|
1214 |
+ |
|
1215 |
+ total_size, available_space = self._get_cache_volume_size()
|
|
1216 |
+ cache_size = self.get_cache_size()
|
|
1217 |
+ self.available_space = available_space
|
|
1218 |
+ |
|
1219 |
+ # Ensure system has enough storage for the cache_quota
|
|
1220 |
+ #
|
|
1221 |
+ # If cache_quota is none, set it to the maximum it could possibly be.
|
|
1222 |
+ #
|
|
1223 |
+ # Also check that cache_quota is at least as large as our headroom.
|
|
1224 |
+ #
|
|
1225 |
+ cache_quota = self._config_cache_quota
|
|
1226 |
+ if cache_quota is None: # Infinity, set to max system storage
|
|
1227 |
+ cache_quota = cache_size + available_space
|
|
1228 |
+ if cache_quota < self._cache_quota_headroom: # Check minimum
|
|
1229 |
+ raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
1230 |
+ "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
|
|
1231 |
+ "BuildStream requires a minimum cache quota of 2G.")
|
|
1232 |
+ elif cache_quota > total_size:
|
|
1233 |
+ # A quota greater than the total disk size is certainly an error
|
|
1234 |
+ raise CASCacheError("Your system does not have enough available " +
|
|
1235 |
+ "space to support the cache quota specified.",
|
|
1236 |
+ detail=("You have specified a quota of {quota} total disk space.\n" +
|
|
1237 |
+ "The filesystem containing {local_cache_path} only " +
|
|
1238 |
+ "has {total_size} total disk space.")
|
|
1239 |
+ .format(
|
|
1240 |
+ quota=self._config_cache_quota,
|
|
1241 |
+ local_cache_path=self.casdir,
|
|
1242 |
+ total_size=utils._pretty_size(total_size)),
|
|
1243 |
+ reason='insufficient-storage-for-quota')
|
|
1244 |
+ elif cache_quota > cache_size + available_space:
|
|
1245 |
+ # FIXME: we want a message to be printed out here, but messages
|
|
1246 |
+ # depend on the Context, and the CAS server cannot have a Context currently
|
|
1247 |
+ self.cache_quota_exceeded = True
|
|
1248 |
+ |
|
1249 |
+ # Place a slight headroom (2e9 (2GB)) into
|
|
1250 |
+ # cache_quota to try and avoid exceptions.
|
|
1251 |
+ #
|
|
1252 |
+ # Of course, we might still end up running out during a build
|
|
1253 |
+ # if we end up writing more than 2G, but hey, this stuff is
|
|
1254 |
+ # already really fuzzy.
|
|
1255 |
+ #
|
|
1256 |
+ self._cache_quota_original = cache_quota
|
|
1257 |
+ self._cache_quota = cache_quota - self._cache_quota_headroom
|
|
1258 |
+ self._cache_lower_threshold = self._cache_quota / 2
|
|
1259 |
+ |
|
1018 | 1260 |
|
1019 | 1261 |
def _grouper(iterable, n):
|
1020 | 1262 |
while True:
|
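The tail of _calculate_cache_quota() above boils down to simple arithmetic; a sketch assuming the non-test headroom of 2e9 bytes and a configured quota of 10G:

cache_quota_original = 10e9                    # quota as the user resolved it
headroom = 2e9                                 # protective headroom (2GB)
cache_quota = cache_quota_original - headroom  # 8e9: what full() compares against
lower_threshold = cache_quota / 2              # 4e9: target size for a cleanup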
... | ... | @@ -30,8 +30,8 @@ from . import _yaml |
30 | 30 |
from ._exceptions import LoadError, LoadErrorReason, BstError
|
31 | 31 |
from ._message import Message, MessageType
|
32 | 32 |
from ._profile import Topics, profile_start, profile_end
|
33 |
-from ._artifactcache import ArtifactCache, ArtifactCacheUsage
|
|
34 |
-from ._cas import CASCache
|
|
33 |
+from ._artifactcache import ArtifactCache
|
|
34 |
+from ._cas import CASCache, CASCacheUsage
|
|
35 | 35 |
from ._workspaces import Workspaces, WorkspaceProjectCache
|
36 | 36 |
from .plugin import _plugin_lookup
|
37 | 37 |
from .sandbox import SandboxRemote
|
... | ... | @@ -58,12 +58,21 @@ class Context(): |
58 | 58 |
# Filename indicating which configuration file was used, or None for the defaults
|
59 | 59 |
self.config_origin = None
|
60 | 60 |
|
61 |
+ # The directory under which other directories are based
|
|
62 |
+ self.cachedir = None
|
|
63 |
+ |
|
61 | 64 |
# The directory where various sources are stored
|
62 | 65 |
self.sourcedir = None
|
63 | 66 |
|
64 | 67 |
# The directory where build sandboxes will be created
|
65 | 68 |
self.builddir = None
|
66 | 69 |
|
70 |
+ # The directory for CAS
|
|
71 |
+ self.casdir = None
|
|
72 |
+ |
|
73 |
+ # The directory for temporary files
|
|
74 |
+ self.tmpdir = None
|
|
75 |
+ |
|
67 | 76 |
# Default root location for workspaces
|
68 | 77 |
self.workspacedir = None
|
69 | 78 |
|
... | ... | @@ -180,13 +189,30 @@ class Context(): |
180 | 189 |
user_config = _yaml.load(config)
|
181 | 190 |
_yaml.composite(defaults, user_config)
|
182 | 191 |
|
192 |
+ # Give deprecation warnings
|
|
193 |
+ if defaults.get('builddir'):
|
|
194 |
+ print("builddir is deprecated, use cachedir")
|
|
195 |
+ else:
|
|
196 |
+ defaults['builddir'] = os.path.join(defaults['cachedir'], 'build')
|
|
197 |
+ |
|
198 |
+ if defaults.get('artifactdir'):
|
|
199 |
+ print("artifactdir is deprecated, use cachedir")
|
|
200 |
+ else:
|
|
201 |
+ defaults['artifactdir'] = os.path.join(defaults['cachedir'], 'artifacts')
|
|
202 |
+ |
|
183 | 203 |
_yaml.node_validate(defaults, [
|
184 |
- 'sourcedir', 'builddir', 'artifactdir', 'logdir',
|
|
204 |
+ 'cachedir', 'sourcedir', 'builddir', 'artifactdir', 'logdir',
|
|
185 | 205 |
'scheduler', 'artifacts', 'logging', 'projects',
|
186 |
- 'cache', 'prompt', 'workspacedir', 'remote-execution'
|
|
206 |
+ 'cache', 'prompt', 'workspacedir', 'remote-execution',
|
|
187 | 207 |
])
|
188 | 208 |
|
189 |
- for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
|
|
209 |
+ # add directories not set by users
|
|
210 |
+ defaults['tmpdir'] = os.path.join(defaults['cachedir'], 'tmp')
|
|
211 |
+ defaults['casdir'] = os.path.join(defaults['cachedir'], 'cas')
|
|
212 |
+ |
|
213 |
+ for directory in ['cachedir', 'sourcedir', 'builddir',
|
|
214 |
+ 'artifactdir', 'logdir', 'workspacedir', 'casdir',
|
|
215 |
+ 'tmpdir']:
|
|
190 | 216 |
# Allow the ~ tilde expansion and any environment variables in
|
191 | 217 |
# path specification in the config files.
|
192 | 218 |
#
|
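The directory resolution added above means users only need to set cachedir; a sketch of the resulting layout (paths are illustrative):

import os

cachedir = os.path.expandvars("${XDG_CACHE_HOME}/buildstream")
layout = {
    'builddir': os.path.join(cachedir, 'build'),        # unless deprecated key set
    'artifactdir': os.path.join(cachedir, 'artifacts'), # unless deprecated key set
    'tmpdir': os.path.join(cachedir, 'tmp'),            # always derived
    'casdir': os.path.join(cachedir, 'cas'),            # always derived
}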
... | ... | @@ -203,7 +229,15 @@ class Context(): |
203 | 229 |
cache = _yaml.node_get(defaults, Mapping, 'cache')
|
204 | 230 |
_yaml.node_validate(cache, ['quota', 'pull-buildtrees'])
|
205 | 231 |
|
206 |
- self.config_cache_quota = _yaml.node_get(cache, str, 'quota')
|
|
232 |
+ config_cache_quota = _yaml.node_get(cache, str, 'quota')
|
|
233 |
+ try:
|
|
234 |
+ self.config_cache_quota = utils._parse_size(config_cache_quota,
|
|
235 |
+ self.casdir)
|
|
236 |
+ except utils.UtilError as e:
|
|
237 |
+ raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
238 |
+ "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
|
|
239 |
+ "\nValid values are, for example: 800M 10G 1T 50%\n"
|
|
240 |
+ .format(str(e))) from e
|
|
207 | 241 |
|
208 | 242 |
# Load artifact share configuration
|
209 | 243 |
self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
|
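Per the error text above, _parse_size() accepts absolute sizes or a percentage of the volume holding casdir; an illustrative table of inputs (the byte values assume binary multipliers, which is an assumption here, not confirmed by this patch):

quota_examples = {
    "800M": 800 * 1024 ** 2,  # fixed size in bytes
    "10G": 10 * 1024 ** 3,
    "1T": 1 * 1024 ** 4,
    "50%": None,              # resolved against the disk containing casdir
}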
... | ... | @@ -275,15 +309,15 @@ class Context(): |
275 | 309 |
|
276 | 310 |
return self._artifactcache
|
277 | 311 |
|
278 |
- # get_artifact_cache_usage()
|
|
312 |
+ # get_cache_usage()
|
|
279 | 313 |
#
|
280 | 314 |
# Fetches the current usage of the artifact cache
|
281 | 315 |
#
|
282 | 316 |
# Returns:
|
283 |
- # (ArtifactCacheUsage): The current status
|
|
317 |
+ # (CASCacheUsage): The current status
|
|
284 | 318 |
#
|
285 |
- def get_artifact_cache_usage(self):
|
|
286 |
- return ArtifactCacheUsage(self.artifactcache)
|
|
319 |
+ def get_cache_usage(self):
|
|
320 |
+ return CASCacheUsage(self.get_cascache())
|
|
287 | 321 |
|
288 | 322 |
# add_project():
|
289 | 323 |
#
|
... | ... | @@ -650,7 +684,7 @@ class Context(): |
650 | 684 |
|
651 | 685 |
def get_cascache(self):
|
652 | 686 |
if self._cascache is None:
|
653 |
- self._cascache = CASCache(self.artifactdir)
|
|
687 |
+ self._cascache = CASCache(self.cachedir, self.config_cache_quota)
|
|
654 | 688 |
return self._cascache
|
655 | 689 |
|
656 | 690 |
|
... | ... | @@ -11,11 +11,11 @@ from .._exceptions import BstError, LoadError, AppError |
11 | 11 |
from .._versions import BST_FORMAT_VERSION
|
12 | 12 |
from .complete import main_bashcomplete, complete_path, CompleteUnhandled
|
13 | 13 |
|
14 |
- |
|
15 | 14 |
##################################################################
|
16 | 15 |
# Override of click's main entry point #
|
17 | 16 |
##################################################################
|
18 | 17 |
|
18 |
+ |
|
19 | 19 |
# search_command()
|
20 | 20 |
#
|
21 | 21 |
# Helper function to get a command and context object
|
... | ... | @@ -404,7 +404,7 @@ class _StatusHeader(): |
404 | 404 |
#
|
405 | 405 |
# ~~~~~~ cache: 69% ~~~~~~
|
406 | 406 |
#
|
407 |
- usage = self._context.get_artifact_cache_usage()
|
|
407 |
+ usage = self._context.get_cache_usage()
|
|
408 | 408 |
usage_percent = '{}%'.format(usage.used_percent)
|
409 | 409 |
|
410 | 410 |
size = 21
|
... | ... | @@ -452,7 +452,7 @@ class LogLine(Widget): |
452 | 452 |
values["Session Start"] = starttime.strftime('%A, %d-%m-%Y at %H:%M:%S')
|
453 | 453 |
values["Project"] = "{} ({})".format(project.name, project.directory)
|
454 | 454 |
values["Targets"] = ", ".join([t.name for t in stream.targets])
|
455 |
- values["Cache Usage"] = "{}".format(context.get_artifact_cache_usage())
|
|
455 |
+ values["Cache Usage"] = "{}".format(context.get_cache_usage())
|
|
456 | 456 |
text += self._format_values(values)
|
457 | 457 |
|
458 | 458 |
# User configurations
|
... | ... | @@ -25,14 +25,14 @@ class CacheSizeJob(Job): |
25 | 25 |
self._complete_cb = complete_cb
|
26 | 26 |
|
27 | 27 |
context = self._scheduler.context
|
28 |
- self._artifacts = context.artifactcache
|
|
28 |
+ self._cas = context.get_cascache()
|
|
29 | 29 |
|
30 | 30 |
def child_process(self):
|
31 |
- return self._artifacts.compute_cache_size()
|
|
31 |
+ return self._cas.compute_cache_size()
|
|
32 | 32 |
|
33 | 33 |
def parent_complete(self, status, result):
|
34 | 34 |
if status == JobStatus.OK:
|
35 |
- self._artifacts.set_cache_size(result)
|
|
35 |
+ self._cas.set_cache_size(result)
|
|
36 | 36 |
|
37 | 37 |
if self._complete_cb:
|
38 | 38 |
self._complete_cb(status, result)
|
... | ... | @@ -25,27 +25,27 @@ class CleanupJob(Job): |
25 | 25 |
self._complete_cb = complete_cb
|
26 | 26 |
|
27 | 27 |
context = self._scheduler.context
|
28 |
+ self._cas = context.get_cascache()
|
|
28 | 29 |
self._artifacts = context.artifactcache
|
29 | 30 |
|
30 | 31 |
def child_process(self):
|
31 | 32 |
def progress():
|
32 | 33 |
self.send_message('update-cache-size',
|
33 |
- self._artifacts.get_cache_size())
|
|
34 |
+ self._cas.get_cache_size())
|
|
34 | 35 |
return self._artifacts.clean(progress)
|
35 | 36 |
|
36 | 37 |
def handle_message(self, message_type, message):
|
37 |
- |
|
38 | 38 |
# Update the cache size in the main process as we go,
|
39 | 39 |
# this provides better feedback in the UI.
|
40 | 40 |
if message_type == 'update-cache-size':
|
41 |
- self._artifacts.set_cache_size(message)
|
|
41 |
+ self._cas.set_cache_size(message)
|
|
42 | 42 |
return True
|
43 | 43 |
|
44 | 44 |
return False
|
45 | 45 |
|
46 | 46 |
def parent_complete(self, status, result):
|
47 | 47 |
if status == JobStatus.OK:
|
48 |
- self._artifacts.set_cache_size(result)
|
|
48 |
+ self._cas.set_cache_size(result)
|
|
49 | 49 |
|
50 | 50 |
if self._complete_cb:
|
51 | 51 |
self._complete_cb(status, result)
|
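The 'update-cache-size' traffic above follows a simple child-to-parent pattern; a minimal sketch with hypothetical helpers standing in for the Job plumbing:

def child(cas, artifacts, send_message):
    # Runs in the cleanup child process: report the shrinking size
    # periodically while artifacts are being expired.
    def progress():
        send_message('update-cache-size', cas.get_cache_size())
    return artifacts.clean(progress)

def parent_handle(cas, message_type, message):
    # Runs in the main process: mirror the child's size updates into
    # the local CASCache so the status display stays current.
    if message_type == 'update-cache-size':
        cas.set_cache_size(message)
        return True
    return False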
... | ... | @@ -381,7 +381,6 @@ class Job(): |
381 | 381 |
# queue (multiprocessing.Queue): The message queue for IPC
|
382 | 382 |
#
|
383 | 383 |
def _child_action(self, queue):
|
384 |
- |
|
385 | 384 |
# This avoids some SIGTSTP signals from grandchildren
|
386 | 385 |
# getting propagated up to the master process
|
387 | 386 |
os.setsid()
|
... | ... | @@ -13,11 +13,8 @@ |
13 | 13 |
# Location to store sources
|
14 | 14 |
sourcedir: ${XDG_CACHE_HOME}/buildstream/sources
|
15 | 15 |
|
16 |
-# Location to perform builds
|
|
17 |
-builddir: ${XDG_CACHE_HOME}/buildstream/build
|
|
18 |
- |
|
19 |
-# Location to store local binary artifacts
|
|
20 |
-artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
|
|
16 |
+# Root location for other directories in the cache
|
|
17 |
+cachedir: ${XDG_CACHE_HOME}/buildstream
|
|
21 | 18 |
|
22 | 19 |
# Location to store build logs
|
23 | 20 |
logdir: ${XDG_CACHE_HOME}/buildstream/logs
|
... | ... | @@ -43,6 +43,7 @@ from . import _signals |
43 | 43 |
from ._exceptions import BstError, ErrorDomain
|
44 | 44 |
from ._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
45 | 45 |
|
46 |
+ |
|
46 | 47 |
# The magic number for timestamps: 2011-11-11 11:11:11
|
47 | 48 |
_magic_timestamp = calendar.timegm([2011, 11, 11, 11, 11, 11])
|
48 | 49 |
|
... | ... | @@ -53,16 +53,16 @@ def pytest_runtest_setup(item): |
53 | 53 |
class IntegrationCache():
|
54 | 54 |
|
55 | 55 |
def __init__(self, cache):
|
56 |
- cache = os.path.abspath(cache)
|
|
56 |
+ self.root = os.path.abspath(cache)
|
|
57 | 57 |
os.makedirs(cache, exist_ok=True)
|
58 | 58 |
|
59 | 59 |
# Use the same sources every time
|
60 |
- self.sources = os.path.join(cache, 'sources')
|
|
60 |
+ self.sources = os.path.join(self.root, 'sources')
|
|
61 | 61 |
|
62 | 62 |
# Create a temp directory for the duration of the test for
|
63 | 63 |
# the artifacts directory
|
64 | 64 |
try:
|
65 |
- self.artifacts = tempfile.mkdtemp(dir=cache, prefix='artifacts-')
|
|
65 |
+ self.artifacts = tempfile.mkdtemp(dir=self.root, prefix='artifacts-')
|
|
66 | 66 |
except OSError as e:
|
67 | 67 |
raise AssertionError("Unable to create test directory !") from e
|
68 | 68 |
|
... | ... | @@ -87,6 +87,10 @@ def integration_cache(request): |
87 | 87 |
shutil.rmtree(cache.artifacts)
|
88 | 88 |
except FileNotFoundError:
|
89 | 89 |
pass
|
90 |
+ try:
|
|
91 |
+ shutil.rmtree(os.path.join(cache.root, 'cas'))
|
|
92 |
+ except FileNotFoundError:
|
|
93 |
+ pass
|
|
90 | 94 |
|
91 | 95 |
|
92 | 96 |
#################################################
|
... | ... | @@ -194,10 +194,9 @@ def workdir(source_cache=None): |
194 | 194 |
|
195 | 195 |
bst_config_file = os.path.join(tempdir, 'buildstream.conf')
|
196 | 196 |
config = {
|
197 |
+ 'cachedir': tempdir,
|
|
197 | 198 |
'sourcedir': source_cache,
|
198 |
- 'artifactdir': os.path.join(tempdir, 'artifacts'),
|
|
199 | 199 |
'logdir': os.path.join(tempdir, 'logs'),
|
200 |
- 'builddir': os.path.join(tempdir, 'build'),
|
|
201 | 200 |
}
|
202 | 201 |
_yaml.dump(config, bst_config_file)
|
203 | 202 |
|
... | ... | @@ -411,12 +410,10 @@ def run_session(description, tempdir, source_cache, palette, config_file, force) |
411 | 410 |
# Encode and save the output if that was asked for
|
412 | 411 |
output = _yaml.node_get(command, str, 'output', default_value=None)
|
413 | 412 |
if output is not None:
|
414 |
- |
|
415 | 413 |
# Convert / Generate a nice <div>
|
416 | 414 |
converted = generate_html(command_out, directory, config_file,
|
417 | 415 |
source_cache, tempdir, palette,
|
418 | 416 |
command_str, command_fake_output is not None)
|
419 |
- |
|
420 | 417 |
# Save it
|
421 | 418 |
filename = os.path.join(desc_dir, output)
|
422 | 419 |
filename = os.path.realpath(filename)
|
... | ... | @@ -2,7 +2,7 @@ |
2 | 2 |
commands:
|
3 | 3 |
# Make it fetch first
|
4 | 4 |
- directory: ../examples/running-commands
|
5 |
- command: fetch hello.bst
|
|
5 |
+ command: source fetch hello.bst
|
|
6 | 6 |
|
7 | 7 |
# Capture a show output
|
8 | 8 |
- directory: ../examples/running-commands
|
... | ... | @@ -50,14 +50,15 @@ def test_cache_size_write(cli, tmpdir): |
50 | 50 |
|
51 | 51 |
# Artifact cache must be in a known place
|
52 | 52 |
artifactdir = os.path.join(project_dir, "artifacts")
|
53 |
- cli.configure({"artifactdir": artifactdir})
|
|
53 |
+ casdir = os.path.join(project_dir, "cas")
|
|
54 |
+ cli.configure({"cachedir": project_dir})
|
|
54 | 55 |
|
55 | 56 |
# Build, to populate the cache
|
56 | 57 |
res = cli.run(project=project_dir, args=["build", "test.bst"])
|
57 | 58 |
res.assert_success()
|
58 | 59 |
|
59 | 60 |
# Inspect the artifact cache
|
60 |
- sizefile = os.path.join(artifactdir, CACHE_SIZE_FILE)
|
|
61 |
+ sizefile = os.path.join(casdir, CACHE_SIZE_FILE)
|
|
61 | 62 |
assert os.path.isfile(sizefile)
|
62 | 63 |
with open(sizefile, "r") as f:
|
63 | 64 |
size_data = f.read()
|
... | ... | @@ -80,11 +81,11 @@ def test_quota_over_1024T(cli, tmpdir): |
80 | 81 |
_yaml.dump({'name': 'main'}, str(project.join("project.conf")))
|
81 | 82 |
|
82 | 83 |
volume_space_patch = mock.patch(
|
83 |
- "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
|
|
84 |
+ "buildstream._cas.CASCache._get_cache_volume_size",
|
|
84 | 85 |
autospec=True,
|
85 | 86 |
return_value=(1025 * TiB, 1025 * TiB)
|
86 | 87 |
)
|
87 | 88 |
|
88 | 89 |
with volume_space_patch:
|
89 | 90 |
result = cli.run(project, args=["build", "file.bst"])
|
90 |
- result.assert_main_error(ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')
|
|
91 |
+ result.assert_main_error(ErrorDomain.CAS, 'insufficient-storage-for-quota')
|
... | ... | @@ -340,11 +340,11 @@ def test_never_delete_required_track(cli, datafiles, tmpdir): |
340 | 340 |
("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
|
341 | 341 |
|
342 | 342 |
# Not enough space on disk even if you cleaned up
|
343 |
- ("11K", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota'),
|
|
343 |
+ ("11K", ErrorDomain.CAS, 'insufficient-storage-for-quota'),
|
|
344 | 344 |
|
345 | 345 |
# Not enough space for these caches
|
346 |
- ("7K", 'warning', 'Your system does not have enough available'),
|
|
347 |
- ("70%", 'warning', 'Your system does not have enough available')
|
|
346 |
+ ("7K", 'success', None),
|
|
347 |
+ ("70%", 'success', None)
|
|
348 | 348 |
])
|
349 | 349 |
@pytest.mark.datafiles(DATA_DIR)
|
350 | 350 |
def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reason):
|
... | ... | @@ -354,7 +354,7 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas |
354 | 354 |
cli.configure({
|
355 | 355 |
'cache': {
|
356 | 356 |
'quota': quota,
|
357 |
- }
|
|
357 |
+ },
|
|
358 | 358 |
})
|
359 | 359 |
|
360 | 360 |
# We patch how we get space information
|
... | ... | @@ -372,13 +372,13 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas |
372 | 372 |
total_space = 10000
|
373 | 373 |
|
374 | 374 |
volume_space_patch = mock.patch(
|
375 |
- "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
|
|
375 |
+ "buildstream.utils._get_volume_size",
|
|
376 | 376 |
autospec=True,
|
377 | 377 |
return_value=(total_space, free_space),
|
378 | 378 |
)
|
379 | 379 |
|
380 | 380 |
cache_size_patch = mock.patch(
|
381 |
- "buildstream._artifactcache.ArtifactCache.get_cache_size",
|
|
381 |
+ "buildstream._cas.CASCache.get_cache_size",
|
|
382 | 382 |
autospec=True,
|
383 | 383 |
return_value=0,
|
384 | 384 |
)
|
... | ... | @@ -435,7 +435,7 @@ def test_extract_expiry(cli, datafiles, tmpdir): |
435 | 435 |
# Now we should have a directory for the cached target2.bst, which
|
436 | 436 |
# replaced target.bst in the cache, we should not have a directory
|
437 | 437 |
# for the target.bst
|
438 |
- refsdir = os.path.join(project, 'cache', 'artifacts', 'cas', 'refs', 'heads')
|
|
438 |
+ refsdir = os.path.join(project, 'cache', 'cas', 'refs', 'heads')
|
|
439 | 439 |
refsdirtest = os.path.join(refsdir, 'test')
|
440 | 440 |
refsdirtarget = os.path.join(refsdirtest, 'target')
|
441 | 441 |
refsdirtarget2 = os.path.join(refsdirtest, 'target2')
|
... | ... | @@ -68,8 +68,8 @@ def test_push_pull(cli, tmpdir, datafiles): |
68 | 68 |
# Now we've pushed, delete the user's local artifact cache
|
69 | 69 |
# directory and try to redownload it from the share
|
70 | 70 |
#
|
71 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
72 |
- shutil.rmtree(artifacts)
|
|
71 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
72 |
+ shutil.rmtree(cas)
|
|
73 | 73 |
|
74 | 74 |
# Assert that nothing is cached locally anymore
|
75 | 75 |
state = cli.get_element_state(project, 'target.bst')
|
... | ... | @@ -56,7 +56,7 @@ def test_pull(cli, tmpdir, datafiles): |
56 | 56 |
# Set up an artifact cache.
|
57 | 57 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
58 | 58 |
# Configure artifact share
|
59 |
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
59 |
+ cache_dir = os.path.join(str(tmpdir), 'cache')
|
|
60 | 60 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
61 | 61 |
user_config = {
|
62 | 62 |
'scheduler': {
|
... | ... | @@ -65,7 +65,8 @@ def test_pull(cli, tmpdir, datafiles): |
65 | 65 |
'artifacts': {
|
66 | 66 |
'url': share.repo,
|
67 | 67 |
'push': True,
|
68 |
- }
|
|
68 |
+ },
|
|
69 |
+ 'cachedir': cache_dir
|
|
69 | 70 |
}
|
70 | 71 |
|
71 | 72 |
# Write down the user configuration file
|
... | ... | @@ -92,7 +93,6 @@ def test_pull(cli, tmpdir, datafiles): |
92 | 93 |
# Fake minimal context
|
93 | 94 |
context = Context()
|
94 | 95 |
context.load(config=user_config_file)
|
95 |
- context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
96 | 96 |
context.set_message_handler(message_handler)
|
97 | 97 |
|
98 | 98 |
# Load the project and CAS cache
|
... | ... | @@ -110,7 +110,7 @@ def test_pull(cli, tmpdir, datafiles): |
110 | 110 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
111 | 111 |
process = multiprocessing.Process(target=_queue_wrapper,
|
112 | 112 |
args=(_test_pull, queue, user_config_file, project_dir,
|
113 |
- artifact_dir, 'target.bst', element_key))
|
|
113 |
+ cache_dir, 'target.bst', element_key))
|
|
114 | 114 |
|
115 | 115 |
try:
|
116 | 116 |
# Keep SIGINT blocked in the child process
|
... | ... | @@ -127,12 +127,14 @@ def test_pull(cli, tmpdir, datafiles): |
127 | 127 |
assert cas.contains(element, element_key)
|
128 | 128 |
|
129 | 129 |
|
130 |
-def _test_pull(user_config_file, project_dir, artifact_dir,
|
|
130 |
+def _test_pull(user_config_file, project_dir, cache_dir,
|
|
131 | 131 |
element_name, element_key, queue):
|
132 | 132 |
# Fake minimal context
|
133 | 133 |
context = Context()
|
134 | 134 |
context.load(config=user_config_file)
|
135 |
- context.artifactdir = artifact_dir
|
|
135 |
+ context.cachedir = cache_dir
|
|
136 |
+ context.casdir = os.path.join(cache_dir, 'cas')
|
|
137 |
+ context.tmpdir = os.path.join(cache_dir, 'tmp')
|
|
136 | 138 |
context.set_message_handler(message_handler)
|
137 | 139 |
|
138 | 140 |
# Load the project manually
|
... | ... | @@ -165,7 +167,7 @@ def test_pull_tree(cli, tmpdir, datafiles): |
165 | 167 |
# Set up an artifact cache.
|
166 | 168 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
167 | 169 |
# Configure artifact share
|
168 |
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
170 |
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
|
|
169 | 171 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
170 | 172 |
user_config = {
|
171 | 173 |
'scheduler': {
|
... | ... | @@ -174,7 +176,8 @@ def test_pull_tree(cli, tmpdir, datafiles): |
174 | 176 |
'artifacts': {
|
175 | 177 |
'url': share.repo,
|
176 | 178 |
'push': True,
|
177 |
- }
|
|
179 |
+ },
|
|
180 |
+ 'cachedir': rootcache_dir
|
|
178 | 181 |
}
|
179 | 182 |
|
180 | 183 |
# Write down the user configuration file
|
... | ... | @@ -195,7 +198,6 @@ def test_pull_tree(cli, tmpdir, datafiles): |
195 | 198 |
# Fake minimal context
|
196 | 199 |
context = Context()
|
197 | 200 |
context.load(config=user_config_file)
|
198 |
- context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
199 | 201 |
context.set_message_handler(message_handler)
|
200 | 202 |
|
201 | 203 |
# Load the project and CAS cache
|
... | ... | @@ -218,7 +220,7 @@ def test_pull_tree(cli, tmpdir, datafiles): |
218 | 220 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
219 | 221 |
process = multiprocessing.Process(target=_queue_wrapper,
|
220 | 222 |
args=(_test_push_tree, queue, user_config_file, project_dir,
|
221 |
- artifact_dir, artifact_digest))
|
|
223 |
+ artifact_digest))
|
|
222 | 224 |
|
223 | 225 |
try:
|
224 | 226 |
# Keep SIGINT blocked in the child process
|
... | ... | @@ -246,7 +248,7 @@ def test_pull_tree(cli, tmpdir, datafiles): |
246 | 248 |
# Use subprocess to avoid creation of gRPC threads in main BuildStream process
|
247 | 249 |
process = multiprocessing.Process(target=_queue_wrapper,
|
248 | 250 |
args=(_test_pull_tree, queue, user_config_file, project_dir,
|
249 |
- artifact_dir, tree_digest))
|
|
251 |
+ tree_digest))
|
|
250 | 252 |
|
251 | 253 |
try:
|
252 | 254 |
# Keep SIGINT blocked in the child process
|
... | ... | @@ -268,11 +270,10 @@ def test_pull_tree(cli, tmpdir, datafiles): |
268 | 270 |
assert os.path.exists(cas.objpath(directory_digest))
|
269 | 271 |
|
270 | 272 |
|
271 |
-def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
|
|
273 |
+def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
|
|
272 | 274 |
# Fake minimal context
|
273 | 275 |
context = Context()
|
274 | 276 |
context.load(config=user_config_file)
|
275 |
- context.artifactdir = artifact_dir
|
|
276 | 277 |
context.set_message_handler(message_handler)
|
277 | 278 |
|
278 | 279 |
# Load the project manually
|
... | ... | @@ -304,11 +305,10 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest |
304 | 305 |
queue.put("No remote configured")
|
305 | 306 |
|
306 | 307 |
|
307 |
-def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
|
|
308 |
+def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
|
|
308 | 309 |
# Fake minimal context
|
309 | 310 |
context = Context()
|
310 | 311 |
context.load(config=user_config_file)
|
311 |
- context.artifactdir = artifact_dir
|
|
312 | 312 |
context.set_message_handler(message_handler)
|
313 | 313 |
|
314 | 314 |
# Load the project manually
|
... | ... | @@ -51,7 +51,7 @@ def test_push(cli, tmpdir, datafiles): |
51 | 51 |
# Set up an artifact cache.
|
52 | 52 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
53 | 53 |
# Configure artifact share
|
54 |
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
54 |
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
|
|
55 | 55 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
56 | 56 |
user_config = {
|
57 | 57 |
'scheduler': {
|
... | ... | @@ -60,7 +60,8 @@ def test_push(cli, tmpdir, datafiles): |
60 | 60 |
'artifacts': {
|
61 | 61 |
'url': share.repo,
|
62 | 62 |
'push': True,
|
63 |
- }
|
|
63 |
+ },
|
|
64 |
+ 'cachedir': rootcache_dir
|
|
64 | 65 |
}
|
65 | 66 |
|
66 | 67 |
# Write down the user configuration file
|
... | ... | @@ -69,7 +70,6 @@ def test_push(cli, tmpdir, datafiles): |
69 | 70 |
# Fake minimal context
|
70 | 71 |
context = Context()
|
71 | 72 |
context.load(config=user_config_file)
|
72 |
- context.artifactdir = artifact_dir
|
|
73 | 73 |
context.set_message_handler(message_handler)
|
74 | 74 |
|
75 | 75 |
# Load the project manually
|
... | ... | @@ -89,7 +89,7 @@ def test_push(cli, tmpdir, datafiles): |
89 | 89 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
90 | 90 |
process = multiprocessing.Process(target=_queue_wrapper,
|
91 | 91 |
args=(_test_push, queue, user_config_file, project_dir,
|
92 |
- artifact_dir, 'target.bst', element_key))
|
|
92 |
+ 'target.bst', element_key))
|
|
93 | 93 |
|
94 | 94 |
try:
|
95 | 95 |
# Keep SIGINT blocked in the child process
|
... | ... | @@ -106,12 +106,10 @@ def test_push(cli, tmpdir, datafiles): |
106 | 106 |
assert share.has_artifact('test', 'target.bst', element_key)
|
107 | 107 |
|
108 | 108 |
|
109 |
-def _test_push(user_config_file, project_dir, artifact_dir,
|
|
110 |
- element_name, element_key, queue):
|
|
109 |
+def _test_push(user_config_file, project_dir, element_name, element_key, queue):
|
|
111 | 110 |
# Fake minimal context
|
112 | 111 |
context = Context()
|
113 | 112 |
context.load(config=user_config_file)
|
114 |
- context.artifactdir = artifact_dir
|
|
115 | 113 |
context.set_message_handler(message_handler)
|
116 | 114 |
|
117 | 115 |
# Load the project manually
|
... | ... | @@ -152,7 +150,7 @@ def test_push_directory(cli, tmpdir, datafiles): |
152 | 150 |
# Set up an artifact cache.
|
153 | 151 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
154 | 152 |
# Configure artifact share
|
155 |
- artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
153 |
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
|
|
156 | 154 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
157 | 155 |
user_config = {
|
158 | 156 |
'scheduler': {
|
... | ... | @@ -161,7 +159,8 @@ def test_push_directory(cli, tmpdir, datafiles): |
161 | 159 |
'artifacts': {
|
162 | 160 |
'url': share.repo,
|
163 | 161 |
'push': True,
|
164 |
- }
|
|
162 |
+ },
|
|
163 |
+ 'cachedir': rootcache_dir
|
|
165 | 164 |
}
|
166 | 165 |
|
167 | 166 |
# Write down the user configuration file
|
... | ... | @@ -170,7 +169,6 @@ def test_push_directory(cli, tmpdir, datafiles): |
170 | 169 |
# Fake minimal context
|
171 | 170 |
context = Context()
|
172 | 171 |
context.load(config=user_config_file)
|
173 |
- context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
|
174 | 172 |
context.set_message_handler(message_handler)
|
175 | 173 |
|
176 | 174 |
# Load the project and CAS cache
|
... | ... | @@ -198,7 +196,7 @@ def test_push_directory(cli, tmpdir, datafiles): |
198 | 196 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
199 | 197 |
process = multiprocessing.Process(target=_queue_wrapper,
|
200 | 198 |
args=(_test_push_directory, queue, user_config_file,
|
201 |
- project_dir, artifact_dir, artifact_digest))
|
|
199 |
+ project_dir, artifact_digest))
|
|
202 | 200 |
|
203 | 201 |
try:
|
204 | 202 |
# Keep SIGINT blocked in the child process
|
... | ... | @@ -216,11 +214,10 @@ def test_push_directory(cli, tmpdir, datafiles): |
216 | 214 |
assert share.has_object(artifact_digest)
|
217 | 215 |
|
218 | 216 |
|
219 |
-def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
|
|
217 |
+def _test_push_directory(user_config_file, project_dir, artifact_digest, queue):
|
|
220 | 218 |
# Fake minimal context
|
221 | 219 |
context = Context()
|
222 | 220 |
context.load(config=user_config_file)
|
223 |
- context.artifactdir = artifact_dir
|
|
224 | 221 |
context.set_message_handler(message_handler)
|
225 | 222 |
|
226 | 223 |
# Load the project manually
|
... | ... | @@ -254,6 +251,7 @@ def test_push_message(cli, tmpdir, datafiles): |
254 | 251 |
with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
255 | 252 |
# Configure artifact share
|
256 | 253 |
artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
|
254 |
+ rootcache_dir = os.path.join(str(tmpdir), 'cache')
|
|
257 | 255 |
user_config_file = str(tmpdir.join('buildstream.conf'))
|
258 | 256 |
user_config = {
|
259 | 257 |
'scheduler': {
|
... | ... | @@ -262,7 +260,8 @@ def test_push_message(cli, tmpdir, datafiles): |
262 | 260 |
'artifacts': {
|
263 | 261 |
'url': share.repo,
|
264 | 262 |
'push': True,
|
265 |
- }
|
|
263 |
+ },
|
|
264 |
+ 'cachedir': rootcache_dir
|
|
266 | 265 |
}
|
267 | 266 |
|
268 | 267 |
# Write down the user configuration file
|
... | ... | @@ -273,7 +272,7 @@ def test_push_message(cli, tmpdir, datafiles): |
273 | 272 |
# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
|
274 | 273 |
process = multiprocessing.Process(target=_queue_wrapper,
|
275 | 274 |
args=(_test_push_message, queue, user_config_file,
|
276 |
- project_dir, artifact_dir))
|
|
275 |
+ project_dir))
|
|
277 | 276 |
|
278 | 277 |
try:
|
279 | 278 |
# Keep SIGINT blocked in the child process
|
... | ... | @@ -292,11 +291,10 @@ def test_push_message(cli, tmpdir, datafiles): |
292 | 291 |
assert share.has_object(message_digest)
|
293 | 292 |
|
294 | 293 |
|
295 |
-def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
|
|
294 |
+def _test_push_message(user_config_file, project_dir, queue):
|
|
296 | 295 |
# Fake minimal context
|
297 | 296 |
context = Context()
|
298 | 297 |
context.load(config=user_config_file)
|
299 |
- context.artifactdir = artifact_dir
|
|
300 | 298 |
context.set_message_handler(message_handler)
|
301 | 299 |
|
302 | 300 |
# Load the project manually
|
... | ... | @@ -62,8 +62,8 @@ def test_push_pull_all(cli, tmpdir, datafiles): |
62 | 62 |
# Now we've pushed, delete the user's local artifact cache
|
63 | 63 |
# directory and try to redownload it from the share
|
64 | 64 |
#
|
65 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
66 |
- shutil.rmtree(artifacts)
|
|
65 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
66 |
+ shutil.rmtree(cas)
|
|
67 | 67 |
|
68 | 68 |
# Assert that nothing is cached locally anymore
|
69 | 69 |
states = cli.get_element_states(project, all_elements)
|
... | ... | @@ -112,7 +112,7 @@ def test_push_pull_default_targets(cli, tmpdir, datafiles): |
112 | 112 |
# Now we've pushed, delete the user's local artifact cache
|
113 | 113 |
# directory and try to redownload it from the share
|
114 | 114 |
#
|
115 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
115 |
+ artifacts = os.path.join(cli.directory, 'cas')
|
|
116 | 116 |
shutil.rmtree(artifacts)
|
117 | 117 |
|
118 | 118 |
# Assert that nothing is cached locally anymore
|
... | ... | @@ -154,8 +154,8 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles): |
154 | 154 |
assert_shared(cli, share2, project, 'target.bst')
|
155 | 155 |
|
156 | 156 |
# Delete the user's local artifact cache.
|
157 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
158 |
- shutil.rmtree(artifacts)
|
|
157 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
158 |
+ shutil.rmtree(cas)
|
|
159 | 159 |
|
160 | 160 |
# Assert that the element is not cached anymore.
|
161 | 161 |
assert cli.get_element_state(project, 'target.bst') != 'cached'
|
... | ... | @@ -208,8 +208,8 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles): |
208 | 208 |
# Now we've pushed, delete the user's local artifact cache
|
209 | 209 |
# directory and try to redownload it from the good_share.
|
210 | 210 |
#
|
211 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
212 |
- shutil.rmtree(artifacts)
|
|
211 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
212 |
+ shutil.rmtree(cas)
|
|
213 | 213 |
|
214 | 214 |
result = cli.run(project=project, args=['artifact', 'pull', 'target.bst', '--remote',
|
215 | 215 |
good_share.repo])
|
... | ... | @@ -249,8 +249,8 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles): |
249 | 249 |
# Now we've pushed, delete the user's local artifact cache
|
250 | 250 |
# directory and try to redownload it from the share
|
251 | 251 |
#
|
252 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
253 |
- shutil.rmtree(artifacts)
|
|
252 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
253 |
+ shutil.rmtree(cas)
|
|
254 | 254 |
|
255 | 255 |
# Assert that nothing is cached locally anymore
|
256 | 256 |
for element_name in all_elements:
|
... | ... | @@ -299,8 +299,8 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles): |
299 | 299 |
# Now we've pushed, delete the user's local artifact cache
|
300 | 300 |
# directory and try to redownload it from the share
|
301 | 301 |
#
|
302 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
303 |
- shutil.rmtree(artifacts)
|
|
302 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
303 |
+ shutil.rmtree(cas)
|
|
304 | 304 |
|
305 | 305 |
# Assert that nothing is cached locally anymore
|
306 | 306 |
for element_name in all_elements:
|
... | ... | @@ -335,7 +335,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles): |
335 | 335 |
result.assert_success()
|
336 | 336 |
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
|
337 | 337 |
|
338 |
- cache_dir = os.path.join(project, 'cache', 'artifacts')
|
|
338 |
+ cache_dir = os.path.join(project, 'cache', 'cas')
|
|
339 | 339 |
shutil.rmtree(cache_dir)
|
340 | 340 |
|
341 | 341 |
assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
|
... | ... | @@ -370,8 +370,8 @@ def test_pull_missing_blob(cli, tmpdir, datafiles): |
370 | 370 |
# Now we've pushed, delete the user's local artifact cache
|
371 | 371 |
# directory and try to redownload it from the share
|
372 | 372 |
#
|
373 |
- artifacts = os.path.join(cli.directory, 'artifacts')
|
|
374 |
- shutil.rmtree(artifacts)
|
|
373 |
+ cas = os.path.join(cli.directory, 'cas')
|
|
374 |
+ shutil.rmtree(cas)
|
|
375 | 375 |
|
376 | 376 |
# Assert that nothing is cached locally anymore
|
377 | 377 |
for element_name in all_elements:
|
... | ... | @@ -159,10 +159,8 @@ def test_buildtree_options(cli, tmpdir, datafiles): |
159 | 159 |
assert share.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
160 | 160 |
|
161 | 161 |
# Discard the cache
|
162 |
- cli.configure({
|
|
163 |
- 'artifacts': {'url': share.repo, 'push': True},
|
|
164 |
- 'artifactdir': os.path.join(cli.directory, 'artifacts2')
|
|
165 |
- })
|
|
162 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'artifacts'))
|
|
163 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
|
|
166 | 164 |
assert cli.get_element_state(project, element_name) != 'cached'
|
167 | 165 |
|
168 | 166 |
# Pull from cache, but do not include buildtrees.
|
... | ... | @@ -202,7 +200,7 @@ def test_buildtree_options(cli, tmpdir, datafiles): |
202 | 200 |
])
|
203 | 201 |
assert 'Attempting to fetch missing artifact buildtree' in res.stderr
|
204 | 202 |
assert 'Hi' in res.output
|
205 |
- shutil.rmtree(os.path.join(os.path.join(cli.directory, 'artifacts2')))
|
|
203 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
|
|
206 | 204 |
assert cli.get_element_state(project, element_name) != 'cached'
|
207 | 205 |
|
208 | 206 |
# Check it's not loading the shell at all with always set for the buildtree, when the
|
... | ... | @@ -160,7 +160,6 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error): |
160 | 160 |
|
161 | 161 |
# This element should have failed
|
162 | 162 |
assert cli.get_element_state(project, 'element.bst') == 'failed'
|
163 |
- # This element should have been pushed to the remote
|
|
164 | 163 |
assert share.has_artifact('test', 'element.bst', cli.get_element_key(project, 'element.bst'))
|
165 | 164 |
|
166 | 165 |
|
... | ... | @@ -40,7 +40,7 @@ DATA_DIR = os.path.join( |
40 | 40 |
@pytest.mark.integration
|
41 | 41 |
@pytest.mark.datafiles(DATA_DIR)
|
42 | 42 |
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
43 |
-def test_disable_message_lines(cli, tmpdir, datafiles):
|
|
43 |
+def test_disable_message_lines(cli, tmpdir, datafiles, integration_cache):
|
|
44 | 44 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
45 | 45 |
element_path = os.path.join(project, 'elements')
|
46 | 46 |
element_name = 'message.bst'
|
... | ... | @@ -66,7 +66,7 @@ def test_disable_message_lines(cli, tmpdir, datafiles): |
66 | 66 |
assert 'echo "Silly message"' in result.stderr
|
67 | 67 |
|
68 | 68 |
# Let's now build it again, but with --message-lines 0
|
69 |
- cli.remove_artifact_from_cache(project, element_name)
|
|
69 |
+ cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
|
|
70 | 70 |
result = cli.run(project=project, args=["--message-lines", "0",
|
71 | 71 |
"build", element_name])
|
72 | 72 |
result.assert_success()
|
... | ... | @@ -76,7 +76,7 @@ def test_disable_message_lines(cli, tmpdir, datafiles): |
76 | 76 |
@pytest.mark.integration
|
77 | 77 |
@pytest.mark.datafiles(DATA_DIR)
|
78 | 78 |
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
79 |
-def test_disable_error_lines(cli, tmpdir, datafiles):
|
|
79 |
+def test_disable_error_lines(cli, tmpdir, datafiles, integration_cache):
|
|
80 | 80 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
81 | 81 |
element_path = os.path.join(project, 'elements')
|
82 | 82 |
element_name = 'message.bst'
|
... | ... | @@ -103,7 +103,7 @@ def test_disable_error_lines(cli, tmpdir, datafiles): |
103 | 103 |
assert "This is a syntax error" in result.stderr
|
104 | 104 |
|
105 | 105 |
# Let's now build it again, but with --error-lines 0
|
106 |
- cli.remove_artifact_from_cache(project, element_name)
|
|
106 |
+ cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
|
|
107 | 107 |
result = cli.run(project=project, args=["--error-lines", "0",
|
108 | 108 |
"build", element_name])
|
109 | 109 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
... | ... | @@ -19,9 +19,10 @@ DATA_DIR = os.path.join( |
19 | 19 |
# cleared as just forcefully removing the refpath leaves dangling objects.
|
20 | 20 |
def default_state(cli, tmpdir, share):
|
21 | 21 |
shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
22 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
|
|
22 | 23 |
cli.configure({
|
23 | 24 |
'artifacts': {'url': share.repo, 'push': False},
|
24 |
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
|
|
25 |
+ 'cachedir': str(tmpdir),
|
|
25 | 26 |
'cache': {'pull-buildtrees': False},
|
26 | 27 |
})
|
27 | 28 |
|
... | ... | @@ -42,7 +43,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles): |
42 | 43 |
create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
|
43 | 44 |
cli2.configure({
|
44 | 45 |
'artifacts': {'url': share1.repo, 'push': True},
|
45 |
- 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
46 |
+ 'cachedir': str(tmpdir),
|
|
46 | 47 |
})
|
47 | 48 |
|
48 | 49 |
# Build autotools element, check it is pushed, delete local copy
|
... | ... | @@ -94,9 +94,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca |
94 | 94 |
return f.read()
|
95 | 95 |
finally:
|
96 | 96 |
os.umask(old_umask)
|
97 |
- cache_dir = integration_cache.artifacts
|
|
98 |
- cli.remove_artifact_from_cache(project, element_name,
|
|
99 |
- cache_dir=cache_dir)
|
|
97 |
+ cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
|
|
100 | 98 |
|
101 | 99 |
assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
|
102 | 100 |
|
... | ... | @@ -156,8 +154,6 @@ def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache): |
156 | 154 |
with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
|
157 | 155 |
return f.read()
|
158 | 156 |
finally:
|
159 |
- cache_dir = integration_cache.artifacts
|
|
160 |
- cli.remove_artifact_from_cache(project, element_name,
|
|
161 |
- cache_dir=cache_dir)
|
|
157 |
+ cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
|
|
162 | 158 |
|
163 | 159 |
assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)
|
... | ... | @@ -46,7 +46,6 @@ class ArtifactShare(): |
46 | 46 |
# in tests as a remote artifact push/pull configuration
|
47 | 47 |
#
|
48 | 48 |
self.repodir = os.path.join(self.directory, 'repo')
|
49 |
- |
|
50 | 49 |
os.makedirs(self.repodir)
|
51 | 50 |
|
52 | 51 |
self.cas = CASCache(self.repodir)
|
... | ... | @@ -171,7 +170,9 @@ class ArtifactShare(): |
171 | 170 |
|
172 | 171 |
def _mock_statvfs(self, path):
|
173 | 172 |
repo_size = 0
|
174 |
- for root, _, files in os.walk(self.repodir):
|
|
173 |
+ for root, dirs, files in os.walk(self.repodir):
|
|
174 |
+ for dirname in dirs:
|
|
175 |
+ repo_size += os.path.getsize(os.path.join(root, dirname))
|
|
175 | 176 |
for filename in files:
|
176 | 177 |
repo_size += os.path.getsize(os.path.join(root, filename))
|
177 | 178 |
|
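Counting directory entries as well as files brings the mocked disk usage closer to what a real statvfs() reports, since directory inodes occupy blocks too. A sketch of how such a result can be faked, using the field order of os.statvfs_result; the surrounding ArtifactShare plumbing is assumed:

    import os

    def fake_statvfs(repo_size, total_space, block_size=4096):
        # os.statvfs_result fields, in order:
        # f_bsize, f_frsize, f_blocks, f_bfree, f_bavail,
        # f_files, f_ffree, f_favail, f_flag, f_namemax
        free_blocks = max(0, (total_space - repo_size) // block_size)
        return os.statvfs_result((
            block_size, block_size,
            total_space // block_size,
            free_blocks, free_blocks,
            0, 0, 0, 0, 255))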
... | ... | @@ -247,15 +247,13 @@ class Cli(): |
247 | 247 |
*, cache_dir=None):
|
248 | 248 |
# Fall back to the project's cache directory if none was given
|
249 | 249 |
if not cache_dir:
|
250 |
- default = os.path.join(project, 'cache', 'artifacts')
|
|
251 |
- |
|
252 |
- if self.config is not None:
|
|
253 |
- cache_dir = self.config.get('artifactdir', default)
|
|
254 |
- else:
|
|
255 |
- cache_dir = default
|
|
250 |
+ cache_dir = os.path.join(project, 'cache')
|
|
256 | 251 |
|
257 | 252 |
cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
|
258 | 253 |
|
254 |
+ # replace forward slashes in the element name with dashes
|
|
255 |
+ element_name = element_name.replace('/', '-')
|
|
256 |
+ |
|
259 | 257 |
cache_dir = os.path.splitext(os.path.join(cache_dir, 'test', element_name))[0]
|
260 | 258 |
shutil.rmtree(cache_dir)
|
261 | 259 |
|
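With the cache rooted at <project>/cache, the helper resolves an artifact ref directly. A condensed sketch of the path logic above ('test' is the project name these tests use):

    import os

    def artifact_ref_dir(cache_dir, element_name, project_name='test'):
        # refs live under <cachedir>/cas/refs/heads/<project>/<element>
        refs = os.path.join(cache_dir, 'cas', 'refs', 'heads')
        # forward slashes in element names (e.g. junctioned elements)
        # become dashes, mirroring the helper above
        element_name = element_name.replace('/', '-')
        # drop the '.bst' extension to obtain the ref directory
        return os.path.splitext(os.path.join(refs, project_name, element_name))[0]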
... | ... | @@ -552,11 +550,21 @@ def cli_integration(tmpdir, integration_cache): |
552 | 550 |
# We want to cache sources for integration tests more permanently,
|
553 | 551 |
# to avoid downloading the huge base-sdk repeatedly
|
554 | 552 |
fixture.configure({
|
553 |
+ 'cachedir': integration_cache.root,
|
|
555 | 554 |
'sourcedir': integration_cache.sources,
|
556 |
- 'artifactdir': integration_cache.artifacts
|
|
557 | 555 |
})
|
558 | 556 |
|
559 |
- return fixture
|
|
557 |
+ yield fixture
|
|
558 |
+ |
|
559 |
+ # Remove the following directories if they exist
|
|
560 |
+ try:
|
|
561 |
+ shutil.rmtree(os.path.join(integration_cache.root, 'build'))
|
|
562 |
+ except FileNotFoundError:
|
|
563 |
+ pass
|
|
564 |
+ try:
|
|
565 |
+ shutil.rmtree(os.path.join(integration_cache.root, 'tmp'))
|
|
566 |
+ except FileNotFoundError:
|
|
567 |
+ pass
|
|
560 | 568 |
|
561 | 569 |
|
562 | 570 |
@contextmanager
|
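Switching from 'return fixture' to 'yield fixture' is what lets the cleanup run after each test. A minimal sketch of the pattern, with shutil.rmtree(..., ignore_errors=True) as a compact stand-in for the try/except FileNotFoundError above (note it also swallows other OSErrors):

    import os
    import shutil
    import pytest

    @pytest.fixture()
    def cache_root_fixture(tmpdir):
        root = str(tmpdir)
        yield root                       # the test body runs at this point
        for subdir in ('build', 'tmp'):  # transient dirs under the cache root
            shutil.rmtree(os.path.join(root, subdir), ignore_errors=True)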
... | ... | @@ -596,10 +604,8 @@ def configured(directory, config=None): |
596 | 604 |
|
597 | 605 |
if not config.get('sourcedir', False):
|
598 | 606 |
config['sourcedir'] = os.path.join(directory, 'sources')
|
599 |
- if not config.get('builddir', False):
|
|
600 |
- config['builddir'] = os.path.join(directory, 'build')
|
|
601 |
- if not config.get('artifactdir', False):
|
|
602 |
- config['artifactdir'] = os.path.join(directory, 'artifacts')
|
|
607 |
+ if not config.get('cachedir', False):
|
|
608 |
+ config['cachedir'] = directory
|
|
603 | 609 |
if not config.get('logdir', False):
|
604 | 610 |
config['logdir'] = os.path.join(directory, 'logs')
|
605 | 611 |
|
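The chain of "if not config.get(key, False)" guards could also be written with dict.setdefault; a sketch, equivalent when the keys are simply absent (unlike the guards, setdefault will not replace an explicitly falsy value):

    import os

    def apply_defaults(config, directory):
        # Same effect as the get()-guards above when the keys are missing
        config.setdefault('sourcedir', os.path.join(directory, 'sources'))
        config.setdefault('cachedir', directory)
        config.setdefault('logdir', os.path.join(directory, 'logs'))
        return config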