[Notes] [Git][BuildStream/buildstream][raoul/870-root-cache-dir] 3 commits: cachedir: add new dir option that's default root to other dirs




Raoul Hidalgo Charman pushed to branch raoul/870-root-cache-dir at BuildStream / buildstream

Commits:

29 changed files:

Changes:

  • buildstream/_artifactcache.py
    @@ -22,7 +22,7 @@ import os
     from collections.abc import Mapping
     
     from .types import _KeyStrength
    -from ._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
    +from ._exceptions import ArtifactError, CASError
     from ._message import Message, MessageType
     from . import utils
     from . import _yaml

    @@ -46,39 +46,6 @@ class ArtifactCacheSpec(CASRemoteSpec):
         pass
     
     
    -# ArtifactCacheUsage
    -#
    -# A simple object to report the current artifact cache
    -# usage details.
    -#
    -# Note that this uses the user configured cache quota
    -# rather than the internal quota with protective headroom
    -# removed, to provide a more sensible value to display to
    -# the user.
    -#
    -# Args:
    -#    artifacts (ArtifactCache): The artifact cache to get the status of
    -#
    -class ArtifactCacheUsage():
    -
    -    def __init__(self, artifacts):
    -        context = artifacts.context
    -        self.quota_config = context.config_cache_quota       # Configured quota
    -        self.quota_size = artifacts._cache_quota_original    # Resolved cache quota in bytes
    -        self.used_size = artifacts.get_cache_size()          # Size used by artifacts in bytes
    -        self.used_percent = 0                                # Percentage of the quota used
    -        if self.quota_size is not None:
    -            self.used_percent = int(self.used_size * 100 / self.quota_size)
    -
    -    # Formattable into a human readable string
    -    #
    -    def __str__(self):
    -        return "{} / {} ({}%)" \
    -            .format(utils._pretty_size(self.used_size, dec_places=1),
    -                    self.quota_config,
    -                    self.used_percent)
    -
    -
     # An ArtifactCache manages artifacts.
     #
     # Args:

    @@ -87,19 +54,17 @@ class ArtifactCacheUsage():
     class ArtifactCache():
         def __init__(self, context):
             self.context = context
    -        self.extractdir = os.path.join(context.artifactdir, 'extract')
    +        self.extractdir = context.extractdir
     
             self.cas = context.get_cascache()
    +        self.casquota = context.get_casquota()
    +        self.casquota._calculate_cache_quota()
     
             self.global_remote_specs = []
             self.project_remote_specs = {}
     
             self._required_elements = set()       # The elements required for this session
    -        self._cache_size = None               # The current cache size, sometimes it's an estimate
    -        self._cache_quota = None              # The cache quota
    -        self._cache_quota_original = None     # The cache quota as specified by the user, in bytes
    -        self._cache_quota_headroom = None     # The headroom in bytes before reaching the quota or full disk
    -        self._cache_lower_threshold = None    # The target cache size for a cleanup
    +
             self._remotes_setup = False           # Check to prevent double-setup of remotes
     
             # Per-project list of _CASRemote instances.

    @@ -110,8 +75,6 @@ class ArtifactCache():
     
             os.makedirs(self.extractdir, exist_ok=True)
     
    -        self._calculate_cache_quota()
    -
         # setup_remotes():
         #
         # Sets up which remotes to use

    @@ -235,7 +198,7 @@ class ArtifactCache():
             space_saved = 0
     
             # Start off with an announcement with as much info as possible
    -        volume_size, volume_avail = self._get_cache_volume_size()
    +        volume_size, volume_avail = self.casquota._get_cache_volume_size()
             self._message(MessageType.STATUS, "Starting cache cleanup",
                           detail=("Elements required by the current build plan: {}\n" +
                                   "User specified quota: {} ({})\n" +

    @@ -243,8 +206,8 @@ class ArtifactCache():
                                   "Cache volume: {} total, {} available")
                           .format(len(self._required_elements),
                                   context.config_cache_quota,
    -                              utils._pretty_size(self._cache_quota_original, dec_places=2),
    -                              utils._pretty_size(self.get_cache_size(), dec_places=2),
    +                              utils._pretty_size(self.casquota._cache_quota, dec_places=2),
    +                              utils._pretty_size(self.casquota.get_cache_size(), dec_places=2),
                                   utils._pretty_size(volume_size, dec_places=2),
                                   utils._pretty_size(volume_avail, dec_places=2)))
     

    @@ -261,9 +224,10 @@ class ArtifactCache():
                 ])
     
             # Do a real computation of the cache size once, just in case
    -        self.compute_cache_size()
    +        usage = self.casquota.compute_cache_size()
    +        self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
     
    -        while self.get_cache_size() >= self._cache_lower_threshold:
    +        while self.casquota.get_cache_size() >= self.casquota._cache_lower_threshold:
                 try:
                     to_remove = artifacts.pop(0)
                 except IndexError:

    @@ -280,7 +244,7 @@ class ArtifactCache():
                           "Please increase the cache-quota in {} and/or make more disk space."
                           .format(removed_ref_count,
                                   utils._pretty_size(space_saved, dec_places=2),
    -                              utils._pretty_size(self.get_cache_size(), dec_places=2),
    +                              utils._pretty_size(self.casquota.get_cache_size(), dec_places=2),
                                   len(self._required_elements),
                                   (context.config_origin or default_conf)))
     

    @@ -306,7 +270,7 @@ class ArtifactCache():
                                   to_remove))
     
                     # Remove the size from the removed size
    -                self.set_cache_size(self._cache_size - size)
    +                self.casquota.set_cache_size(self.casquota._cache_size - size)
     
                     # User callback
                     #

    @@ -322,29 +286,12 @@ class ArtifactCache():
                               "Cache usage is now: {}")
                       .format(removed_ref_count,
                               utils._pretty_size(space_saved, dec_places=2),
    -                          utils._pretty_size(self.get_cache_size(), dec_places=2)))
    -
    -        return self.get_cache_size()
    -
    -    # compute_cache_size()
    -    #
    -    # Computes the real artifact cache size by calling
    -    # the abstract calculate_cache_size() method.
    -    #
    -    # Returns:
    -    #    (int): The size of the artifact cache.
    -    #
    -    def compute_cache_size(self):
    -        old_cache_size = self._cache_size
    -        new_cache_size = self.cas.calculate_cache_size()
    -
    -        if old_cache_size != new_cache_size:
    -            self._cache_size = new_cache_size
    -
    -            usage = ArtifactCacheUsage(self)
    -            self._message(MessageType.STATUS, "Cache usage recomputed: {}".format(usage))
    -
    -        return self._cache_size
    +                          utils._pretty_size(self.casquota.get_cache_size(), dec_places=2)))
    +
    +        return self.casquota.get_cache_size()
    +
    +    def full(self):
    +        return self.casquota.full()
     
         # add_artifact_size()
         #

    @@ -355,71 +302,10 @@ class ArtifactCache():
         #     artifact_size (int): The size to add.
         #
         def add_artifact_size(self, artifact_size):
    -        cache_size = self.get_cache_size()
    +        cache_size = self.casquota.get_cache_size()
             cache_size += artifact_size
     
    -        self.set_cache_size(cache_size)
    -
    -    # get_cache_size()
    -    #
    -    # Fetches the cached size of the cache, this is sometimes
    -    # an estimate and periodically adjusted to the real size
    -    # when a cache size calculation job runs.
    -    #
    -    # When it is an estimate, the value is either correct, or
    -    # it is greater than the actual cache size.
    -    #
    -    # Returns:
    -    #     (int) An approximation of the artifact cache size, in bytes.
    -    #
    -    def get_cache_size(self):
    -
    -        # If we don't currently have an estimate, figure out the real cache size.
    -        if self._cache_size is None:
    -            stored_size = self._read_cache_size()
    -            if stored_size is not None:
    -                self._cache_size = stored_size
    -            else:
    -                self.compute_cache_size()
    -
    -        return self._cache_size
    -
    -    # set_cache_size()
    -    #
    -    # Forcefully set the overall cache size.
    -    #
    -    # This is used to update the size in the main process after
    -    # having calculated in a cleanup or a cache size calculation job.
    -    #
    -    # Args:
    -    #     cache_size (int): The size to set.
    -    #
    -    def set_cache_size(self, cache_size):
    -
    -        assert cache_size is not None
    -
    -        self._cache_size = cache_size
    -        self._write_cache_size(self._cache_size)
    -
    -    # full()
    -    #
    -    # Checks if the artifact cache is full, either
    -    # because the user configured quota has been exceeded
    -    # or because the underlying disk is almost full.
    -    #
    -    # Returns:
    -    #    (bool): True if the artifact cache is full
    -    #
    -    def full(self):
    -
    -        if self.get_cache_size() > self._cache_quota:
    -            return True
    -
    -        _, volume_avail = self._get_cache_volume_size()
    -        if volume_avail < self._cache_quota_headroom:
    -            return True
    -
    -        return False
    +        self.casquota.set_cache_size(cache_size)
     
         # preflight():
         #

    @@ -882,142 +768,6 @@ class ArtifactCache():
             with self.context.timed_activity("Initializing remote caches", silent_nested=True):
                 self.initialize_remotes(on_failure=remote_failed)
     
    -    # _write_cache_size()
    -    #
    -    # Writes the given size of the artifact to the cache's size file
    -    #
    -    # Args:
    -    #    size (int): The size of the artifact cache to record
    -    #
    -    def _write_cache_size(self, size):
    -        assert isinstance(size, int)
    -        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
    -        with utils.save_file_atomic(size_file_path, "w") as f:
    -            f.write(str(size))
    -
    -    # _read_cache_size()
    -    #
    -    # Reads and returns the size of the artifact cache that's stored in the
    -    # cache's size file
    -    #
    -    # Returns:
    -    #    (int): The size of the artifact cache, as recorded in the file
    -    #
    -    def _read_cache_size(self):
    -        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
    -
    -        if not os.path.exists(size_file_path):
    -            return None
    -
    -        with open(size_file_path, "r") as f:
    -            size = f.read()
    -
    -        try:
    -            num_size = int(size)
    -        except ValueError as e:
    -            raise ArtifactError("Size '{}' parsed from '{}' was not an integer".format(
    -                size, size_file_path)) from e
    -
    -        return num_size
    -
    -    # _calculate_cache_quota()
    -    #
    -    # Calculates and sets the cache quota and lower threshold based on the
    -    # quota set in Context.
    -    # It checks that the quota is both a valid expression, and that there is
    -    # enough disk space to satisfy that quota
    -    #
    -    def _calculate_cache_quota(self):
    -        # Headroom intended to give BuildStream a bit of leeway.
    -        # This acts as the minimum size of cache_quota and also
    -        # is taken from the user requested cache_quota.
    -        #
    -        if 'BST_TEST_SUITE' in os.environ:
    -            self._cache_quota_headroom = 0
    -        else:
    -            self._cache_quota_headroom = 2e9
    -
    -        try:
    -            cache_quota = utils._parse_size(self.context.config_cache_quota,
    -                                            self.context.artifactdir)
    -        except utils.UtilError as e:
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    -                            "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
    -                            "\nValid values are, for example: 800M 10G 1T 50%\n"
    -                            .format(str(e))) from e
    -
    -        total_size, available_space = self._get_cache_volume_size()
    -        cache_size = self.get_cache_size()
    -
    -        # Ensure system has enough storage for the cache_quota
    -        #
    -        # If cache_quota is none, set it to the maximum it could possibly be.
    -        #
    -        # Also check that cache_quota is at least as large as our headroom.
    -        #
    -        if cache_quota is None:  # Infinity, set to max system storage
    -            cache_quota = cache_size + available_space
    -        if cache_quota < self._cache_quota_headroom:  # Check minimum
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    -                            "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
    -                            "BuildStream requires a minimum cache quota of 2G.")
    -        elif cache_quota > total_size:
    -            # A quota greater than the total disk size is certainly an error
    -            raise ArtifactError("Your system does not have enough available " +
    -                                "space to support the cache quota specified.",
    -                                detail=("You have specified a quota of {quota} total disk space.\n" +
    -                                        "The filesystem containing {local_cache_path} only " +
    -                                        "has {total_size} total disk space.")
    -                                .format(
    -                                    quota=self.context.config_cache_quota,
    -                                    local_cache_path=self.context.artifactdir,
    -                                    total_size=utils._pretty_size(total_size)),
    -                                reason='insufficient-storage-for-quota')
    -        elif cache_quota > cache_size + available_space:
    -            # The quota does not fit in the available space, this is a warning
    -            if '%' in self.context.config_cache_quota:
    -                available = (available_space / total_size) * 100
    -                available = '{}% of total disk space'.format(round(available, 1))
    -            else:
    -                available = utils._pretty_size(available_space)
    -
    -            self._message(MessageType.WARN,
    -                          "Your system does not have enough available " +
    -                          "space to support the cache quota specified.",
    -                          detail=("You have specified a quota of {quota} total disk space.\n" +
    -                                  "The filesystem containing {local_cache_path} only " +
    -                                  "has {available_size} available.")
    -                          .format(quota=self.context.config_cache_quota,
    -                                  local_cache_path=self.context.artifactdir,
    -                                  available_size=available))
    -
    -        # Place a slight headroom (2e9 (2GB) on the cache_quota) into
    -        # cache_quota to try and avoid exceptions.
    -        #
    -        # Of course, we might still end up running out during a build
    -        # if we end up writing more than 2G, but hey, this stuff is
    -        # already really fuzzy.
    -        #
    -        self._cache_quota_original = cache_quota
    -        self._cache_quota = cache_quota - self._cache_quota_headroom
    -        self._cache_lower_threshold = self._cache_quota / 2
    -
    -    # _get_cache_volume_size()
    -    #
    -    # Get the available space and total space for the volume on
    -    # which the artifact cache is located.
    -    #
    -    # Returns:
    -    #    (int): The total number of bytes on the volume
    -    #    (int): The number of available bytes on the volume
    -    #
    -    # NOTE: We use this stub to allow the test cases
    -    #       to override what an artifact cache thinks
    -    #       about its disk size and available bytes.
    -    #
    -    def _get_cache_volume_size(self):
    -        return utils._get_volume_size(self.context.artifactdir)
    -
     
     # _configured_remote_artifact_cache_specs():
     #
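    Note: the net effect of this file's changes is that ArtifactCache no
    longer owns any size or quota bookkeeping; it holds a CASQuota and
    forwards to it. A minimal sketch of that delegation (hypothetical
    stand-in classes for illustration, not BuildStream API):

        # Illustrative only: the cache forwards size accounting to a
        # quota object owned by the context, as in the diff above.
        class Quota:
            def __init__(self):
                self._size = 0

            def get_cache_size(self):
                return self._size

            def set_cache_size(self, size):
                self._size = size

        class Cache:
            def __init__(self, quota):
                self.quota = quota  # shared, owned by the context

            def add_artifact_size(self, artifact_size):
                # mirrors ArtifactCache.add_artifact_size() above
                self.quota.set_cache_size(
                    self.quota.get_cache_size() + artifact_size)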

  • buildstream/_cas/__init__.py
    @@ -17,5 +17,5 @@
     #  Authors:
     #        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
     
    -from .cascache import CASCache
    +from .cascache import CASCache, CASQuota, CASCacheUsage
     from .casremote import CASRemote, CASRemoteSpec

  • buildstream/_cas/cascache.py
    @@ -32,17 +32,53 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
     from .._protos.buildstream.v2 import buildstream_pb2
     
     from .. import utils
    -from .._exceptions import CASCacheError
    +from .._exceptions import CASCacheError, LoadError, LoadErrorReason
    +from .._message import Message, MessageType
     
     from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate
     
     _BUFFER_SIZE = 65536
     
     
    +CACHE_SIZE_FILE = "cache_size"
    +
    +
    +# CASCacheUsage
    +#
    +# A simple object to report the current CAS cache usage details.
    +#
    +# Note that this uses the user configured cache quota
    +# rather than the internal quota with protective headroom
    +# removed, to provide a more sensible value to display to
    +# the user.
    +#
    +# Args:
    +#    cas (CASQuota): The CAS cache to get the status of
    +#
    +class CASCacheUsage():
    +
    +    def __init__(self, casquota):
    +        self.quota_config = casquota._config_cache_quota          # Configured quota
    +        self.quota_size = casquota._cache_quota_original          # Resolved cache quota in bytes
    +        self.used_size = casquota.get_cache_size()                # Size used by artifacts in bytes
    +        self.used_percent = 0                                     # Percentage of the quota used
    +        if self.quota_size is not None:
    +            self.used_percent = int(self.used_size * 100 / self.quota_size)
    +
    +    # Formattable into a human readable string
    +    #
    +    def __str__(self):
    +        return "{} / {} ({}%)" \
    +            .format(utils._pretty_size(self.used_size, dec_places=1),
    +                    self.quota_config,
    +                    self.used_percent)
    +
    +
     # A CASCache manages a CAS repository as specified in the Remote Execution API.
     #
     # Args:
     #     path (str): The root directory for the CAS repository
    +#     cache_quota (int): User configured cache quota
     #
     class CASCache():
     

    @@ -459,16 +495,6 @@ class CASCache():
             except FileNotFoundError as e:
                 raise CASCacheError("Attempt to access unavailable ref: {}".format(e)) from e
     
    -    # calculate_cache_size()
    -    #
    -    # Return the real disk usage of the CAS cache.
    -    #
    -    # Returns:
    -    #    (int): The size of the cache.
    -    #
    -    def calculate_cache_size(self):
    -        return utils._get_dir_size(self.casdir)
    -
         # list_refs():
         #
         # List refs in Least Recently Modified (LRM) order.

    @@ -1043,6 +1069,248 @@ class CASCache():
             batch.send()
     
     
    +class CASQuota:
    +    def __init__(self, context):
    +        self.cas = context.get_cascache()
    +        self.casdir = self.cas.casdir
    +        self._config_cache_quota = context.config_cache_quota
    +        self._config_cache_quota_string = context.config_cache_quota_string
    +        self._cache_size = None               # The current cache size, sometimes it's an estimate
    +        self._cache_quota = None              # The cache quota
    +        self._cache_quota_original = None     # The cache quota as specified by the user, in bytes
    +        self._cache_quota_headroom = None     # The headroom in bytes before reaching the quota or full disk
    +        self._cache_lower_threshold = None    # The target cache size for a cleanup
    +        self.available_space = None
    +
    +        self._message = context.message
    +
    +        self._calculate_cache_quota()
    +
    +    # compute_cache_size()
    +    #
    +    # Computes the real artifact cache size by calling
    +    # the abstract calculate_cache_size() method.
    +    #
    +    # Returns:
    +    #    (int): The size of the artifact cache.
    +    #
    +    def compute_cache_size(self):
    +        old_cache_size = self._cache_size
    +        new_cache_size = self.calculate_cache_size()
    +
    +        if old_cache_size != new_cache_size:
    +            self._cache_size = new_cache_size
    +
    +        return self._cache_size
    +
    +    # calculate_cache_size()
    +    #
    +    # Return the real disk usage of the CAS cache.
    +    #
    +    # Returns:
    +    #    (int): The size of the cache.
    +    #
    +    def calculate_cache_size(self):
    +        return utils._get_dir_size(self.casdir)
    +
    +    # get_cache_size()
    +    #
    +    # Fetches the cached size of the cache, this is sometimes
    +    # an estimate and periodically adjusted to the real size
    +    # when a cache size calculation job runs.
    +    #
    +    # When it is an estimate, the value is either correct, or
    +    # it is greater than the actual cache size.
    +    #
    +    # Returns:
    +    #     (int) An approximation of the artifact cache size, in bytes.
    +    #
    +    def get_cache_size(self):
    +
    +        # If we don't currently have an estimate, figure out the real cache size.
    +        if self._cache_size is None:
    +            stored_size = self._read_cache_size()
    +            if stored_size is not None:
    +                self._cache_size = stored_size
    +            else:
    +                self._cache_size = self.compute_cache_size()
    +
    +        return self._cache_size
    +
    +    # set_cache_size()
    +    #
    +    # Forcefully set the overall cache size.
    +    #
    +    # This is used to update the size in the main process after
    +    # having calculated in a cleanup or a cache size calculation job.
    +    #
    +    # Args:
    +    #     cache_size (int): The size to set.
    +    #
    +    def set_cache_size(self, cache_size):
    +
    +        assert cache_size is not None
    +
    +        self._cache_size = cache_size
    +        self._write_cache_size(self._cache_size)
    +
    +    # full()
    +    #
    +    # Checks if the artifact cache is full, either
    +    # because the user configured quota has been exceeded
    +    # or because the underlying disk is almost full.
    +    #
    +    # Returns:
    +    #    (bool): True if the artifact cache is full
    +    #
    +    def full(self):
    +
    +        if self.get_cache_size() > self._cache_quota:
    +            return True
    +
    +        _, volume_avail = self._get_cache_volume_size()
    +        if volume_avail < self._cache_quota_headroom:
    +            return True
    +
    +        return False
    +
    +    ################################################
    +    #             Local Private Methods            #
    +    ################################################
    +
    +    # _read_cache_size()
    +    #
    +    # Reads and returns the size of the artifact cache that's stored in the
    +    # cache's size file
    +    #
    +    # Returns:
    +    #    (int): The size of the artifact cache, as recorded in the file
    +    #
    +    def _read_cache_size(self):
    +        size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
    +
    +        if not os.path.exists(size_file_path):
    +            return None
    +
    +        with open(size_file_path, "r") as f:
    +            size = f.read()
    +
    +        try:
    +            num_size = int(size)
    +        except ValueError as e:
    +            raise CASCacheError("Size '{}' parsed from '{}' was not an integer".format(
    +                size, size_file_path)) from e
    +
    +        return num_size
    +
    +    # _write_cache_size()
    +    #
    +    # Writes the given size of the artifact to the cache's size file
    +    #
    +    # Args:
    +    #    size (int): The size of the artifact cache to record
    +    #
    +    def _write_cache_size(self, size):
    +        assert isinstance(size, int)
    +        size_file_path = os.path.join(self.casdir, CACHE_SIZE_FILE)
    +        with utils.save_file_atomic(size_file_path, "w") as f:
    +            f.write(str(size))
    +
    +    # _get_cache_volume_size()
    +    #
    +    # Get the available space and total space for the volume on
    +    # which the artifact cache is located.
    +    #
    +    # Returns:
    +    #    (int): The total number of bytes on the volume
    +    #    (int): The number of available bytes on the volume
    +    #
    +    # NOTE: We use this stub to allow the test cases
    +    #       to override what an artifact cache thinks
    +    #       about its disk size and available bytes.
    +    #
    +    def _get_cache_volume_size(self):
    +        return utils._get_volume_size(self.casdir)
    +
    +    # _calculate_cache_quota()
    +    #
    +    # Calculates and sets the cache quota and lower threshold based on the
    +    # quota set in Context.
    +    # It checks that the quota is both a valid expression, and that there is
    +    # enough disk space to satisfy that quota
    +    #
    +    def _calculate_cache_quota(self):
    +        # Headroom intended to give BuildStream a bit of leeway.
    +        # This acts as the minimum size of cache_quota and also
    +        # is taken from the user requested cache_quota.
    +        #
    +        if 'BST_TEST_SUITE' in os.environ:
    +            self._cache_quota_headroom = 0
    +        else:
    +            self._cache_quota_headroom = 2e9
    +
    +        total_size, available_space = self._get_cache_volume_size()
    +        cache_size = self.get_cache_size()
    +        self.available_space = available_space
    +
    +        # Ensure system has enough storage for the cache_quota
    +        #
    +        # If cache_quota is none, set it to the maximum it could possibly be.
    +        #
    +        # Also check that cache_quota is at least as large as our headroom.
    +        #
    +        cache_quota = self._config_cache_quota
    +        if cache_quota is None:  # Infinity, set to max system storage
    +            cache_quota = cache_size + available_space
    +        if cache_quota < self._cache_quota_headroom:  # Check minimum
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
    +                            "BuildStream requires a minimum cache quota of 2G.")
    +        elif cache_quota > total_size:
    +            # A quota greater than the total disk size is certainly an error
    +            raise CASCacheError("Your system does not have enough available " +
    +                                "space to support the cache quota specified.",
    +                                detail=("You have specified a quota of {quota} total disk space.\n" +
    +                                        "The filesystem containing {local_cache_path} only " +
    +                                        "has {total_size} total disk space.")
    +                                .format(
    +                                    quota=self._config_cache_quota,
    +                                    local_cache_path=self.casdir,
    +                                    total_size=utils._pretty_size(total_size)),
    +                                reason='insufficient-storage-for-quota')
    +
    +        elif cache_quota > cache_size + available_space:
    +            # The quota does not fit in the available space, this is a warning
    +            if '%' in self._config_cache_quota_string:
    +                available = (available_space / total_size) * 100
    +                available = '{}% of total disk space'.format(round(available, 1))
    +            else:
    +                available = utils._pretty_size(available_space)
    +
    +            self._message(Message(
    +                None,
    +                MessageType.WARN,
    +                "Your system does not have enough available " +
    +                "space to support the cache quota specified.",
    +                detail=("You have specified a quota of {quota} total disk space.\n" +
    +                        "The filesystem containing {local_cache_path} only " +
    +                        "has {available_size} available.")
    +                .format(quota=self._config_cache_quota,
    +                        local_cache_path=self.casdir,
    +                        available_size=available)))
    +
    +        # Place a slight headroom (2e9 (2GB) on the cache_quota) into
    +        # cache_quota to try and avoid exceptions.
    +        #
    +        # Of course, we might still end up running out during a build
    +        # if we end up writing more than 2G, but hey, this stuff is
    +        # already really fuzzy.
    +        #
    +        self._cache_quota_original = cache_quota
    +        self._cache_quota = cache_quota - self._cache_quota_headroom
    +        self._cache_lower_threshold = self._cache_quota / 2
    +
    +
     def _grouper(iterable, n):
         while True:
             try:
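    Note: as a worked example of the arithmetic in CASCacheUsage, a quota
    configured as "10G" (resolving to 10,000,000,000 bytes) with 5.2 GB in
    use would render as "5.2 GB / 10G (52%)". A small sketch with assumed
    numbers, using the same formula as __str__ above:

        # used_percent as computed in CASCacheUsage.__init__()
        used_size = 5_200_000_000       # bytes currently stored in the CAS
        quota_size = 10_000_000_000     # resolved from the "10G" quota string
        used_percent = int(used_size * 100 / quota_size)
        print("5.2 GB / 10G ({}%)".format(used_percent))  # -> 52%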

  • buildstream/_context.py
    @@ -30,8 +30,8 @@ from . import _yaml
     from ._exceptions import LoadError, LoadErrorReason, BstError
     from ._message import Message, MessageType
     from ._profile import Topics, profile_start, profile_end
    -from ._artifactcache import ArtifactCache, ArtifactCacheUsage
    -from ._cas import CASCache
    +from ._artifactcache import ArtifactCache
    +from ._cas import CASCache, CASQuota, CASCacheUsage
     from ._workspaces import Workspaces, WorkspaceProjectCache
     from .plugin import _plugin_lookup
     from .sandbox import SandboxRemote

    @@ -58,18 +58,27 @@ class Context():
             # Filename indicating which configuration file was used, or None for the defaults
             self.config_origin = None
     
    +        # The directory under which other directories are based
    +        self.cachedir = None
    +
             # The directory where various sources are stored
             self.sourcedir = None
     
             # The directory where build sandboxes will be created
             self.builddir = None
     
    +        # The directory for CAS
    +        self.casdir = None
    +
    +        # Extract directory
    +        self.extractdir = None
    +
    +        # The directory for temporary files
    +        self.tmpdir = None
    +
             # Default root location for workspaces
             self.workspacedir = None
     
    -        # The local binary artifact cache directory
    -        self.artifactdir = None
    -
             # The locations from which to push and pull prebuilt artifacts
             self.artifact_cache_specs = None
     

    @@ -146,6 +155,7 @@ class Context():
             self._log_handle = None
             self._log_filename = None
             self._cascache = None
    +        self._casquota = None
             self._directory = directory
     
         # load()

    @@ -183,13 +193,22 @@ class Context():
                 user_config = _yaml.load(config)
                 _yaml.composite(defaults, user_config)
     
    +        # Give obsoletion warnings
    +        if defaults.get('builddir'):
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "builddir is obsolete, use cachedir")
    +
    +        if defaults.get('artifactdir'):
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "artifactdir is obsolete")
    +
             _yaml.node_validate(defaults, [
    -            'sourcedir', 'builddir', 'artifactdir', 'logdir',
    -            'scheduler', 'artifacts', 'logging', 'projects',
    -            'cache', 'prompt', 'workspacedir', 'remote-execution'
    +            'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler',
    +            'artifacts', 'logging', 'projects', 'cache', 'prompt',
    +            'workspacedir', 'remote-execution',
             ])
     
    -        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
    +        for directory in ['cachedir', 'sourcedir', 'logdir', 'workspacedir']:
                 # Allow the ~ tilde expansion and any environment variables in
                 # path specification in the config files.
                 #

    @@ -199,14 +218,34 @@ class Context():
                 path = os.path.normpath(path)
                 setattr(self, directory, path)
     
    +        # Add directories not set by users
    +        self.extractdir = os.path.join(self.cachedir, 'extract')
    +        self.tmpdir = os.path.join(self.cachedir, 'tmp')
    +        self.casdir = os.path.join(self.cachedir, 'cas')
    +        self.builddir = os.path.join(self.cachedir, 'build')
    +
    +        # Move old artifact cas to cas if it exists and create symlink
    +        old_casdir = os.path.join(self.cachedir, 'artifacts', 'cas')
    +        if (os.path.exists(old_casdir) and not os.path.islink(old_casdir) and
    +                not os.path.exists(self.casdir)):
    +            os.rename(old_casdir, self.casdir)
    +            os.symlink(self.casdir, old_casdir)
    +
             # Load quota configuration
    -        # We need to find the first existing directory in the path of
    -        # our artifactdir - the artifactdir may not have been created
    -        # yet.
    +        # We need to find the first existing directory in the path of our
    +        # cachedir - the cachedir may not have been created yet.
             cache = _yaml.node_get(defaults, Mapping, 'cache')
             _yaml.node_validate(cache, ['quota', 'pull-buildtrees', 'cache-buildtrees'])
     
    -        self.config_cache_quota = _yaml.node_get(cache, str, 'quota')
    +        self.config_cache_quota_string = _yaml.node_get(cache, str, 'quota')
    +        try:
    +            self.config_cache_quota = utils._parse_size(self.config_cache_quota_string,
    +                                                        self.casdir)
    +        except utils.UtilError as e:
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
    +                            "\nValid values are, for example: 800M 10G 1T 50%\n"
    +                            .format(str(e))) from e
     
             # Load artifact share configuration
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)

    @@ -282,15 +321,15 @@ class Context():
     
             return self._artifactcache
     
    -    # get_artifact_cache_usage()
    +    # get_cache_usage()
         #
         # Fetches the current usage of the artifact cache
         #
         # Returns:
    -    #     (ArtifactCacheUsage): The current status
    +    #     (CASCacheUsage): The current status
         #
    -    def get_artifact_cache_usage(self):
    -        return ArtifactCacheUsage(self.artifactcache)
    +    def get_cache_usage(self):
    +        return CASCacheUsage(self.get_casquota())
     
         # add_project():
         #

    @@ -660,9 +699,14 @@ class Context():
     
         def get_cascache(self):
             if self._cascache is None:
    -            self._cascache = CASCache(self.artifactdir)
    +            self._cascache = CASCache(self.cachedir)
             return self._cascache
     
    +    def get_casquota(self):
    +        if self._casquota is None:
    +            self._casquota = CASQuota(self)
    +        return self._casquota
    +
     
     # _node_get_option_str()
     #
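    Note: Context.load() now derives every cache sub-directory from the
    single cachedir root and performs a one-time migration of the old
    artifacts/cas directory. A simplified standalone sketch of that
    migration logic (illustrative only, not the Context implementation):

        import os

        def migrate_casdir(cachedir):
            casdir = os.path.join(cachedir, 'cas')
            old_casdir = os.path.join(cachedir, 'artifacts', 'cas')
            # Move only a real directory (not a symlink left by an earlier
            # migration), and never clobber an existing new-style casdir.
            if (os.path.exists(old_casdir) and not os.path.islink(old_casdir)
                    and not os.path.exists(casdir)):
                os.rename(old_casdir, casdir)
                os.symlink(casdir, old_casdir)  # keep the old path working
            return casdir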

  • buildstream/_frontend/status.py
    @@ -404,7 +404,7 @@ class _StatusHeader():
             #
             #  ~~~~~~ cache: 69% ~~~~~~
             #
    -        usage = self._context.get_artifact_cache_usage()
    +        usage = self._context.get_cache_usage()
             usage_percent = '{}%'.format(usage.used_percent)
     
             size = 21
    

  • buildstream/_frontend/widget.py
    @@ -486,7 +486,7 @@ class LogLine(Widget):
             values["Session Start"] = starttime.strftime('%A, %d-%m-%Y at %H:%M:%S')
             values["Project"] = "{} ({})".format(project.name, project.directory)
             values["Targets"] = ", ".join([t.name for t in stream.targets])
    -        values["Cache Usage"] = "{}".format(context.get_artifact_cache_usage())
    +        values["Cache Usage"] = "{}".format(context.get_cache_usage())
             text += self._format_values(values)
     
             # User configurations

    @@ -495,10 +495,10 @@ class LogLine(Widget):
             values = OrderedDict()
             values["Configuration File"] = \
                 "Default Configuration" if not context.config_origin else context.config_origin
    +        values["Cache Directory"] = context.cachedir
             values["Log Files"] = context.logdir
             values["Source Mirrors"] = context.sourcedir
             values["Build Area"] = context.builddir
    -        values["Artifact Cache"] = context.artifactdir
             values["Strict Build Plan"] = "Yes" if context.get_strict() else "No"
             values["Maximum Fetch Tasks"] = context.sched_fetchers
             values["Maximum Build Tasks"] = context.sched_builders
    

  • buildstream/_scheduler/jobs/cachesizejob.py
    @@ -25,14 +25,14 @@ class CacheSizeJob(Job):
             self._complete_cb = complete_cb
     
             context = self._scheduler.context
    -        self._artifacts = context.artifactcache
    +        self._casquota = context.get_casquota()
     
         def child_process(self):
    -        return self._artifacts.compute_cache_size()
    +        return self._casquota.compute_cache_size()
     
         def parent_complete(self, status, result):
             if status == JobStatus.OK:
    -            self._artifacts.set_cache_size(result)
    +            self._casquota.set_cache_size(result)
     
             if self._complete_cb:
                 self._complete_cb(status, result)
    

  • buildstream/_scheduler/jobs/cleanupjob.py
    @@ -25,27 +25,27 @@ class CleanupJob(Job):
             self._complete_cb = complete_cb
     
             context = self._scheduler.context
    +        self._casquota = context.get_casquota()
             self._artifacts = context.artifactcache
     
         def child_process(self):
             def progress():
                 self.send_message('update-cache-size',
    -                              self._artifacts.get_cache_size())
    +                              self._casquota.get_cache_size())
             return self._artifacts.clean(progress)
     
         def handle_message(self, message_type, message):
    -
             # Update the cache size in the main process as we go,
             # this provides better feedback in the UI.
             if message_type == 'update-cache-size':
    -            self._artifacts.set_cache_size(message)
    +            self._casquota.set_cache_size(message)
                 return True
     
             return False
     
         def parent_complete(self, status, result):
             if status == JobStatus.OK:
    -            self._artifacts.set_cache_size(result)
    +            self._casquota.set_cache_size(result)
     
             if self._complete_cb:
                 self._complete_cb(status, result)
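    Note: the cleaning itself still runs in a child process via
    ArtifactCache.clean(), but size reporting now goes through CASQuota.
    A sketch of the 'update-cache-size' round trip (simplified, outside
    the real Job machinery):

        # Child side: report progress while cleaning.
        def progress(job, casquota):
            job.send_message('update-cache-size', casquota.get_cache_size())

        # Parent side: mirror the reported size into the main process so
        # the UI reflects cleanup progress while clean() is still running.
        def handle_message(casquota, message_type, message):
            if message_type == 'update-cache-size':
                casquota.set_cache_size(message)  # message is a byte count
                return True
            return False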

  • buildstream/data/userconfig.yaml
    @@ -13,11 +13,8 @@
     # Location to store sources
     sourcedir: ${XDG_CACHE_HOME}/buildstream/sources
     
    -# Location to perform builds
    -builddir: ${XDG_CACHE_HOME}/buildstream/build
    -
    -# Location to store local binary artifacts
    -artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
    +# Root location for other directories in the cache
    +cachedir: ${XDG_CACHE_HOME}/buildstream
     
     # Location to store build logs
     logdir: ${XDG_CACHE_HOME}/buildstream/logs
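    Note: with the single cachedir root replacing builddir and artifactdir,
    the sub-directories are derived rather than configured. A sketch of the
    resulting default layout (assuming XDG_CACHE_HOME falls back to
    ~/.cache, as is conventional):

        import os

        xdg_cache = os.environ.get('XDG_CACHE_HOME',
                                    os.path.expanduser('~/.cache'))
        cachedir = os.path.join(xdg_cache, 'buildstream')

        # Derived by Context.load(), no longer user-configurable:
        for sub in ('cas', 'extract', 'tmp', 'build'):
            print(os.path.join(cachedir, sub))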
    

  • buildstream/element.py
    @@ -1450,7 +1450,7 @@ class Element(Plugin):
             # It's advantageous to have this temporary directory on
             # the same file system as the rest of our cache.
             with self.timed_activity("Staging sources", silent_nested=True), \
    -            utils._tempdir(dir=context.artifactdir, prefix='staging-temp') as temp_staging_directory:
    +            utils._tempdir(dir=context.tmpdir, prefix='staging-temp') as temp_staging_directory:
     
                 import_dir = temp_staging_directory
     

  • buildstream/plugintestutils/runcli.py
    ... ... @@ -277,15 +277,13 @@ class Cli():
    277 277
                                        *, cache_dir=None):
    
    278 278
             # Read configuration to figure out where artifacts are stored
    
    279 279
             if not cache_dir:
    
    280
    -            default = os.path.join(project, 'cache', 'artifacts')
    
    281
    -
    
    282
    -            if self.config is not None:
    
    283
    -                cache_dir = self.config.get('artifactdir', default)
    
    284
    -            else:
    
    285
    -                cache_dir = default
    
    280
    +            cache_dir = os.path.join(project, 'cache')
    
    286 281
     
    
    287 282
             cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
    
    288 283
     
    
    284
    +        # replace forward slashes
    
    285
    +        element_name = element_name.replace('/', '-')
    
    286
    +
    
    289 287
             cache_dir = os.path.splitext(os.path.join(cache_dir, 'test', element_name))[0]
    
    290 288
             shutil.rmtree(cache_dir)
    
    291 289
     
    
    ... ... @@ -582,11 +580,21 @@ def cli_integration(tmpdir, integration_cache):
    582 580
         # We want to cache sources for integration tests more permanently,
    
    583 581
         # to avoid downloading the huge base-sdk repeatedly
    
    584 582
         fixture.configure({
    
    583
    +        'cachedir': integration_cache.cachedir,
    
    585 584
             'sourcedir': integration_cache.sources,
    
    586
    -        'artifactdir': integration_cache.artifacts
    
    587 585
         })
    
    588 586
     
    
    589
    -    return fixture
    
    587
    +    yield fixture
    
    588
    +
    
    589
    +    # remove the following folders, if present
    
    590
    +    try:
    
    591
    +        shutil.rmtree(os.path.join(integration_cache.root, 'build'))
    
    592
    +    except FileNotFoundError:
    
    593
    +        pass
    
    594
    +    try:
    
    595
    +        shutil.rmtree(os.path.join(integration_cache.root, 'tmp'))
    
    596
    +    except FileNotFoundError:
    
    597
    +        pass
    
    590 598
     
    
    591 599
     
    
    592 600
     @contextmanager
    
    ... ... @@ -626,10 +634,8 @@ def configured(directory, config=None):
    626 634
     
    
    627 635
         if not config.get('sourcedir', False):
    
    628 636
             config['sourcedir'] = os.path.join(directory, 'sources')
    
    629
    -    if not config.get('builddir', False):
    
    630
    -        config['builddir'] = os.path.join(directory, 'build')
    
    631
    -    if not config.get('artifactdir', False):
    
    632
    -        config['artifactdir'] = os.path.join(directory, 'artifacts')
    
    637
    +    if not config.get('cachedir', False):
    
    638
    +        config['cachedir'] = directory
    
    633 639
         if not config.get('logdir', False):
    
    634 640
             config['logdir'] = os.path.join(directory, 'logs')
    
    635 641
     
    

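The runcli.py change above means the test harness now locates artifact refs
relative to the cache root rather than a dedicated artifactdir. A minimal
sketch of the path computation, assuming the hard-coded 'test' project name
from the hunk is generalised to a parameter; the helper is hypothetical:

    import os

    def artifact_ref_path(cache_dir, project_name, element_name):
        # Refs live under <cachedir>/cas/refs/heads, and forward slashes in
        # element names (elements in subdirectories) are flattened to '-'.
        refs = os.path.join(cache_dir, 'cas', 'refs', 'heads')
        element_name = element_name.replace('/', '-')
        return os.path.splitext(os.path.join(refs, project_name, element_name))[0]

    # e.g. artifact_ref_path('/tmp/proj/cache', 'test', 'base/alpine.bst')
    #   -> '/tmp/proj/cache/cas/refs/heads/test/base-alpine'
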
  • conftest.py
    ... ... @@ -53,16 +53,16 @@ def pytest_runtest_setup(item):
    53 53
     class IntegrationCache():
    
    54 54
     
    
    55 55
         def __init__(self, cache):
    
    56
    -        cache = os.path.abspath(cache)
    
    56
    +        self.root = os.path.abspath(cache)
    
    57 57
             os.makedirs(cache, exist_ok=True)
    
    58 58
     
    
    59 59
             # Use the same sources every time
    
    60
    -        self.sources = os.path.join(cache, 'sources')
    
    60
    +        self.sources = os.path.join(self.root, 'sources')
    
    61 61
     
    
    62 62
             # Create a temp directory for the duration of the test for
    
    63 63
             # the artifacts directory
    
    64 64
             try:
    
    65
    -            self.artifacts = tempfile.mkdtemp(dir=cache, prefix='artifacts-')
    
    65
    +            self.cachedir = tempfile.mkdtemp(dir=self.root, prefix='cache-')
    
    66 66
             except OSError as e:
    
    67 67
                 raise AssertionError("Unable to create test directory !") from e
    
    68 68
     
    
    ... ... @@ -84,7 +84,11 @@ def integration_cache(request):
    84 84
         # Clean up the artifacts after each test run - we only want to
    
    85 85
         # cache sources between runs
    
    86 86
         try:
    
    87
    -        shutil.rmtree(cache.artifacts)
    
    87
    +        shutil.rmtree(cache.cachedir)
    
    88
    +    except FileNotFoundError:
    
    89
    +        pass
    
    90
    +    try:
    
    91
    +        shutil.rmtree(os.path.join(cache.root, 'cas'))
    
    88 92
         except FileNotFoundError:
    
    89 93
             pass
    
    90 94
     
    

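The cleanup blocks above repeat the same tolerant-removal pattern several
times. A small hypothetical helper capturing it, purely illustrative:

    import shutil

    def try_rmtree(path):
        # Remove a directory tree, tolerating tests that never created it.
        try:
            shutil.rmtree(path)
        except FileNotFoundError:
            pass

    # e.g. after a test run:
    #   try_rmtree(cache.cachedir)
    #   try_rmtree(os.path.join(cache.root, 'cas'))
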
  • doc/bst2html.py
    ... ... @@ -194,10 +194,9 @@ def workdir(source_cache=None):
    194 194
     
    
    195 195
             bst_config_file = os.path.join(tempdir, 'buildstream.conf')
    
    196 196
             config = {
    
    197
    +            'cachedir': tempdir,
    
    197 198
                 'sourcedir': source_cache,
    
    198
    -            'artifactdir': os.path.join(tempdir, 'artifacts'),
    
    199 199
                 'logdir': os.path.join(tempdir, 'logs'),
    
    200
    -            'builddir': os.path.join(tempdir, 'build'),
    
    201 200
             }
    
    202 201
             _yaml.dump(config, bst_config_file)
    
    203 202
     
    
    ... ... @@ -411,12 +410,10 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
    411 410
             # Encode and save the output if that was asked for
    
    412 411
             output = _yaml.node_get(command, str, 'output', default_value=None)
    
    413 412
             if output is not None:
    
    414
    -
    
    415 413
                 # Convert / Generate a nice <div>
    
    416 414
                 converted = generate_html(command_out, directory, config_file,
    
    417 415
                                           source_cache, tempdir, palette,
    
    418 416
                                           command_str, command_fake_output is not None)
    
    419
    -
    
    420 417
                 # Save it
    
    421 418
                 filename = os.path.join(desc_dir, output)
    
    422 419
                 filename = os.path.realpath(filename)
    

  • doc/sessions/running-commands.run
    ... ... @@ -2,7 +2,7 @@
    2 2
     commands:
    
    3 3
     # Make it fetch first
    
    4 4
     - directory: ../examples/running-commands
    
    5
    -  command: fetch hello.bst
    
    5
    +  command: source fetch hello.bst
    
    6 6
     
    
    7 7
     # Capture a show output
    
    8 8
     - directory: ../examples/running-commands
    

  • tests/artifactcache/cache_size.py
    ... ... @@ -50,15 +50,15 @@ def test_cache_size_write(cli, tmpdir):
    50 50
         create_project(project_dir)
    
    51 51
     
    
    52 52
         # Artifact cache must be in a known place
    
    53
    -    artifactdir = os.path.join(project_dir, "artifacts")
    
    54
    -    cli.configure({"artifactdir": artifactdir})
    
    53
    +    casdir = os.path.join(project_dir, "cas")
    
    54
    +    cli.configure({"cachedir": project_dir})
    
    55 55
     
    
    56 56
         # Build, to populate the cache
    
    57 57
         res = cli.run(project=project_dir, args=["build", "test.bst"])
    
    58 58
         res.assert_success()
    
    59 59
     
    
    60 60
         # Inspect the artifact cache
    
    61
    -    sizefile = os.path.join(artifactdir, CACHE_SIZE_FILE)
    
    61
    +    sizefile = os.path.join(casdir, CACHE_SIZE_FILE)
    
    62 62
         assert os.path.isfile(sizefile)
    
    63 63
         with open(sizefile, "r") as f:
    
    64 64
             size_data = f.read()
    
    ... ... @@ -81,11 +81,11 @@ def test_quota_over_1024T(cli, tmpdir):
    81 81
         _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
    
    82 82
     
    
    83 83
         volume_space_patch = mock.patch(
    
    84
    -        "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
    
    84
    +        "buildstream._cas.CASQuota._get_cache_volume_size",
    
    85 85
             autospec=True,
    
    86 86
             return_value=(1025 * TiB, 1025 * TiB)
    
    87 87
         )
    
    88 88
     
    
    89 89
         with volume_space_patch:
    
    90 90
             result = cli.run(project, args=["build", "file.bst"])
    
    91
    -        result.assert_main_error(ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')
    91
    +        result.assert_main_error(ErrorDomain.CAS, 'insufficient-storage-for-quota')

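The first hunk above relocates the cache size file from the old artifacts
directory into the CAS directory itself. A minimal sketch of the new lookup;
the CACHE_SIZE_FILE value is assumed here, the real test imports it:

    import os

    CACHE_SIZE_FILE = 'cache_size'   # assumed value, imported in the real test

    def read_cache_size(casdir):
        # The size file now lives directly under <cachedir>/cas.
        sizefile = os.path.join(casdir, CACHE_SIZE_FILE)
        with open(sizefile, 'r') as f:
            return int(f.read())
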
  • tests/artifactcache/expiry.py
    ... ... @@ -341,7 +341,7 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):
    341 341
         ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
    
    342 342
     
    
    343 343
         # Not enough space on disk even if you cleaned up
    
    344
    -    ("11K", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota'),
    
    344
    +    ("11K", ErrorDomain.CAS, 'insufficient-storage-for-quota'),
    
    345 345
     
    
    346 346
         # Not enough space for these caches
    
    347 347
         ("7K", 'warning', 'Your system does not have enough available'),
    
    ... ... @@ -355,7 +355,7 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas
    355 355
         cli.configure({
    
    356 356
             'cache': {
    
    357 357
                 'quota': quota,
    
    358
    -        }
    
    358
    +        },
    
    359 359
         })
    
    360 360
     
    
    361 361
         # We patch how we get space information
    
    ... ... @@ -373,13 +373,13 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reas
    373 373
             total_space = 10000
    
    374 374
     
    
    375 375
         volume_space_patch = mock.patch(
    
    376
    -        "buildstream._artifactcache.ArtifactCache._get_cache_volume_size",
    
    376
    +        "buildstream.utils._get_volume_size",
    
    377 377
             autospec=True,
    
    378 378
             return_value=(total_space, free_space),
    
    379 379
         )
    
    380 380
     
    
    381 381
         cache_size_patch = mock.patch(
    
    382
    -        "buildstream._artifactcache.ArtifactCache.get_cache_size",
    
    382
    +        "buildstream._cas.CASQuota.get_cache_size",
    
    383 383
             autospec=True,
    
    384 384
             return_value=0,
    
    385 385
         )
    
    ... ... @@ -417,7 +417,7 @@ def test_extract_expiry(cli, datafiles, tmpdir):
    417 417
         res.assert_success()
    
    418 418
     
    
    419 419
         # Get a snapshot of the extracts in advance
    
    420
    -    extractdir = os.path.join(project, 'cache', 'artifacts', 'extract', 'test', 'target')
    
    420
    +    extractdir = os.path.join(project, 'cache', 'extract', 'test', 'target')
    
    421 421
         extracts = os.listdir(extractdir)
    
    422 422
         assert(len(extracts) == 1)
    
    423 423
         extract = os.path.join(extractdir, extracts[0])
    
    ... ... @@ -436,7 +436,7 @@ def test_extract_expiry(cli, datafiles, tmpdir):
    436 436
         # Now we should have a directory for the cached target2.bst, which
    
    437 437
         # replaced target.bst in the cache, we should not have a directory
    
    438 438
         # for the target.bst
    
    439
    -    refsdir = os.path.join(project, 'cache', 'artifacts', 'cas', 'refs', 'heads')
    
    439
    +    refsdir = os.path.join(project, 'cache', 'cas', 'refs', 'heads')
    
    440 440
         refsdirtest = os.path.join(refsdir, 'test')
    
    441 441
         refsdirtarget = os.path.join(refsdirtest, 'target')
    
    442 442
         refsdirtarget2 = os.path.join(refsdirtest, 'target2')
    

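Both mock targets in the hunks above follow the quota logic to its new home:
volume probing moves to buildstream.utils and size accounting to
buildstream._cas.CASQuota. A minimal sketch of how a test wires them up; the
patch targets are taken verbatim from the diff, the numbers are placeholders:

    from unittest import mock

    volume_space_patch = mock.patch(
        "buildstream.utils._get_volume_size",
        autospec=True,
        return_value=(10000, 6000),   # (total_space, free_space) in bytes
    )

    cache_size_patch = mock.patch(
        "buildstream._cas.CASQuota.get_cache_size",
        autospec=True,
        return_value=0,
    )

    with volume_space_patch, cache_size_patch:
        pass  # run the command under test here
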
  • tests/artifactcache/junctions.py
    ... ... @@ -70,8 +70,8 @@ def test_push_pull(cli, tmpdir, datafiles):
    70 70
             # Now we've pushed, delete the user's local artifact cache
    
    71 71
             # directory and try to redownload it from the share
    
    72 72
             #
    
    73
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    74
    -        shutil.rmtree(artifacts)
    
    73
    +        cas = os.path.join(cli.directory, 'cas')
    
    74
    +        shutil.rmtree(cas)
    
    75 75
     
    
    76 76
             # Assert that nothing is cached locally anymore
    
    77 77
             state = cli.get_element_state(project, 'target.bst')
    

  • tests/artifactcache/pull.py
    ... ... @@ -57,7 +57,7 @@ def test_pull(cli, tmpdir, datafiles):
    57 57
         # Set up an artifact cache.
    
    58 58
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
    
    59 59
             # Configure artifact share
    
    60
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    
    60
    +        cache_dir = os.path.join(str(tmpdir), 'cache')
    
    61 61
             user_config_file = str(tmpdir.join('buildstream.conf'))
    
    62 62
             user_config = {
    
    63 63
                 'scheduler': {
    
    ... ... @@ -66,7 +66,8 @@ def test_pull(cli, tmpdir, datafiles):
    66 66
                 'artifacts': {
    
    67 67
                     'url': share.repo,
    
    68 68
                     'push': True,
    
    69
    -            }
    
    69
    +            },
    
    70
    +            'cachedir': cache_dir
    
    70 71
             }
    
    71 72
     
    
    72 73
             # Write down the user configuration file
    
    ... ... @@ -93,7 +94,6 @@ def test_pull(cli, tmpdir, datafiles):
    93 94
             # Fake minimal context
    
    94 95
             context = Context()
    
    95 96
             context.load(config=user_config_file)
    
    96
    -        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    
    97 97
             context.set_message_handler(message_handler)
    
    98 98
     
    
    99 99
             # Load the project and CAS cache
    
    ... ... @@ -111,7 +111,7 @@ def test_pull(cli, tmpdir, datafiles):
    111 111
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    112 112
             process = multiprocessing.Process(target=_queue_wrapper,
    
    113 113
                                               args=(_test_pull, queue, user_config_file, project_dir,
    
    114
    -                                                artifact_dir, 'target.bst', element_key))
    
    114
    +                                                cache_dir, 'target.bst', element_key))
    
    115 115
     
    
    116 116
             try:
    
    117 117
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -128,12 +128,14 @@ def test_pull(cli, tmpdir, datafiles):
    128 128
             assert cas.contains(element, element_key)
    
    129 129
     
    
    130 130
     
    
    131
    -def _test_pull(user_config_file, project_dir, artifact_dir,
    
    131
    +def _test_pull(user_config_file, project_dir, cache_dir,
    
    132 132
                    element_name, element_key, queue):
    
    133 133
         # Fake minimal context
    
    134 134
         context = Context()
    
    135 135
         context.load(config=user_config_file)
    
    136
    -    context.artifactdir = artifact_dir
    
    136
    +    context.cachedir = cache_dir
    
    137
    +    context.casdir = os.path.join(cache_dir, 'cas')
    
    138
    +    context.tmpdir = os.path.join(cache_dir, 'tmp')
    
    137 139
         context.set_message_handler(message_handler)
    
    138 140
     
    
    139 141
         # Load the project manually
    
    ... ... @@ -166,7 +168,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
    166 168
         # Set up an artifact cache.
    
    167 169
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
    
    168 170
             # Configure artifact share
    
    169
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    
    171
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
    
    170 172
             user_config_file = str(tmpdir.join('buildstream.conf'))
    
    171 173
             user_config = {
    
    172 174
                 'scheduler': {
    
    ... ... @@ -175,7 +177,8 @@ def test_pull_tree(cli, tmpdir, datafiles):
    175 177
                 'artifacts': {
    
    176 178
                     'url': share.repo,
    
    177 179
                     'push': True,
    
    178
    -            }
    
    180
    +            },
    
    181
    +            'cachedir': rootcache_dir
    
    179 182
             }
    
    180 183
     
    
    181 184
             # Write down the user configuration file
    
    ... ... @@ -196,7 +199,6 @@ def test_pull_tree(cli, tmpdir, datafiles):
    196 199
             # Fake minimal context
    
    197 200
             context = Context()
    
    198 201
             context.load(config=user_config_file)
    
    199
    -        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    
    200 202
             context.set_message_handler(message_handler)
    
    201 203
     
    
    202 204
             # Load the project and CAS cache
    
    ... ... @@ -219,7 +221,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
    219 221
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    220 222
             process = multiprocessing.Process(target=_queue_wrapper,
    
    221 223
                                               args=(_test_push_tree, queue, user_config_file, project_dir,
    
    222
    -                                                artifact_dir, artifact_digest))
    
    224
    +                                                artifact_digest))
    
    223 225
     
    
    224 226
             try:
    
    225 227
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -247,7 +249,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
    247 249
             # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    
    248 250
             process = multiprocessing.Process(target=_queue_wrapper,
    
    249 251
                                               args=(_test_pull_tree, queue, user_config_file, project_dir,
    
    250
    -                                                artifact_dir, tree_digest))
    
    252
    +                                                tree_digest))
    
    251 253
     
    
    252 254
             try:
    
    253 255
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -269,11 +271,10 @@ def test_pull_tree(cli, tmpdir, datafiles):
    269 271
             assert os.path.exists(cas.objpath(directory_digest))
    
    270 272
     
    
    271 273
     
    
    272
    -def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
    
    274
    +def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
    
    273 275
         # Fake minimal context
    
    274 276
         context = Context()
    
    275 277
         context.load(config=user_config_file)
    
    276
    -    context.artifactdir = artifact_dir
    
    277 278
         context.set_message_handler(message_handler)
    
    278 279
     
    
    279 280
         # Load the project manually
    
    ... ... @@ -305,11 +306,10 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
    305 306
             queue.put("No remote configured")
    
    306 307
     
    
    307 308
     
    
    308
    -def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
    
    309
    +def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
    
    309 310
         # Fake minimal context
    
    310 311
         context = Context()
    
    311 312
         context.load(config=user_config_file)
    
    312
    -    context.artifactdir = artifact_dir
    
    313 313
         context.set_message_handler(message_handler)
    
    314 314
     
    
    315 315
         # Load the project manually
    

  • tests/artifactcache/push.py
    ... ... @@ -51,7 +51,7 @@ def test_push(cli, tmpdir, datafiles):
    51 51
         # Set up an artifact cache.
    
    52 52
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
    
    53 53
             # Configure artifact share
    
    54
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    
    54
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
    
    55 55
             user_config_file = str(tmpdir.join('buildstream.conf'))
    
    56 56
             user_config = {
    
    57 57
                 'scheduler': {
    
    ... ... @@ -60,7 +60,8 @@ def test_push(cli, tmpdir, datafiles):
    60 60
                 'artifacts': {
    
    61 61
                     'url': share.repo,
    
    62 62
                     'push': True,
    
    63
    -            }
    
    63
    +            },
    
    64
    +            'cachedir': rootcache_dir
    
    64 65
             }
    
    65 66
     
    
    66 67
             # Write down the user configuration file
    
    ... ... @@ -69,7 +70,6 @@ def test_push(cli, tmpdir, datafiles):
    69 70
             # Fake minimal context
    
    70 71
             context = Context()
    
    71 72
             context.load(config=user_config_file)
    
    72
    -        context.artifactdir = artifact_dir
    
    73 73
             context.set_message_handler(message_handler)
    
    74 74
     
    
    75 75
             # Load the project manually
    
    ... ... @@ -89,7 +89,7 @@ def test_push(cli, tmpdir, datafiles):
    89 89
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    90 90
             process = multiprocessing.Process(target=_queue_wrapper,
    
    91 91
                                               args=(_test_push, queue, user_config_file, project_dir,
    
    92
    -                                                artifact_dir, 'target.bst', element_key))
    
    92
    +                                                'target.bst', element_key))
    
    93 93
     
    
    94 94
             try:
    
    95 95
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -106,12 +106,10 @@ def test_push(cli, tmpdir, datafiles):
    106 106
             assert share.has_artifact('test', 'target.bst', element_key)
    
    107 107
     
    
    108 108
     
    
    109
    -def _test_push(user_config_file, project_dir, artifact_dir,
    
    110
    -               element_name, element_key, queue):
    
    109
    +def _test_push(user_config_file, project_dir, element_name, element_key, queue):
    
    111 110
         # Fake minimal context
    
    112 111
         context = Context()
    
    113 112
         context.load(config=user_config_file)
    
    114
    -    context.artifactdir = artifact_dir
    
    115 113
         context.set_message_handler(message_handler)
    
    116 114
     
    
    117 115
         # Load the project manually
    
    ... ... @@ -152,7 +150,7 @@ def test_push_directory(cli, tmpdir, datafiles):
    152 150
         # Set up an artifact cache.
    
    153 151
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
    
    154 152
             # Configure artifact share
    
    155
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    
    153
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
    
    156 154
             user_config_file = str(tmpdir.join('buildstream.conf'))
    
    157 155
             user_config = {
    
    158 156
                 'scheduler': {
    
    ... ... @@ -161,7 +159,8 @@ def test_push_directory(cli, tmpdir, datafiles):
    161 159
                 'artifacts': {
    
    162 160
                     'url': share.repo,
    
    163 161
                     'push': True,
    
    164
    -            }
    
    162
    +            },
    
    163
    +            'cachedir': rootcache_dir
    
    165 164
             }
    
    166 165
     
    
    167 166
             # Write down the user configuration file
    
    ... ... @@ -170,7 +169,6 @@ def test_push_directory(cli, tmpdir, datafiles):
    170 169
             # Fake minimal context
    
    171 170
             context = Context()
    
    172 171
             context.load(config=user_config_file)
    
    173
    -        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    
    174 172
             context.set_message_handler(message_handler)
    
    175 173
     
    
    176 174
             # Load the project and CAS cache
    
    ... ... @@ -198,7 +196,7 @@ def test_push_directory(cli, tmpdir, datafiles):
    198 196
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    199 197
             process = multiprocessing.Process(target=_queue_wrapper,
    
    200 198
                                               args=(_test_push_directory, queue, user_config_file,
    
    201
    -                                                project_dir, artifact_dir, artifact_digest))
    
    199
    +                                                project_dir, artifact_digest))
    
    202 200
     
    
    203 201
             try:
    
    204 202
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -216,11 +214,10 @@ def test_push_directory(cli, tmpdir, datafiles):
    216 214
             assert share.has_object(artifact_digest)
    
    217 215
     
    
    218 216
     
    
    219
    -def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
    
    217
    +def _test_push_directory(user_config_file, project_dir, artifact_digest, queue):
    
    220 218
         # Fake minimal context
    
    221 219
         context = Context()
    
    222 220
         context.load(config=user_config_file)
    
    223
    -    context.artifactdir = artifact_dir
    
    224 221
         context.set_message_handler(message_handler)
    
    225 222
     
    
    226 223
         # Load the project manually
    
    ... ... @@ -254,6 +251,7 @@ def test_push_message(cli, tmpdir, datafiles):
    254 251
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
    
    255 252
             # Configure artifact share
    
    256 253
             artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    
    254
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
    
    257 255
             user_config_file = str(tmpdir.join('buildstream.conf'))
    
    258 256
             user_config = {
    
    259 257
                 'scheduler': {
    
    ... ... @@ -262,7 +260,8 @@ def test_push_message(cli, tmpdir, datafiles):
    262 260
                 'artifacts': {
    
    263 261
                     'url': share.repo,
    
    264 262
                     'push': True,
    
    265
    -            }
    
    263
    +            },
    
    264
    +            'cachedir': rootcache_dir
    
    266 265
             }
    
    267 266
     
    
    268 267
             # Write down the user configuration file
    
    ... ... @@ -273,7 +272,7 @@ def test_push_message(cli, tmpdir, datafiles):
    273 272
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    274 273
             process = multiprocessing.Process(target=_queue_wrapper,
    
    275 274
                                               args=(_test_push_message, queue, user_config_file,
    
    276
    -                                                project_dir, artifact_dir))
    
    275
    +                                                project_dir))
    
    277 276
     
    
    278 277
             try:
    
    279 278
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -292,11 +291,10 @@ def test_push_message(cli, tmpdir, datafiles):
    292 291
             assert share.has_object(message_digest)
    
    293 292
     
    
    294 293
     
    
    295
    -def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
    
    294
    +def _test_push_message(user_config_file, project_dir, queue):
    
    296 295
         # Fake minimal context
    
    297 296
         context = Context()
    
    298 297
         context.load(config=user_config_file)
    
    299
    -    context.artifactdir = artifact_dir
    
    300 298
         context.set_message_handler(message_handler)
    
    301 299
     
    
    302 300
         # Load the project manually
    

  • tests/frontend/pull.py
    ... ... @@ -64,8 +64,8 @@ def test_push_pull_all(cli, tmpdir, datafiles):
    64 64
             # Now we've pushed, delete the user's local artifact cache
    
    65 65
             # directory and try to redownload it from the share
    
    66 66
             #
    
    67
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    68
    -        shutil.rmtree(artifacts)
    
    67
    +        cas = os.path.join(cli.directory, 'cas')
    
    68
    +        shutil.rmtree(cas)
    
    69 69
     
    
    70 70
             # Assert that nothing is cached locally anymore
    
    71 71
             states = cli.get_element_states(project, all_elements)
    
    ... ... @@ -114,7 +114,7 @@ def test_push_pull_default_targets(cli, tmpdir, datafiles):
    114 114
             # Now we've pushed, delete the user's local artifact cache
    
    115 115
             # directory and try to redownload it from the share
    
    116 116
             #
    
    117
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    117
    +        artifacts = os.path.join(cli.directory, 'cas')
    
    118 118
             shutil.rmtree(artifacts)
    
    119 119
     
    
    120 120
             # Assert that nothing is cached locally anymore
    
    ... ... @@ -156,8 +156,8 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
    156 156
             assert_shared(cli, share2, project, 'target.bst')
    
    157 157
     
    
    158 158
             # Delete the user's local artifact cache.
    
    159
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    160
    -        shutil.rmtree(artifacts)
    
    159
    +        cas = os.path.join(cli.directory, 'cas')
    
    160
    +        shutil.rmtree(cas)
    
    161 161
     
    
    162 162
             # Assert that the element is not cached anymore.
    
    163 163
             assert cli.get_element_state(project, 'target.bst') != 'cached'
    
    ... ... @@ -210,8 +210,8 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
    210 210
             # Now we've pushed, delete the user's local artifact cache
    
    211 211
             # directory and try to redownload it from the good_share.
    
    212 212
             #
    
    213
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    214
    -        shutil.rmtree(artifacts)
    
    213
    +        cas = os.path.join(cli.directory, 'cas')
    
    214
    +        shutil.rmtree(cas)
    
    215 215
     
    
    216 216
             result = cli.run(project=project, args=['artifact', 'pull', 'target.bst', '--remote',
    
    217 217
                                                     good_share.repo])
    
    ... ... @@ -251,8 +251,8 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
    251 251
             # Now we've pushed, delete the user's local artifact cache
    
    252 252
             # directory and try to redownload it from the share
    
    253 253
             #
    
    254
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    255
    -        shutil.rmtree(artifacts)
    
    254
    +        cas = os.path.join(cli.directory, 'cas')
    
    255
    +        shutil.rmtree(cas)
    
    256 256
     
    
    257 257
             # Assert that nothing is cached locally anymore
    
    258 258
             for element_name in all_elements:
    
    ... ... @@ -301,8 +301,8 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
    301 301
             # Now we've pushed, delete the user's local artifact cache
    
    302 302
             # directory and try to redownload it from the share
    
    303 303
             #
    
    304
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    305
    -        shutil.rmtree(artifacts)
    
    304
    +        cas = os.path.join(cli.directory, 'cas')
    
    305
    +        shutil.rmtree(cas)
    
    306 306
     
    
    307 307
             # Assert that nothing is cached locally anymore
    
    308 308
             for element_name in all_elements:
    
    ... ... @@ -337,7 +337,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
    337 337
             result.assert_success()
    
    338 338
             assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
    
    339 339
     
    
    340
    -        cache_dir = os.path.join(project, 'cache', 'artifacts')
    
    340
    +        cache_dir = os.path.join(project, 'cache', 'cas')
    
    341 341
             shutil.rmtree(cache_dir)
    
    342 342
     
    
    343 343
             assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
    
    ... ... @@ -372,8 +372,8 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
    372 372
             # Now we've pushed, delete the user's local artifact cache
    
    373 373
             # directory and try to redownload it from the share
    
    374 374
             #
    
    375
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    376
    -        shutil.rmtree(artifacts)
    
    375
    +        cas = os.path.join(cli.directory, 'cas')
    
    376
    +        shutil.rmtree(cas)
    
    377 377
     
    
    378 378
             # Assert that nothing is cached locally anymore
    
    379 379
             for element_name in all_elements:
    
    ... ... @@ -510,8 +510,8 @@ def test_pull_access_rights(caplog, cli, tmpdir, datafiles):
    510 510
     
    
    511 511
             shutil.rmtree(checkout)
    
    512 512
     
    
    513
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    514
    -        shutil.rmtree(artifacts)
    
    513
    +        casdir = os.path.join(cli.directory, 'cas')
    
    514
    +        shutil.rmtree(casdir)
    
    515 515
     
    
    516 516
             result = cli.run(project=project, args=['artifact', 'pull', 'compose-all.bst'])
    
    517 517
             result.assert_success()
    

  • tests/integration/artifact.py
    ... ... @@ -87,7 +87,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
    87 87
             create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
    
    88 88
             cli.configure({
    
    89 89
                 'artifacts': {'url': share1.repo, 'push': True},
    
    90
    -            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
    
    90
    +            'cachedir': str(tmpdir)
    
    91 91
             })
    
    92 92
     
    
    93 93
             # Build autotools element with cache-buildtrees set via the
    
    ... ... @@ -103,20 +103,22 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
    103 103
             # to not cache buildtrees
    
    104 104
             cache_key = cli.get_element_key(project, element_name)
    
    105 105
             elementdigest = share1.has_artifact('test', element_name, cache_key)
    
    106
    -        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
    
    106
    +        buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
    
    107 107
                                         elementdigest.hash, 'buildtree')
    
    108 108
             assert os.path.isdir(buildtreedir)
    
    109 109
             assert not os.listdir(buildtreedir)
    
    110 110
     
    
    111 111
        # Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
    
    112 112
        # that it was cached in share1 as expected with an empty buildtree dir
    
    113
    -        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    
    113
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
    
    114
    +        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
    
    114 115
             assert cli.get_element_state(project, element_name) != 'cached'
    
    115 116
             result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
    
    116 117
             assert element_name in result.get_pulled_elements()
    
    117 118
             assert os.path.isdir(buildtreedir)
    
    118 119
             assert not os.listdir(buildtreedir)
    
    119
    -        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    
    120
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
    
    121
    +        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
    
    120 122
     
    
    121 123
             # Assert that the default behaviour of pull to not include buildtrees on the artifact
    
    122 124
             # in share1 which was purposely cached with an empty one behaves as expected. As such the
    
    ... ... @@ -125,13 +127,14 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
    125 127
             result = cli.run(project=project, args=['artifact', 'pull', element_name])
    
    126 128
             assert element_name in result.get_pulled_elements()
    
    127 129
             assert not os.path.isdir(buildtreedir)
    
    128
    -        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    
    130
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
    
    131
    +        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
    
    129 132
     
    
    130 133
             # Repeat building the artifacts, this time with the default behaviour of caching buildtrees,
    
    131 134
             # as such the buildtree dir should not be empty
    
    132 135
             cli.configure({
    
    133 136
                 'artifacts': {'url': share2.repo, 'push': True},
    
    134
    -            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
    
    137
    +            'cachedir': str(tmpdir)
    
    135 138
             })
    
    136 139
             result = cli.run(project=project, args=['build', element_name])
    
    137 140
             assert result.exit_code == 0
    
    ... ... @@ -140,27 +143,29 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
    140 143
     
    
    141 144
             # Cache key will be the same however the digest hash will have changed as expected, so reconstruct paths
    
    142 145
             elementdigest = share2.has_artifact('test', element_name, cache_key)
    
    143
    -        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
    
    146
    +        buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
    
    144 147
                                         elementdigest.hash, 'buildtree')
    
    145 148
             assert os.path.isdir(buildtreedir)
    
    146 149
             assert os.listdir(buildtreedir) is not None
    
    147 150
     
    
    148 151
             # Delete the local cached artifacts, and assert that when pulled with --pull-buildtrees
    
    149 152
             # that it was cached in share2 as expected with a populated buildtree dir
    
    150
    -        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    
    153
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
    
    154
    +        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
    
    151 155
             assert cli.get_element_state(project, element_name) != 'cached'
    
    152 156
             result = cli.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
    
    153 157
             assert element_name in result.get_pulled_elements()
    
    154 158
             assert os.path.isdir(buildtreedir)
    
    155 159
             assert os.listdir(buildtreedir) is not None
    
    156
    -        shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    
    160
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
    
    161
    +        shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
    
    157 162
     
    
    158 163
             # Clarify that the user config option for cache-buildtrees works as the cli
    
    159 164
             # main option does. Point to share3 which does not have the artifacts cached to force
    
    160 165
             # a build
    
    161 166
             cli.configure({
    
    162 167
                 'artifacts': {'url': share3.repo, 'push': True},
    
    163
    -            'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
    
    168
    +            'cachedir': str(tmpdir),
    
    164 169
                 'cache': {'cache-buildtrees': 'never'}
    
    165 170
             })
    
    166 171
             result = cli.run(project=project, args=['build', element_name])
    
    ... ... @@ -168,7 +173,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
    168 173
             assert cli.get_element_state(project, element_name) == 'cached'
    
    169 174
             cache_key = cli.get_element_key(project, element_name)
    
    170 175
             elementdigest = share3.has_artifact('test', element_name, cache_key)
    
    171
    -        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
    
    176
    +        buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
    
    172 177
                                         elementdigest.hash, 'buildtree')
    
    173 178
             assert os.path.isdir(buildtreedir)
    
    174 179
             assert not os.listdir(buildtreedir)

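With the artifact store split into 'cas' and 'extract' under the cache root,
discarding local artifacts now takes two removals instead of one, as the
repeated rmtree pairs above show. A hypothetical helper for the pattern,
assuming both directories exist:

    import os
    import shutil

    def wipe_local_artifacts(cachedir):
        # Remove both the CAS objects and the extracted artifact trees.
        shutil.rmtree(os.path.join(cachedir, 'cas'))
        shutil.rmtree(os.path.join(cachedir, 'extract'))

    # e.g. wipe_local_artifacts(str(tmpdir))
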
  • tests/integration/cachedfail.py
    ... ... @@ -160,7 +160,6 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
    160 160
     
    
    161 161
             # This element should have failed
    
    162 162
             assert cli.get_element_state(project, 'element.bst') == 'failed'
    
    163
    -        # This element should have been pushed to the remote
    
    164 163
             assert share.has_artifact('test', 'element.bst', cli.get_element_key(project, 'element.bst'))
    
    165 164
     
    
    166 165
     
    

  • tests/integration/messages.py
    ... ... @@ -39,7 +39,7 @@ DATA_DIR = os.path.join(
    39 39
     @pytest.mark.integration
    
    40 40
     @pytest.mark.datafiles(DATA_DIR)
    
    41 41
     @pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
    
    42
    -def test_disable_message_lines(cli, tmpdir, datafiles):
    
    42
    +def test_disable_message_lines(cli, tmpdir, datafiles, integration_cache):
    
    43 43
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    44 44
         element_path = os.path.join(project, 'elements')
    
    45 45
         element_name = 'message.bst'
    
    ... ... @@ -65,7 +65,7 @@ def test_disable_message_lines(cli, tmpdir, datafiles):
    65 65
         assert 'echo "Silly message"' in result.stderr
    
    66 66
     
    
    67 67
         # Let's now build it again, but with --message-lines 0
    
    68
    -    cli.remove_artifact_from_cache(project, element_name)
    
    68
    +    cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
    
    69 69
         result = cli.run(project=project, args=["--message-lines", "0",
    
    70 70
                                                 "build", element_name])
    
    71 71
         result.assert_success()
    
    ... ... @@ -75,7 +75,7 @@ def test_disable_message_lines(cli, tmpdir, datafiles):
    75 75
     @pytest.mark.integration
    
    76 76
     @pytest.mark.datafiles(DATA_DIR)
    
    77 77
     @pytest.mark.skipif(not HAVE_SANDBOX, reason='Only available with a functioning sandbox')
    
    78
    -def test_disable_error_lines(cli, tmpdir, datafiles):
    
    78
    +def test_disable_error_lines(cli, tmpdir, datafiles, integration_cache):
    
    79 79
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    80 80
         element_path = os.path.join(project, 'elements')
    
    81 81
         element_name = 'message.bst'
    
    ... ... @@ -102,7 +102,7 @@ def test_disable_error_lines(cli, tmpdir, datafiles):
    102 102
         assert "This is a syntax error" in result.stderr
    
    103 103
     
    
    104 104
         # Let's now build it again, but with --error-lines 0
    
    105
    -    cli.remove_artifact_from_cache(project, element_name)
    
    105
    +    cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
    
    106 106
         result = cli.run(project=project, args=["--error-lines", "0",
    
    107 107
                                                 "build", element_name])
    
    108 108
         result.assert_main_error(ErrorDomain.STREAM, None)
    

  • tests/integration/pullbuildtrees.py
    ... ... @@ -21,10 +21,11 @@ DATA_DIR = os.path.join(
    21 21
     # to false, which is the default user context. The cache has to be
    
    22 22
     # cleared as just forcefully removing the refpath leaves dangling objects.
    
    23 23
     def default_state(cli, tmpdir, share):
    
    24
    -    shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    
    24
    +    shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
    
    25
    +    shutil.rmtree(os.path.join(str(tmpdir), 'extract'))
    
    25 26
         cli.configure({
    
    26 27
             'artifacts': {'url': share.repo, 'push': False},
    
    27
    -        'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
    
    28
    +        'cachedir': str(tmpdir),
    
    28 29
             'cache': {'pull-buildtrees': False},
    
    29 30
         })
    
    30 31
     
    
    ... ... @@ -45,7 +46,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
    45 46
             create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
    
    46 47
             cli2.configure({
    
    47 48
                 'artifacts': {'url': share1.repo, 'push': True},
    
    48
    -            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
    
    49
    +            'cachedir': str(tmpdir),
    
    49 50
             })
    
    50 51
     
    
    51 52
             # Build autotools element, checked pushed, delete local
    
    ... ... @@ -74,7 +75,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
    74 75
             result = cli2.run(project=project, args=['artifact', 'pull', element_name])
    
    75 76
             assert element_name in result.get_pulled_elements()
    
    76 77
             elementdigest = share1.has_artifact('test', element_name, cli2.get_element_key(project, element_name))
    
    77
    -        buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
    
    78
    +        buildtreedir = os.path.join(str(tmpdir), 'extract', 'test', 'autotools-amhello',
    
    78 79
                                         elementdigest.hash, 'buildtree')
    
    79 80
             assert not os.path.isdir(buildtreedir)
    
    80 81
             result = cli2.run(project=project, args=['--pull-buildtrees', 'artifact', 'pull', element_name])
    

  • tests/integration/shellbuildtrees.py
    ... ... @@ -62,7 +62,7 @@ def test_buildtree_staged_warn_empty_cached(cli_integration, tmpdir, datafiles):
    62 62
         # Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
    
    63 63
         # caching an empty buildtree
    
    64 64
         cli_integration.configure({
    
    65
    -        'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
    
    65
    +        'cachedir': str(tmpdir)
    
    66 66
         })
    
    67 67
     
    
    68 68
         res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
    
    ... ... @@ -139,7 +139,7 @@ def test_buildtree_from_failure_option_never(cli_integration, tmpdir, datafiles)
    139 139
         # Switch to a temp artifact cache dir to ensure the artifact is rebuilt,
    
    140 140
         # caching an empty buildtree
    
    141 141
         cli_integration.configure({
    
    142
    -        'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
    
    142
    +        'cachedir': str(tmpdir)
    
    143 143
         })
    
    144 144
     
    
    145 145
         res = cli_integration.run(project=project, args=['--cache-buildtrees', 'never', 'build', element_name])
    
    ... ... @@ -163,7 +163,7 @@ def test_buildtree_from_failure_option_failure(cli_integration, tmpdir, datafile
    163 163
         # default behaviour (which is always) as the buildtree will explicitly have been
    
    164 164
         # cached with content.
    
    165 165
         cli_integration.configure({
    
    166
    -        'artifactdir': os.path.join(os.path.join(str(tmpdir), 'artifacts'))
    
    166
    +        'cachedir': str(tmpdir)
    
    167 167
         })
    
    168 168
     
    
    169 169
         res = cli_integration.run(project=project, args=['--cache-buildtrees', 'failure', 'build', element_name])
    
    ... ... @@ -195,10 +195,7 @@ def test_buildtree_pulled(cli, tmpdir, datafiles):
    195 195
             assert cli.get_element_state(project, element_name) == 'cached'
    
    196 196
     
    
    197 197
             # Discard the cache
    
    198
    -        cli.configure({
    
    199
    -            'artifacts': {'url': share.repo, 'push': True},
    
    200
    -            'artifactdir': os.path.join(cli.directory, 'artifacts2')
    
    201
    -        })
    
    198
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
    
    202 199
             assert cli.get_element_state(project, element_name) != 'cached'
    
    203 200
     
    
    204 201
             # Pull from cache, ensuring cli options is set to pull the buildtree
    
    ... ... @@ -231,10 +228,7 @@ def test_buildtree_options(cli, tmpdir, datafiles):
    231 228
             assert share.has_artifact('test', element_name, cli.get_element_key(project, element_name))
    
    232 229
     
    
    233 230
             # Discard the cache
    
    234
    -        cli.configure({
    
    235
    -            'artifacts': {'url': share.repo, 'push': True},
    
    236
    -            'artifactdir': os.path.join(cli.directory, 'artifacts2')
    
    237
    -        })
    
    231
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
    
    238 232
             assert cli.get_element_state(project, element_name) != 'cached'
    
    239 233
     
    
    240 234
             # Pull from cache, but do not include buildtrees.
    
    ... ... @@ -274,7 +268,7 @@ def test_buildtree_options(cli, tmpdir, datafiles):
    274 268
             ])
    
    275 269
             assert 'Attempting to fetch missing artifact buildtree' in res.stderr
    
    276 270
             assert 'Hi' in res.output
    
    277
    -        shutil.rmtree(os.path.join(os.path.join(cli.directory, 'artifacts2')))
    
    271
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
    
    278 272
             assert cli.get_element_state(project, element_name) != 'cached'
    
    279 273
     
    
    280 274
             # Check it's not loading the shell at all with always set for the buildtree, when the
    

  • tests/integration/source-determinism.py
    ... ... @@ -94,9 +94,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca
    94 94
                     return f.read()
    
    95 95
             finally:
    
    96 96
                 os.umask(old_umask)
    
    97
    -            cache_dir = integration_cache.artifacts
    
    98
    -            cli.remove_artifact_from_cache(project, element_name,
    
    99
    -                                           cache_dir=cache_dir)
    
    97
    +            cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
    
    100 98
     
    
    101 99
         assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
    
    102 100
     
    
    ... ... @@ -156,8 +154,6 @@ def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
    156 154
                 with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
    
    157 155
                     return f.read()
    
    158 156
             finally:
    
    159
    -            cache_dir = integration_cache.artifacts
    
    160
    -            cli.remove_artifact_from_cache(project, element_name,
    
    161
    -                                           cache_dir=cache_dir)
    
    157
    +            cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
    
    162 158
     
    
    163 159
         assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)

  • tests/internals/context.py
    ... ... @@ -43,7 +43,7 @@ def test_context_load(context_fixture):
    43 43
         context.load(config=os.devnull)
    
    44 44
         assert(context.sourcedir == os.path.join(cache_home, 'buildstream', 'sources'))
    
    45 45
         assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build'))
    
    46
    -    assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts'))
    
    46
    +    assert(context.cachedir == os.path.join(cache_home, 'buildstream'))
    
    47 47
         assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs'))
    
    48 48
     
    
    49 49
     
    
    ... ... @@ -57,7 +57,7 @@ def test_context_load_envvar(context_fixture):
    57 57
         context.load(config=os.devnull)
    
    58 58
         assert(context.sourcedir == os.path.join('/', 'some', 'path', 'buildstream', 'sources'))
    
    59 59
         assert(context.builddir == os.path.join('/', 'some', 'path', 'buildstream', 'build'))
    
    60
    -    assert(context.artifactdir == os.path.join('/', 'some', 'path', 'buildstream', 'artifacts'))
    
    60
    +    assert(context.cachedir == os.path.join('/', 'some', 'path', 'buildstream'))
    
    61 61
         assert(context.logdir == os.path.join('/', 'some', 'path', 'buildstream', 'logs'))
    
    62 62
     
    
    63 63
         # Reset the environment variable
    
    ... ... @@ -79,7 +79,7 @@ def test_context_load_user_config(context_fixture, datafiles):
    79 79
     
    
    80 80
         assert(context.sourcedir == os.path.expanduser('~/pony'))
    
    81 81
         assert(context.builddir == os.path.join(cache_home, 'buildstream', 'build'))
    
    82
    -    assert(context.artifactdir == os.path.join(cache_home, 'buildstream', 'artifacts'))
    
    82
    +    assert(context.cachedir == os.path.join(cache_home, 'buildstream'))
    
    83 83
         assert(context.logdir == os.path.join(cache_home, 'buildstream', 'logs'))
    
    84 84
     
    
    85 85
     
    

  • tests/internals/pluginloading.py
    ... ... @@ -16,7 +16,7 @@ def create_pipeline(tmpdir, basedir, target):
    16 16
         context = Context()
    
    17 17
         context.load(config=os.devnull)
    
    18 18
         context.deploydir = os.path.join(str(tmpdir), 'deploy')
    
    19
    -    context.artifactdir = os.path.join(str(tmpdir), 'artifact')
    
    19
    +    context.casdir = os.path.join(str(tmpdir), 'cas')
    
    20 20
         project = Project(basedir, context)
    
    21 21
     
    
    22 22
         def dummy_handler(message, context):
    

  • tests/testutils/artifactshare.py
    ... ... @@ -46,7 +46,6 @@ class ArtifactShare():
    46 46
             # in tests as a remote artifact push/pull configuration
    
    47 47
             #
    
    48 48
             self.repodir = os.path.join(self.directory, 'repo')
    
    49
    -
    
    50 49
             os.makedirs(self.repodir)
    
    51 50
     
    
    52 51
             self.cas = CASCache(self.repodir)
    
    ... ... @@ -171,7 +170,9 @@ class ArtifactShare():
    171 170
     
    
    172 171
         def _mock_statvfs(self, path):
    
    173 172
             repo_size = 0
    
    174
    -        for root, _, files in os.walk(self.repodir):
    
    173
    +        for root, dirs, files in os.walk(self.repodir):
    
    174
    +            for dirname in dirs:
    
    175
    +                repo_size += os.path.getsize(os.path.join(root, dirname))
    
    175 176
                 for filename in files:
    
    176 177
                     repo_size += os.path.getsize(os.path.join(root, filename))
    
    177 178
     
    


