[Notes] [Git][BuildStream/buildstream][tristan/finish-backport-of-679] 4 commits: _frontend/linuxapp.py: Fixing fallout from !693




Tristan Van Berkom pushed to branch tristan/finish-backport-of-679 at BuildStream / buildstream

Commits:

10 changed files:

Changes:

  • buildstream/_artifactcache/__init__.py
    @@ -17,4 +17,4 @@
     #  Authors:
     #        Tristan Van Berkom <tristan vanberkom codethink co uk>
     
    -from .artifactcache import ArtifactCache, ArtifactCacheSpec
    +from .artifactcache import ArtifactCache, ArtifactCacheSpec, CACHE_SIZE_FILE

  • buildstream/_artifactcache/artifactcache.py
    @@ -28,6 +28,9 @@ from .. import utils
     from .. import _yaml
     
     
    +CACHE_SIZE_FILE = "cache_size"
    +
    +
     # An ArtifactCacheSpec holds the user configuration for a single remote
     # artifact cache.
     #
    @@ -82,7 +85,6 @@ class ArtifactCache():
             self.extractdir = os.path.join(context.artifactdir, 'extract')
             self.tmpdir = os.path.join(context.artifactdir, 'tmp')
     
    -        self.max_size = context.cache_quota
             self.estimated_size = None
     
             self.global_remote_specs = []
    @@ -90,6 +92,8 @@ class ArtifactCache():
     
             self._local = False
             self.cache_size = None
    +        self.cache_quota = None
    +        self.cache_lower_threshold = None
     
             os.makedirs(self.extractdir, exist_ok=True)
             os.makedirs(self.tmpdir, exist_ok=True)
    @@ -227,7 +231,7 @@ class ArtifactCache():
         def clean(self):
             artifacts = self.list_artifacts()
     
    -        while self.calculate_cache_size() >= self.context.cache_quota - self.context.cache_lower_threshold:
    +        while self.calculate_cache_size() >= self.cache_quota - self.cache_lower_threshold:
                 try:
                     to_remove = artifacts.pop(0)
                 except IndexError:
    @@ -241,7 +245,7 @@ class ArtifactCache():
                               "Please increase the cache-quota in {}."
                               .format(self.context.config_origin or default_conf))
     
    -                if self.calculate_cache_size() > self.context.cache_quota:
    +                if self.calculate_cache_size() > self.cache_quota:
                         raise ArtifactError("Cache too full. Aborting.",
                                             detail=detail,
                                             reason="cache-too-full")
    @@ -282,7 +286,11 @@ class ArtifactCache():
             # If we don't currently have an estimate, figure out the real
             # cache size.
             if self.estimated_size is None:
    -            self.estimated_size = self.calculate_cache_size()
    +            stored_size = self._read_cache_size()
    +            if stored_size is not None:
    +                self.estimated_size = stored_size
    +            else:
    +                self.estimated_size = self.calculate_cache_size()
     
             return self.estimated_size
     
    @@ -541,6 +549,7 @@ class ArtifactCache():
                 self.estimated_size = self.calculate_cache_size()
     
             self.estimated_size += artifact_size
    +        self._write_cache_size(self.estimated_size)
     
         # _set_cache_size()
         #
    @@ -551,6 +560,109 @@ class ArtifactCache():
         def _set_cache_size(self, cache_size):
             self.estimated_size = cache_size
     
    +        # set_cache_size is called in cleanup, where it may set the cache to None
    +        if self.estimated_size is not None:
    +            self._write_cache_size(self.estimated_size)
    +
    +    # _write_cache_size()
    +    #
    +    # Writes the given size of the artifact to the cache's size file
    +    #
    +    def _write_cache_size(self, size):
    +        assert isinstance(size, int)
    +        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
    +        with open(size_file_path, "w") as f:
    +            f.write(str(size))
    +
    +    # _read_cache_size()
    +    #
    +    # Reads and returns the size of the artifact cache that's stored in the
    +    # cache's size file
    +    #
    +    def _read_cache_size(self):
    +        size_file_path = os.path.join(self.context.artifactdir, CACHE_SIZE_FILE)
    +
    +        if not os.path.exists(size_file_path):
    +            return None
    +
    +        with open(size_file_path, "r") as f:
    +            size = f.read()
    +
    +        try:
    +            num_size = int(size)
    +        except ValueError as e:
    +            raise ArtifactError("Size '{}' parsed from '{}' was not an integer".format(
    +                size, size_file_path)) from e
    +
    +        return num_size
    +
    +    # _calculate_cache_quota()
    +    #
    +    # Calculates and sets the cache quota and lower threshold based on the
    +    # quota set in Context.
    +    # It checks that the quota is both a valid expression, and that there is
    +    # enough disk space to satisfy that quota
    +    #
    +    def _calculate_cache_quota(self):
    +        # Headroom intended to give BuildStream a bit of leeway.
    +        # This acts as the minimum size of cache_quota and also
    +        # is taken from the user requested cache_quota.
    +        #
    +        if 'BST_TEST_SUITE' in os.environ:
    +            headroom = 0
    +        else:
    +            headroom = 2e9
    +
    +        artifactdir_volume = self.context.artifactdir
    +        while not os.path.exists(artifactdir_volume):
    +            artifactdir_volume = os.path.dirname(artifactdir_volume)
    +
    +        try:
    +            cache_quota = utils._parse_size(self.context.config_cache_quota, artifactdir_volume)
    +        except utils.UtilError as e:
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
    +                            "\nValid values are, for example: 800M 10G 1T 50%\n"
    +                            .format(str(e))) from e
    +
    +        stat = os.statvfs(artifactdir_volume)
    +        available_space = (stat.f_bsize * stat.f_bavail)
    +
    +        cache_size = self.get_approximate_cache_size()
    +
    +        # Ensure system has enough storage for the cache_quota
    +        #
    +        # If cache_quota is none, set it to the maximum it could possibly be.
    +        #
    +        # Also check that cache_quota is atleast as large as our headroom.
    +        #
    +        if cache_quota is None:  # Infinity, set to max system storage
    +            cache_quota = cache_size + available_space
    +        if cache_quota < headroom:  # Check minimum
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
    +                            "BuildStream requires a minimum cache quota of 2G.")
    +        elif cache_quota > cache_size + available_space:  # Check maximum
    +            raise LoadError(LoadErrorReason.INVALID_DATA,
    +                            ("Your system does not have enough available " +
    +                             "space to support the cache quota specified.\n" +
    +                             "You currently have:\n" +
    +                             "- {used} of cache in use at {local_cache_path}\n" +
    +                             "- {available} of available system storage").format(
    +                                 used=utils._pretty_size(cache_size),
    +                                 local_cache_path=self.context.artifactdir,
    +                                 available=utils._pretty_size(available_space)))
    +
    +        # Place a slight headroom (2e9 (2GB) on the cache_quota) into
    +        # cache_quota to try and avoid exceptions.
    +        #
    +        # Of course, we might still end up running out during a build
    +        # if we end up writing more than 2G, but hey, this stuff is
    +        # already really fuzzy.
    +        #
    +        self.cache_quota = cache_quota - headroom
    +        self.cache_lower_threshold = self.cache_quota / 2
    +
     
     # _configured_remote_artifact_cache_specs():
     #
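
    Taken on its own, the quota arithmetic in _calculate_cache_quota() reduces to a small amount of standalone logic; below is a minimal sketch of that calculation, using illustrative names rather than the BuildStream API:

        import os

        def calculate_quota(artifactdir, configured_quota, current_cache_size, headroom=2e9):
            # Walk up from the artifact directory until a path exists, so
            # statvfs() can be queried even before the directory is created.
            volume = artifactdir
            while not os.path.exists(volume):
                volume = os.path.dirname(volume)

            stat = os.statvfs(volume)
            available = stat.f_bsize * stat.f_bavail

            # None means "infinity": cap the quota at the current cache size
            # plus whatever the volume still has free.
            if configured_quota is None:
                configured_quota = current_cache_size + available

            # Subtract the headroom so a build is unlikely to fill the disk,
            # and aim to clean down to half of the effective quota.
            quota = configured_quota - headroom
            return quota, quota / 2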
    

  • buildstream/_artifactcache/cascache.py
    @@ -60,6 +60,8 @@ class CASCache(ArtifactCache):
             os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
             os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
     
    +        self._calculate_cache_quota()
    +
             self._enable_push = enable_push
     
             # Per-project list of _CASRemote instances.
    @@ -326,7 +328,7 @@ class CASCache(ArtifactCache):
                                         request.write_offset = offset
                                         # max. 64 kB chunks
                                         request.data = f.read(chunk_size)
    -                                    request.resource_name = resource_name
    +                                    request.resource_name = resource_name  # pylint: disable=cell-var-from-loop
                                         request.finish_write = remaining <= 0
                                         yield request
                                         offset += chunk_size
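
    The pylint suppression added above refers to the cell-var-from-loop check, which flags a loop variable captured by a nested function; a minimal illustration of the behaviour it warns about, unrelated to the CAS code itself:

        def make_callbacks():
            callbacks = []
            for name in ("a", "b", "c"):
                # Late binding: each lambda looks up 'name' when it is called,
                # so all three of these return "c".
                callbacks.append(lambda: name)
                # Binding through a default argument captures each value instead.
                callbacks.append(lambda n=name: n)
            return callbacks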
    

  • buildstream/_context.py
    @@ -64,12 +64,6 @@ class Context():
             # The locations from which to push and pull prebuilt artifacts
             self.artifact_cache_specs = []
     
    -        # The artifact cache quota
    -        self.cache_quota = None
    -
    -        # The lower threshold to which we aim to reduce the cache size
    -        self.cache_lower_threshold = None
    -
             # The directory to store build logs
             self.logdir = None
     
    @@ -124,8 +118,8 @@ class Context():
             self._workspaces = None
             self._log_handle = None
             self._log_filename = None
    -        self._config_cache_quota = None
    -        self._artifactdir_volume = None
    +        self.config_cache_quota = 'infinity'
    +        self.artifactdir_volume = None
     
         # load()
         #
    @@ -185,23 +179,7 @@ class Context():
             cache = _yaml.node_get(defaults, Mapping, 'cache')
             _yaml.node_validate(cache, ['quota'])
     
    -        artifactdir_volume = self.artifactdir
    -        while not os.path.exists(artifactdir_volume):
    -            artifactdir_volume = os.path.dirname(artifactdir_volume)
    -
    -        self._artifactdir_volume = artifactdir_volume
    -
    -        # We read and parse the cache quota as specified by the user
    -        cache_quota = _yaml.node_get(cache, str, 'quota', default_value='infinity')
    -        try:
    -            cache_quota = utils._parse_size(cache_quota, self._artifactdir_volume)
    -        except utils.UtilError as e:
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    -                            "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
    -                            "\nValid values are, for example: 800M 10G 1T 50%\n"
    -                            .format(str(e))) from e
    -
    -        self._config_cache_quota = cache_quota
    +        self.config_cache_quota = _yaml.node_get(cache, str, 'quota', default_value='infinity')
     
             # Load artifact share configuration
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
    @@ -525,53 +503,6 @@ class Context():
         def get_log_filename(self):
             return self._log_filename
     
    -    def set_cache_quota(self, cache_size):
    -        # Headroom intended to give BuildStream a bit of leeway.
    -        # This acts as the minimum size of cache_quota and also
    -        # is taken from the user requested cache_quota.
    -        #
    -        if 'BST_TEST_SUITE' in os.environ:
    -            headroom = 0
    -        else:
    -            headroom = 2e9
    -
    -        stat = os.statvfs(self._artifactdir_volume)
    -        available_space = (stat.f_bsize * stat.f_bavail)
    -
    -        # Ensure system has enough storage for the cache_quota
    -        #
    -        # If cache_quota is none, set it to the maximum it could possibly be.
    -        #
    -        # Also check that cache_quota is atleast as large as our headroom.
    -        #
    -        cache_quota = self._config_cache_quota
    -        if cache_quota is None:  # Infinity, set to max system storage
    -            cache_quota = cache_size + available_space
    -        if cache_quota < headroom:  # Check minimum
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    -                            "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
    -                            "BuildStream requires a minimum cache quota of 2G.")
    -        elif cache_quota > cache_size + available_space:  # Check maximum
    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    -                            ("Your system does not have enough available " +
    -                             "space to support the cache quota specified.\n" +
    -                             "You currently have:\n" +
    -                             "- {used} of cache in use at {local_cache_path}\n" +
    -                             "- {available} of available system storage").format(
    -                                 used=utils._pretty_size(cache_size),
    -                                 local_cache_path=self.artifactdir,
    -                                 available=utils._pretty_size(available_space)))
    -
    -        # Place a slight headroom (2e9 (2GB) on the cache_quota) into
    -        # cache_quota to try and avoid exceptions.
    -        #
    -        # Of course, we might still end up running out during a build
    -        # if we end up writing more than 2G, but hey, this stuff is
    -        # already really fuzzy.
    -        #
    -        self.cache_quota = cache_quota - headroom
    -        self.cache_lower_threshold = self.cache_quota / 2
    -
         # _record_message()
         #
         # Records the message if recording is enabled
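
    With this change, Context keeps the quota string exactly as the user wrote it ('infinity', a byte count, or a percentage) and the artifact cache parses it later against the artifact volume. A rough sketch of what such parsing could look like; parse_size below is a hypothetical stand-in for utils._parse_size, whose real behaviour is not shown in this diff:

        import os

        _UNITS = {'K': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12}

        def parse_size(size, volume):
            # 'infinity' maps to None, meaning no quota was requested.
            if size == 'infinity':
                return None

            # Percentages are resolved against the total size of the volume.
            if size.endswith('%'):
                stat = os.statvfs(volume)
                return int(stat.f_bsize * stat.f_blocks * float(size[:-1]) / 100)

            # Otherwise accept a plain byte count or a K/M/G/T suffix.
            suffix = size[-1].upper()
            if suffix in _UNITS:
                return int(float(size[:-1]) * _UNITS[suffix])
            return int(size)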
    

  • buildstream/_frontend/app.py
    @@ -199,12 +199,10 @@ class App():
                 option_value = self._main_options.get(cli_option)
                 if option_value is not None:
                     setattr(self.context, context_attr, option_value)
    -
    -        Platform.create_instance(self.context)
    -
    -        platform = Platform.get_platform()
    -        cache_size = platform._artifact_cache.calculate_cache_size()
    -        self.context.set_cache_quota(cache_size)
    +        try:
    +            Platform.create_instance(self.context)
    +        except BstError as e:
    +            self._error_exit(e, "Error instantiating platform")
     
             # Create the logger right before setting the message handler
             self.logger = LogLine(self.context,
    

  • buildstream/_frontend/linuxapp.py
    @@ -28,9 +28,9 @@ from .app import App
     #
     def _osc_777_supported():
     
    -    term = os.environ['TERM']
    +    term = os.environ.get('TERM')
     
    -    if term.startswith('xterm') or term.startswith('vte'):
    +    if term and (term.startswith('xterm') or term.startswith('vte')):
     
             # Since vte version 4600, upstream silently ignores
             # the OSC 777 without printing garbage to the terminal.
    @@ -39,10 +39,10 @@ def _osc_777_supported():
             # will trigger a desktop notification and bring attention
             # to the terminal.
             #
    -        vte_version = os.environ['VTE_VERSION']
    +        vte_version = os.environ.get('VTE_VERSION')
             try:
                 vte_version_int = int(vte_version)
    -        except ValueError:
    +        except (ValueError, TypeError):
                 return False
     
             if vte_version_int >= 4600:
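
    The fix here is purely defensive: TERM and VTE_VERSION may be unset (for example in a non-interactive shell), so the lookups switch to os.environ.get() and the int() conversion additionally tolerates None. Condensed, the patched check behaves roughly like the sketch below; the remainder of the original function is not shown in the diff, so the final return paths are assumptions:

        import os

        def osc_777_supported():
            # .get() returns None instead of raising KeyError when unset.
            term = os.environ.get('TERM')
            if not term or not (term.startswith('xterm') or term.startswith('vte')):
                return False

            try:
                # int(None) raises TypeError, int('garbage') raises ValueError.
                return int(os.environ.get('VTE_VERSION')) >= 4600
            except (ValueError, TypeError):
                return False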
    

  • buildstream/_scheduler/queues/buildqueue.py
    @@ -61,7 +61,7 @@ class BuildQueue(Queue):
                 cache = element._get_artifact_cache()
                 cache._add_artifact_size(artifact_size)
     
    -            if cache.get_approximate_cache_size() > self._scheduler.context.cache_quota:
    +            if cache.get_approximate_cache_size() > cache.cache_quota:
                     self._scheduler._check_cache_size_real()
     
         def done(self, job, element, result, success):
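
    The quota consulted here is now the one owned by the artifact cache rather than the Context; the underlying pattern is a cheap running estimate that only triggers a real, expensive size check once it crosses the quota. A minimal sketch with illustrative names, not the BuildStream API:

        class SizeEstimate:
            def __init__(self, quota):
                self.cache_quota = quota
                self.estimated_size = 0

            def add_artifact_size(self, size):
                # Keep a cheap running total as artifacts land in the cache.
                self.estimated_size += size

        def after_assemble(cache, artifact_size, check_real_size):
            cache.add_artifact_size(artifact_size)
            # Only pay for a full on-disk measurement once the estimate
            # has crossed the cache's own quota.
            if cache.estimated_size > cache.cache_quota:
                check_real_size()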
    

  • buildstream/_scheduler/scheduler.py
    @@ -29,6 +29,7 @@ from contextlib import contextmanager
     # Local imports
     from .resources import Resources, ResourceType
     from .jobs import CacheSizeJob, CleanupJob
    +from .._platform import Platform
     
     
     # A decent return code for Scheduler.run()
    @@ -314,7 +315,8 @@ class Scheduler():
             self._sched()
     
         def _run_cleanup(self, cache_size):
    -        if cache_size and cache_size < self.context.cache_quota:
    +        platform = Platform.get_platform()
    +        if cache_size and cache_size < platform.artifactcache.cache_quota:
                 return
     
             job = CleanupJob(self, 'cleanup', 'cleanup',
    

  • tests/artifactcache/cache_size.py
    +import os
    +import pytest
    +
    +from buildstream import _yaml
    +from buildstream._artifactcache import CACHE_SIZE_FILE
    +
    +from tests.testutils import cli, create_element_size
    +
    +# XXX: Currently lacking:
    +#      * A way to check whether it's faster to read cache size on
    +#        successive invocations.
    +#      * A way to check whether the cache size file has been read.
    +
    +
    +def create_project(project_dir):
    +    project_file = os.path.join(project_dir, "project.conf")
    +    project_conf = {
    +        "name": "test"
    +    }
    +    _yaml.dump(project_conf, project_file)
    +    element_name = "test.bst"
    +    create_element_size(element_name, project_dir, ".", [], 1024)
    +
    +
    +def test_cache_size_roundtrip(cli, tmpdir):
    +    # Builds (to put files in the cache), then invokes buildstream again
    +    # to check nothing breaks
    +
    +    # Create project
    +    project_dir = str(tmpdir)
    +    create_project(project_dir)
    +
    +    # Build, to populate the cache
    +    res = cli.run(project=project_dir, args=["build", "test.bst"])
    +    res.assert_success()
    +
    +    # Show, to check that nothing breaks while reading cache size
    +    res = cli.run(project=project_dir, args=["show", "test.bst"])
    +    res.assert_success()
    +
    +
    +def test_cache_size_write(cli, tmpdir):
    +    # Builds (to put files in the cache), then checks a number is
    +    # written to the cache size file.
    +
    +    project_dir = str(tmpdir)
    +    create_project(project_dir)
    +
    +    # Artifact cache must be in a known place
    +    artifactdir = os.path.join(project_dir, "artifacts")
    +    cli.configure({"artifactdir": artifactdir})
    +
    +    # Build, to populate the cache
    +    res = cli.run(project=project_dir, args=["build", "test.bst"])
    +    res.assert_success()
    +
    +    # Inspect the artifact cache
    +    sizefile = os.path.join(artifactdir, CACHE_SIZE_FILE)
    +    assert os.path.isfile(sizefile)
    +    with open(sizefile, "r") as f:
    +        size_data = f.read()
    +    size = int(size_data)

  • tests/testutils/artifactshare.py
    @@ -140,6 +140,7 @@ class ArtifactShare():
     
             return statvfs_result(f_blocks=self.total_space,
                                   f_bfree=self.free_space - repo_size,
    +                              f_bavail=self.free_space - repo_size,
                                   f_bsize=1)
     
     
    @@ -156,4 +157,4 @@ def create_artifact_share(directory, *, total_space=None, free_space=None):
             share.close()
     
     
    -statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize')
    +statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize f_bavail')
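
    The namedtuple fakes just enough of os.statvfs_result for the quota code to read free space; adding f_bavail keeps it in step with the f_bavail lookup introduced above. A sketch of how such a stub might be substituted in a test, using pytest's monkeypatch fixture; the test itself is illustrative and not part of this commit:

        import os
        from collections import namedtuple

        statvfs_result = namedtuple('statvfs_result', 'f_blocks f_bfree f_bsize f_bavail')

        def test_fake_free_space(monkeypatch):
            # Pretend the volume has 10000 one-byte blocks, 2000 of them available.
            fake = statvfs_result(f_blocks=10000, f_bfree=2000, f_bsize=1, f_bavail=2000)
            monkeypatch.setattr(os, 'statvfs', lambda path: fake)
            stat = os.statvfs('/anywhere')
            assert stat.f_bsize * stat.f_bavail == 2000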


