[Notes] [Git][BuildStream/buildstream][jmac/cas_to_cas_v2] 23 commits: plugins/sources/pip.py: Accomodate characters '-', '.', '_' for packages




Jim MacArthur pushed to branch jmac/cas_to_cas_v2 at BuildStream / buildstream

Commits:

29 changed files:

Changes:

  • .gitlab-ci.yml
    @@ -166,6 +166,12 @@ docs:
         BST_EXT_REF: 1d6ab71151b93c8cbc0a91a36ffe9270f3b835f1 # 0.5.1
         FD_SDK_REF: 88d7c22c2281b987faa02edd57df80d430eecf1f # 18.08.11-35-g88d7c22c
       before_script:
    +  - |
    +    mkdir -p "${HOME}/.config"
    +    cat <<EOF >"${HOME}/.config/buildstream.conf"
    +    scheduler:
    +      fetchers: 2
    +    EOF
       - (cd dist && ./unpack.sh && cd buildstream && pip3 install .)
       - pip3 install --user -e ${BST_EXT_URL}@${BST_EXT_REF}#egg=bst_ext
       - git clone https://gitlab.com/freedesktop-sdk/freedesktop-sdk.git
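
    The heredoc above drops a user-level configuration into ${HOME}/.config/buildstream.conf so that CI runs cap the scheduler at two concurrent fetchers. As a minimal sanity-check sketch (not part of the CI; assumes PyYAML is installed), the generated file parses to exactly that setting:

        import os
        import yaml  # PyYAML, assumed available

        # Load the config written by the before_script heredoc
        conf_path = os.path.expanduser("~/.config/buildstream.conf")
        with open(conf_path) as f:
            conf = yaml.safe_load(f)

        # The CI caps concurrent fetchers at 2
        assert conf["scheduler"]["fetchers"] == 2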
    

  • buildstream/_artifactcache/artifactcache.py
    @@ -17,17 +17,22 @@
     #  Authors:
     #        Tristan Maat <tristan maat codethink co uk>

    +import multiprocessing
     import os
    +import signal
     import string
     from collections import namedtuple
     from collections.abc import Mapping

     from ..types import _KeyStrength
    -from .._exceptions import ArtifactError, ImplError, LoadError, LoadErrorReason
    +from .._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
     from .._message import Message, MessageType
    +from .. import _signals
     from .. import utils
     from .. import _yaml

    +from .cascache import CASCache, CASRemote
    +

     CACHE_SIZE_FILE = "cache_size"

    @@ -93,7 +98,8 @@ class ArtifactCache():
         def __init__(self, context):
             self.context = context
             self.extractdir = os.path.join(context.artifactdir, 'extract')
    -        self.tmpdir = os.path.join(context.artifactdir, 'tmp')
    +
    +        self.cas = CASCache(context.artifactdir)

             self.global_remote_specs = []
             self.project_remote_specs = {}

    @@ -104,12 +110,15 @@ class ArtifactCache():
             self._cache_lower_threshold = None    # The target cache size for a cleanup
             self._remotes_setup = False           # Check to prevent double-setup of remotes

    +        # Per-project list of _CASRemote instances.
    +        self._remotes = {}
    +
    +        self._has_fetch_remotes = False
    +        self._has_push_remotes = False
    +
             os.makedirs(self.extractdir, exist_ok=True)
    -        os.makedirs(self.tmpdir, exist_ok=True)

    -    ################################################
    -    #  Methods implemented on the abstract class   #
    -    ################################################
    +        self._calculate_cache_quota()

         # get_artifact_fullname()
         #
    @@ -240,8 +249,10 @@ class ArtifactCache():
                 for key in (strong_key, weak_key):
                     if key:
                         try:
    -                        self.update_mtime(element, key)
    -                    except ArtifactError:
    +                        ref = self.get_artifact_fullname(element, key)
    +
    +                        self.cas.update_mtime(ref)
    +                    except CASError:
                             pass

         # clean():

    @@ -252,7 +263,7 @@ class ArtifactCache():
         #    (int): The size of the cache after having cleaned up
         #
         def clean(self):
    -        artifacts = self.list_artifacts()  # pylint: disable=assignment-from-no-return
    +        artifacts = self.list_artifacts()

             # Build a set of the cache keys which are required
             # based on the required elements at cleanup time

    @@ -294,7 +305,7 @@ class ArtifactCache():
                 if key not in required_artifacts:

                     # Remove the actual artifact, if it's not required.
    -                size = self.remove(to_remove)  # pylint: disable=assignment-from-no-return
    +                size = self.remove(to_remove)

                     # Remove the size from the removed size
                     self.set_cache_size(self._cache_size - size)

    @@ -311,7 +322,7 @@ class ArtifactCache():
         #    (int): The size of the artifact cache.
         #
         def compute_cache_size(self):
    -        self._cache_size = self.calculate_cache_size()  # pylint: disable=assignment-from-no-return
    +        self._cache_size = self.cas.calculate_cache_size()

             return self._cache_size

    @@ -380,28 +391,12 @@ class ArtifactCache():
         def has_quota_exceeded(self):
             return self.get_cache_size() > self._cache_quota

    -    ################################################
    -    # Abstract methods for subclasses to implement #
    -    ################################################
    -
         # preflight():
         #
         # Preflight check.
         #
         def preflight(self):
    -        pass
    -
    -    # update_mtime()
    -    #
    -    # Update the mtime of an artifact.
    -    #
    -    # Args:
    -    #     element (Element): The Element to update
    -    #     key (str): The key of the artifact.
    -    #
    -    def update_mtime(self, element, key):
    -        raise ImplError("Cache '{kind}' does not implement update_mtime()"
    -                        .format(kind=type(self).__name__))
    +        self.cas.preflight()

         # initialize_remotes():
         #
    @@ -411,7 +406,59 @@ class ArtifactCache():
         #     on_failure (callable): Called if we fail to contact one of the caches.
         #
         def initialize_remotes(self, *, on_failure=None):
    -        pass
    +        remote_specs = self.global_remote_specs
    +
    +        for project in self.project_remote_specs:
    +            remote_specs += self.project_remote_specs[project]
    +
    +        remote_specs = list(utils._deduplicate(remote_specs))
    +
    +        remotes = {}
    +        q = multiprocessing.Queue()
    +        for remote_spec in remote_specs:
    +            # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    +            # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    +            p = multiprocessing.Process(target=self.cas.initialize_remote, args=(remote_spec, q))
    +
    +            try:
    +                # Keep SIGINT blocked in the child process
    +                with _signals.blocked([signal.SIGINT], ignore=False):
    +                    p.start()
    +
    +                error = q.get()
    +                p.join()
    +            except KeyboardInterrupt:
    +                utils._kill_process_tree(p.pid)
    +                raise
    +
    +            if error and on_failure:
    +                on_failure(remote_spec.url, error)
    +            elif error:
    +                raise ArtifactError(error)
    +            else:
    +                self._has_fetch_remotes = True
    +                if remote_spec.push:
    +                    self._has_push_remotes = True
    +
    +                remotes[remote_spec.url] = CASRemote(remote_spec)
    +
    +        for project in self.context.get_projects():
    +            remote_specs = self.global_remote_specs
    +            if project in self.project_remote_specs:
    +                remote_specs = list(utils._deduplicate(remote_specs + self.project_remote_specs[project]))
    +
    +            project_remotes = []
    +
    +            for remote_spec in remote_specs:
    +                # Errors are already handled in the loop above,
    +                # skip unreachable remotes here.
    +                if remote_spec.url not in remotes:
    +                    continue
    +
    +                remote = remotes[remote_spec.url]
    +                project_remotes.append(remote)
    +
    +            self._remotes[project] = project_remotes

         # contains():
         #
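
    The subprocess dance in initialize_remotes() exists because gRPC creates background threads as soon as a channel is used, and those threads do not survive a later fork() (see the linked fork_support.md). Probing each remote in a short-lived child keeps the main BuildStream process thread-free. A minimal standalone sketch of the pattern (hypothetical probe() stands in for CASCache.initialize_remote()):

        import multiprocessing

        def probe(url, q):
            # Runs in the child: any gRPC channel created here never
            # touches the parent process. Report None on success,
            # otherwise an error string.
            try:
                # ... open a channel to url and call Status() here ...
                q.put(None)
            except Exception as e:  # pylint: disable=broad-except
                q.put(str(e))

        def check_remote(url):
            q = multiprocessing.Queue()
            p = multiprocessing.Process(target=probe, args=(url, q))
            p.start()
            error = q.get()  # blocks until the child reports
            p.join()
            return error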
    
    @@ -425,8 +472,9 @@ class ArtifactCache():
         # Returns: True if the artifact is in the cache, False otherwise
         #
         def contains(self, element, key):
    -        raise ImplError("Cache '{kind}' does not implement contains()"
    -                        .format(kind=type(self).__name__))
    +        ref = self.get_artifact_fullname(element, key)
    +
    +        return self.cas.contains(ref)

         # list_artifacts():
         #

    @@ -437,8 +485,7 @@ class ArtifactCache():
         #               `ArtifactCache.get_artifact_fullname` in LRU order
         #
         def list_artifacts(self):
    -        raise ImplError("Cache '{kind}' does not implement list_artifacts()"
    -                        .format(kind=type(self).__name__))
    +        return self.cas.list_refs()

         # remove():
         #
    @@ -450,9 +497,31 @@ class ArtifactCache():
         #                          generated by
         #                          `ArtifactCache.get_artifact_fullname`)
         #
    -    def remove(self, artifact_name):
    -        raise ImplError("Cache '{kind}' does not implement remove()"
    -                        .format(kind=type(self).__name__))
    +    # Returns:
    +    #    (int|None) The amount of space pruned from the repository in
    +    #               Bytes, or None if defer_prune is True
    +    #
    +    def remove(self, ref):
    +
    +        # Remove extract if not used by other ref
    +        tree = self.cas.resolve_ref(ref)
    +        ref_name, ref_hash = os.path.split(ref)
    +        extract = os.path.join(self.extractdir, ref_name, tree.hash)
    +        keys_file = os.path.join(extract, 'meta', 'keys.yaml')
    +        if os.path.exists(keys_file):
    +            keys_meta = _yaml.load(keys_file)
    +            keys = [keys_meta['strong'], keys_meta['weak']]
    +            remove_extract = True
    +            for other_hash in keys:
    +                if other_hash == ref_hash:
    +                    continue
    +                remove_extract = False
    +                break
    +
    +            if remove_extract:
    +                utils._force_rmtree(extract)
    +
    +        return self.cas.remove(ref)

         # extract():
         #
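
    The keys.yaml check in remove() guards the shared extract directory: a strong and a weak cache key can resolve to the same tree, so the extracted copy is deleted only when no other key still points at it. Reduced to its essence (hypothetical values):

        # ref_hash is the key being removed; keys come from meta/keys.yaml
        ref_hash = "strongkey123"
        keys = ["strongkey123", "weakkey456"]

        # Remove the extract only if every key in keys.yaml is the one
        # being removed, i.e. nothing else references this tree.
        remove_extract = all(other == ref_hash for other in keys)
        print(remove_extract)  # False: the weak key still needs the extract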
    
    @@ -472,8 +541,11 @@ class ArtifactCache():
         # Returns: path to extracted artifact
         #
         def extract(self, element, key):
    -        raise ImplError("Cache '{kind}' does not implement extract()"
    -                        .format(kind=type(self).__name__))
    +        ref = self.get_artifact_fullname(element, key)
    +
    +        path = os.path.join(self.extractdir, element._get_project().name, element.normal_name)
    +
    +        return self.cas.extract(ref, path)

         # commit():
         #

    @@ -485,8 +557,9 @@ class ArtifactCache():
         #     keys (list): The cache keys to use
         #
         def commit(self, element, content, keys):
    -        raise ImplError("Cache '{kind}' does not implement commit()"
    -                        .format(kind=type(self).__name__))
    +        refs = [self.get_artifact_fullname(element, key) for key in keys]
    +
    +        self.cas.commit(refs, content)

         # diff():
         #

    @@ -500,8 +573,10 @@ class ArtifactCache():
         #     subdir (str): A subdirectory to limit the comparison to
         #
         def diff(self, element, key_a, key_b, *, subdir=None):
    -        raise ImplError("Cache '{kind}' does not implement diff()"
    -                        .format(kind=type(self).__name__))
    +        ref_a = self.get_artifact_fullname(element, key_a)
    +        ref_b = self.get_artifact_fullname(element, key_b)
    +
    +        return self.cas.diff(ref_a, ref_b, subdir=subdir)

         # has_fetch_remotes():
         #

    @@ -513,7 +588,16 @@ class ArtifactCache():
         # Returns: True if any remote repositories are configured, False otherwise
         #
         def has_fetch_remotes(self, *, element=None):
    -        return False
    +        if not self._has_fetch_remotes:
    +            # No project has fetch remotes
    +            return False
    +        elif element is None:
    +            # At least one (sub)project has fetch remotes
    +            return True
    +        else:
    +            # Check whether the specified element's project has fetch remotes
    +            remotes_for_project = self._remotes[element._get_project()]
    +            return bool(remotes_for_project)

         # has_push_remotes():
         #

    @@ -525,7 +609,16 @@ class ArtifactCache():
         # Returns: True if any remote repository is configured, False otherwise
         #
         def has_push_remotes(self, *, element=None):
    -        return False
    +        if not self._has_push_remotes:
    +            # No project has push remotes
    +            return False
    +        elif element is None:
    +            # At least one (sub)project has push remotes
    +            return True
    +        else:
    +            # Check whether the specified element's project has push remotes
    +            remotes_for_project = self._remotes[element._get_project()]
    +            return any(remote.spec.push for remote in remotes_for_project)

         # push():
         #

    @@ -542,8 +635,28 @@ class ArtifactCache():
         #   (ArtifactError): if there was an error
         #
         def push(self, element, keys):
    -        raise ImplError("Cache '{kind}' does not implement push()"
    -                        .format(kind=type(self).__name__))
    +        refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
    +
    +        project = element._get_project()
    +
    +        push_remotes = [r for r in self._remotes[project] if r.spec.push]
    +
    +        pushed = False
    +
    +        for remote in push_remotes:
    +            remote.init()
    +            display_key = element._get_brief_display_key()
    +            element.status("Pushing artifact {} -> {}".format(display_key, remote.spec.url))
    +
    +            if self.cas.push(refs, remote):
    +                element.info("Pushed artifact {} -> {}".format(display_key, remote.spec.url))
    +                pushed = True
    +            else:
    +                element.info("Remote ({}) already has {} cached".format(
    +                    remote.spec.url, element._get_brief_display_key()
    +                ))
    +
    +        return pushed

         # pull():
         #
    @@ -558,8 +671,130 @@ class ArtifactCache():
         #   (bool): True if pull was successful, False if artifact was not available
         #
         def pull(self, element, key, *, progress=None):
    -        raise ImplError("Cache '{kind}' does not implement pull()"
    -                        .format(kind=type(self).__name__))
    +        ref = self.get_artifact_fullname(element, key)
    +
    +        project = element._get_project()
    +
    +        for remote in self._remotes[project]:
    +            try:
    +                display_key = element._get_brief_display_key()
    +                element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
    +
    +                if self.cas.pull(ref, remote, progress=progress):
    +                    element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    +                    # no need to pull from additional remotes
    +                    return True
    +                else:
    +                    element.info("Remote ({}) does not have {} cached".format(
    +                        remote.spec.url, element._get_brief_display_key()
    +                    ))
    +
    +            except CASError as e:
    +                raise ArtifactError("Failed to pull artifact {}: {}".format(
    +                    element._get_brief_display_key(), e)) from e
    +
    +        return False
    +
    +    # pull_tree():
    +    #
    +    # Pull a single Tree rather than an artifact.
    +    # Does not update local refs.
    +    #
    +    # Args:
    +    #     project (Project): The current project
    +    #     digest (Digest): The digest of the tree
    +    #
    +    def pull_tree(self, project, digest):
    +        for remote in self._remotes[project]:
    +            digest = self.cas.pull_tree(remote, digest)
    +
    +            if digest:
    +                # no need to pull from additional remotes
    +                return digest
    +
    +        return None
    +
    +    # push_directory():
    +    #
    +    # Push the given virtual directory to all remotes.
    +    #
    +    # Args:
    +    #     project (Project): The current project
    +    #     directory (Directory): A virtual directory object to push.
    +    #
    +    # Raises:
    +    #     (ArtifactError): if there was an error
    +    #
    +    def push_directory(self, project, directory):
    +        if self._has_push_remotes:
    +            push_remotes = [r for r in self._remotes[project] if r.spec.push]
    +        else:
    +            push_remotes = []
    +
    +        if not push_remotes:
    +            raise ArtifactError("push_directory was called, but no remote artifact " +
    +                                "servers are configured as push remotes.")
    +
    +        if directory.ref is None:
    +            return
    +
    +        for remote in push_remotes:
    +            self.cas.push_directory(remote, directory)
    +
    +    # push_message():
    +    #
    +    # Push the given protobuf message to all remotes.
    +    #
    +    # Args:
    +    #     project (Project): The current project
    +    #     message (Message): A protobuf message to push.
    +    #
    +    # Raises:
    +    #     (ArtifactError): if there was an error
    +    #
    +    def push_message(self, project, message):
    +
    +        if self._has_push_remotes:
    +            push_remotes = [r for r in self._remotes[project] if r.spec.push]
    +        else:
    +            push_remotes = []
    +
    +        if not push_remotes:
    +            raise ArtifactError("push_message was called, but no remote artifact " +
    +                                "servers are configured as push remotes.")
    +
    +        for remote in push_remotes:
    +            message_digest = self.cas.push_message(remote, message)
    +
    +        return message_digest
    +
    +    # verify_digest_pushed():
    +    #
    +    # Check whether the object is already on the server in which case
    +    # there is no need to upload it.
    +    #
    +    # Args:
    +    #     project (Project): The current project
    +    #     digest (Digest): The object digest.
    +    #
    +    def verify_digest_pushed(self, project, digest):
    +
    +        if self._has_push_remotes:
    +            push_remotes = [r for r in self._remotes[project] if r.spec.push]
    +        else:
    +            push_remotes = []
    +
    +        if not push_remotes:
    +            raise ArtifactError("verify_digest_pushed was called, but no remote artifact " +
    +                                "servers are configured as push remotes.")
    +
    +        pushed = False
    +
    +        for remote in push_remotes:
    +            if self.cas.verify_digest_on_remote(remote, digest):
    +                pushed = True
    +
    +        return pushed

         # link_key():
         #
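
    Note the fallback order in pull(): remotes are tried in the configured order and the loop returns on the first remote that has the artifact, so later remotes are only contacted on a miss. The shape of the loop, in isolation (hypothetical fetch() stands in for CASCache.pull()):

        def pull_first(remotes, fetch):
            # Try each remote in turn; stop at the first hit.
            for remote in remotes:
                if fetch(remote):
                    return True  # no need to pull from additional remotes
            return False

        print(pull_first(["cache-a", "cache-b"], lambda r: r == "cache-b"))  # True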
    
    @@ -571,19 +806,10 @@ class ArtifactCache():
         #     newkey (str): A new cache key for the artifact
         #
         def link_key(self, element, oldkey, newkey):
    -        raise ImplError("Cache '{kind}' does not implement link_key()"
    -                        .format(kind=type(self).__name__))
    +        oldref = self.get_artifact_fullname(element, oldkey)
    +        newref = self.get_artifact_fullname(element, newkey)

    -    # calculate_cache_size()
    -    #
    -    # Return the real artifact cache size.
    -    #
    -    # Returns:
    -    #    (int): The size of the artifact cache.
    -    #
    -    def calculate_cache_size(self):
    -        raise ImplError("Cache '{kind}' does not implement calculate_cache_size()"
    -                        .format(kind=type(self).__name__))
    +        self.cas.link_ref(oldref, newref)

         ################################################
         #               Local Private Methods          #

  • buildstream/_artifactcache/cascache.py
    @@ -20,9 +20,7 @@
     import hashlib
     import itertools
     import io
    -import multiprocessing
     import os
    -import signal
     import stat
     import tempfile
     import uuid

    @@ -31,17 +29,13 @@ from urllib.parse import urlparse

     import grpc

    -from .. import _yaml
    -
     from .._protos.google.rpc import code_pb2
     from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
     from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
     from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc

    -from .. import _signals, utils
    -from .._exceptions import ArtifactError
    -
    -from . import ArtifactCache
    +from .. import utils
    +from .._exceptions import CASError


     # The default limit for gRPC messages is 4 MiB.
    @@ -49,62 +43,68 @@ from . import ArtifactCache
     _MAX_PAYLOAD_BYTES = 1024 * 1024


    -# A CASCache manages artifacts in a CAS repository as specified in the
    -# Remote Execution API.
    +# A CASCache manages a CAS repository as specified in the Remote Execution API.
     #
     # Args:
    -#     context (Context): The BuildStream context
    -#
    -# Pushing is explicitly disabled by the platform in some cases,
    -# like when we are falling back to functioning without using
    -# user namespaces.
    +#     path (str): The root directory for the CAS repository
     #
    -class CASCache(ArtifactCache):
    +class CASCache():

    -    def __init__(self, context):
    -        super().__init__(context)
    -
    -        self.casdir = os.path.join(context.artifactdir, 'cas')
    +    def __init__(self, path):
    +        self.casdir = os.path.join(path, 'cas')
    +        self.tmpdir = os.path.join(path, 'tmp')
             os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
             os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
    +        os.makedirs(self.tmpdir, exist_ok=True)

    -        self._calculate_cache_quota()
    -
    -        # Per-project list of _CASRemote instances.
    -        self._remotes = {}
    -
    -        self._has_fetch_remotes = False
    -        self._has_push_remotes = False
    -
    -    ################################################
    -    #     Implementation of abstract methods       #
    -    ################################################
    -
    +    # preflight():
    +    #
    +    # Preflight check.
    +    #
         def preflight(self):
             headdir = os.path.join(self.casdir, 'refs', 'heads')
             objdir = os.path.join(self.casdir, 'objects')
             if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
    -            raise ArtifactError("CAS repository check failed for '{}'"
    -                                .format(self.casdir))
    +            raise CASError("CAS repository check failed for '{}'".format(self.casdir))

    -    def contains(self, element, key):
    -        refpath = self._refpath(self.get_artifact_fullname(element, key))
    +    # contains():
    +    #
    +    # Check whether the specified ref is already available in the local CAS cache.
    +    #
    +    # Args:
    +    #     ref (str): The ref to check
    +    #
    +    # Returns: True if the ref is in the cache, False otherwise
    +    #
    +    def contains(self, ref):
    +        refpath = self._refpath(ref)

             # This assumes that the repository doesn't have any dangling pointers
             return os.path.exists(refpath)

    -    def extract(self, element, key):
    -        ref = self.get_artifact_fullname(element, key)
    -
    +    # extract():
    +    #
    +    # Extract cached directory for the specified ref if it hasn't
    +    # already been extracted.
    +    #
    +    # Args:
    +    #     ref (str): The ref whose directory to extract
    +    #     path (str): The destination path
    +    #
    +    # Raises:
    +    #     CASError: In cases there was an OSError, or if the ref did not exist.
    +    #
    +    # Returns: path to extracted directory
    +    #
    +    def extract(self, ref, path):
             tree = self.resolve_ref(ref, update_mtime=True)

    -        dest = os.path.join(self.extractdir, element._get_project().name,
    -                            element.normal_name, tree.hash)
    +        dest = os.path.join(path, tree.hash)
             if os.path.isdir(dest):
    -            # artifact has already been extracted
    +            # directory has already been extracted
                 return dest

    -        with tempfile.TemporaryDirectory(prefix='tmp', dir=self.extractdir) as tmpdir:
    +        with tempfile.TemporaryDirectory(prefix='tmp', dir=self.tmpdir) as tmpdir:
                 checkoutdir = os.path.join(tmpdir, ref)
                 self._checkout(checkoutdir, tree)
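
    With the ArtifactCache inheritance severed, CASCache is now a plain class keyed on a filesystem path, so it can be exercised on its own. A minimal usage sketch of the new API as it appears in this diff (paths and ref name are hypothetical):

        import os
        import tempfile

        from buildstream._artifactcache.cascache import CASCache

        root = tempfile.mkdtemp()

        # Some content to store
        src = os.path.join(root, "content")
        os.makedirs(src)
        with open(os.path.join(src, "hello.txt"), "w") as f:
            f.write("hello\n")

        cas = CASCache(root)             # creates <root>/cas and <root>/tmp
        cas.commit(["test/myref"], src)  # import the directory under a ref
        assert cas.contains("test/myref")

        # Extract to <root>/out/<tree-hash> and print the resulting path
        print(cas.extract("test/myref", os.path.join(root, "out")))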
     
    
    @@ -118,23 +118,35 @@ class CASCache(ArtifactCache):
                     # If rename fails with these errors, another process beat
                     # us to it so just ignore.
                     if e.errno not in [errno.ENOTEMPTY, errno.EEXIST]:
    -                    raise ArtifactError("Failed to extract artifact for ref '{}': {}"
    -                                        .format(ref, e)) from e
    +                    raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e

             return dest

    -    def commit(self, element, content, keys):
    -        refs = [self.get_artifact_fullname(element, key) for key in keys]
    -
    -        tree = self._commit_directory(content)
    +    # commit():
    +    #
    +    # Commit directory to cache.
    +    #
    +    # Args:
    +    #     refs (list): The refs to set
    +    #     path (str): The directory to import
    +    #
    +    def commit(self, refs, path):
    +        tree = self._commit_directory(path)

             for ref in refs:
                 self.set_ref(ref, tree)

    -    def diff(self, element, key_a, key_b, *, subdir=None):
    -        ref_a = self.get_artifact_fullname(element, key_a)
    -        ref_b = self.get_artifact_fullname(element, key_b)
    -
    +    # diff():
    +    #
    +    # Return a list of files that have been added or modified between
    +    # the refs described by ref_a and ref_b.
    +    #
    +    # Args:
    +    #     ref_a (str): The first ref
    +    #     ref_b (str): The second ref
    +    #     subdir (str): A subdirectory to limit the comparison to
    +    #
    +    def diff(self, ref_a, ref_b, *, subdir=None):
             tree_a = self.resolve_ref(ref_a)
             tree_b = self.resolve_ref(ref_b)
    @@ -150,158 +162,122 @@ class CASCache(ArtifactCache):

             return modified, removed, added

    -    def initialize_remotes(self, *, on_failure=None):
    -        remote_specs = self.global_remote_specs
    -
    -        for project in self.project_remote_specs:
    -            remote_specs += self.project_remote_specs[project]
    -
    -        remote_specs = list(utils._deduplicate(remote_specs))
    -
    -        remotes = {}
    -        q = multiprocessing.Queue()
    -        for remote_spec in remote_specs:
    -            # Use subprocess to avoid creation of gRPC threads in main BuildStream process
    -            # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    -            p = multiprocessing.Process(target=self._initialize_remote, args=(remote_spec, q))
    +    def initialize_remote(self, remote_spec, q):
    +        try:
    +            remote = CASRemote(remote_spec)
    +            remote.init()

    -            try:
    -                # Keep SIGINT blocked in the child process
    -                with _signals.blocked([signal.SIGINT], ignore=False):
    -                    p.start()
    -
    -                error = q.get()
    -                p.join()
    -            except KeyboardInterrupt:
    -                utils._kill_process_tree(p.pid)
    -                raise
    +            request = buildstream_pb2.StatusRequest()
    +            response = remote.ref_storage.Status(request)

    -            if error and on_failure:
    -                on_failure(remote_spec.url, error)
    -            elif error:
    -                raise ArtifactError(error)
    +            if remote_spec.push and not response.allow_updates:
    +                q.put('CAS server does not allow push')
                 else:
    -                self._has_fetch_remotes = True
    -                if remote_spec.push:
    -                    self._has_push_remotes = True
    +                # No error
    +                q.put(None)

    -                remotes[remote_spec.url] = _CASRemote(remote_spec)
    +        except grpc.RpcError as e:
    +            # str(e) is too verbose for errors reported to the user
    +            q.put(e.details())

    -        for project in self.context.get_projects():
    -            remote_specs = self.global_remote_specs
    -            if project in self.project_remote_specs:
    -                remote_specs = list(utils._deduplicate(remote_specs + self.project_remote_specs[project]))
    +        except Exception as e:               # pylint: disable=broad-except
    +            # Whatever happens, we need to return it to the calling process
    +            #
    +            q.put(str(e))

    -            project_remotes = []
    +    # pull():
    +    #
    +    # Pull a ref from a remote repository.
    +    #
    +    # Args:
    +    #     ref (str): The ref to pull
    +    #     remote (CASRemote): The remote repository to pull from
    +    #     progress (callable): The progress callback, if any
    +    #
    +    # Returns:
    +    #   (bool): True if pull was successful, False if ref was not available
    +    #
    +    def pull(self, ref, remote, *, progress=None):
    +        try:
    +            remote.init()

    -            for remote_spec in remote_specs:
    -                # Errors are already handled in the loop above,
    -                # skip unreachable remotes here.
    -                if remote_spec.url not in remotes:
    -                    continue
    +            request = buildstream_pb2.GetReferenceRequest()
    +            request.key = ref
    +            response = remote.ref_storage.GetReference(request)

    -                remote = remotes[remote_spec.url]
    -                project_remotes.append(remote)
    +            tree = remote_execution_pb2.Digest()
    +            tree.hash = response.digest.hash
    +            tree.size_bytes = response.digest.size_bytes

    -            self._remotes[project] = project_remotes
    +            self._fetch_directory(remote, tree)

    -    def has_fetch_remotes(self, *, element=None):
    -        if not self._has_fetch_remotes:
    -            # No project has fetch remotes
    -            return False
    -        elif element is None:
    -            # At least one (sub)project has fetch remotes
    -            return True
    -        else:
    -            # Check whether the specified element's project has fetch remotes
    -            remotes_for_project = self._remotes[element._get_project()]
    -            return bool(remotes_for_project)
    +            self.set_ref(ref, tree)

    -    def has_push_remotes(self, *, element=None):
    -        if not self._has_push_remotes:
    -            # No project has push remotes
    -            return False
    -        elif element is None:
    -            # At least one (sub)project has push remotes
                 return True
    -        else:
    -            # Check whether the specified element's project has push remotes
    -            remotes_for_project = self._remotes[element._get_project()]
    -            return any(remote.spec.push for remote in remotes_for_project)
    -
    -    def pull(self, element, key, *, progress=None):
    -        ref = self.get_artifact_fullname(element, key)
    -
    -        project = element._get_project()
    -
    -        for remote in self._remotes[project]:
    -            try:
    -                remote.init()
    -                display_key = element._get_brief_display_key()
    -                element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
    -
    -                request = buildstream_pb2.GetReferenceRequest()
    -                request.key = ref
    -                response = remote.ref_storage.GetReference(request)
    -
    -                tree = remote_execution_pb2.Digest()
    -                tree.hash = response.digest.hash
    -                tree.size_bytes = response.digest.size_bytes
    -
    -                self._fetch_directory(remote, tree)
    -
    -                self.set_ref(ref, tree)
    -
    -                element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
    -                # no need to pull from additional remotes
    -                return True
    -
    -            except grpc.RpcError as e:
    -                if e.code() != grpc.StatusCode.NOT_FOUND:
    -                    raise ArtifactError("Failed to pull artifact {}: {}".format(
    -                        element._get_brief_display_key(), e)) from e
    -                else:
    -                    element.info("Remote ({}) does not have {} cached".format(
    -                        remote.spec.url, element._get_brief_display_key()
    -                    ))
    -
    -        return False
    -
    -    def pull_tree(self, project, digest):
    -        """ Pull a single Tree rather than an artifact.
    -        Does not update local refs. """
    +        except grpc.RpcError as e:
    +            if e.code() != grpc.StatusCode.NOT_FOUND:
    +                raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
    +            else:
    +                return False

    -        for remote in self._remotes[project]:
    -            try:
    -                remote.init()
    +    # pull_tree():
    +    #
    +    # Pull a single Tree rather than a ref.
    +    # Does not update local refs.
    +    #
    +    # Args:
    +    #     remote (CASRemote): The remote to pull from
    +    #     digest (Digest): The digest of the tree
    +    #
    +    def pull_tree(self, remote, digest):
    +        try:
    +            remote.init()

    -                digest = self._fetch_tree(remote, digest)
    +            digest = self._fetch_tree(remote, digest)

    -                # no need to pull from additional remotes
    -                return digest
    +            return digest

    -            except grpc.RpcError as e:
    -                if e.code() != grpc.StatusCode.NOT_FOUND:
    -                    raise
    +        except grpc.RpcError as e:
    +            if e.code() != grpc.StatusCode.NOT_FOUND:
    +                raise

             return None

    -    def link_key(self, element, oldkey, newkey):
    -        oldref = self.get_artifact_fullname(element, oldkey)
    -        newref = self.get_artifact_fullname(element, newkey)
    -
    +    # link_ref():
    +    #
    +    # Add an alias for an existing ref.
    +    #
    +    # Args:
    +    #     oldref (str): An existing ref
    +    #     newref (str): A new ref for the same directory
    +    #
    +    def link_ref(self, oldref, newref):
             tree = self.resolve_ref(oldref)

             self.set_ref(newref, tree)

    -    def _push_refs_to_remote(self, refs, remote):
    +    # push():
    +    #
    +    # Push committed refs to remote repository.
    +    #
    +    # Args:
    +    #     refs (list): The refs to push
    +    #     remote (CASRemote): The remote to push to
    +    #
    +    # Returns:
    +    #   (bool): True if any remote was updated, False if no pushes were required
    +    #
    +    # Raises:
    +    #   (CASError): if there was an error
    +    #
    +    def push(self, refs, remote):
             skipped_remote = True
             try:
                 for ref in refs:
                     tree = self.resolve_ref(ref)

                     # Check whether ref is already on the server in which case
    -                # there is no need to push the artifact
    +                # there is no need to push the ref
                     try:
                         request = buildstream_pb2.GetReferenceRequest()
                         request.key = ref
    @@ -327,65 +303,38 @@ class CASCache(ArtifactCache):
                     skipped_remote = False
             except grpc.RpcError as e:
                 if e.code() != grpc.StatusCode.RESOURCE_EXHAUSTED:
    -                raise ArtifactError("Failed to push artifact {}: {}".format(refs, e), temporary=True) from e
    +                raise CASError("Failed to push ref {}: {}".format(refs, e), temporary=True) from e

             return not skipped_remote

    -    def push(self, element, keys):
    -
    -        refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
    -
    -        project = element._get_project()
    -
    -        push_remotes = [r for r in self._remotes[project] if r.spec.push]
    -
    -        pushed = False
    -
    -        for remote in push_remotes:
    -            remote.init()
    -            display_key = element._get_brief_display_key()
    -            element.status("Pushing artifact {} -> {}".format(display_key, remote.spec.url))
    -
    -            if self._push_refs_to_remote(refs, remote):
    -                element.info("Pushed artifact {} -> {}".format(display_key, remote.spec.url))
    -                pushed = True
    -            else:
    -                element.info("Remote ({}) already has {} cached".format(
    -                    remote.spec.url, element._get_brief_display_key()
    -                ))
    -
    -        return pushed
    -
    -    def push_directory(self, project, directory):
    -        """ Push the given virtual directory to all remotes.
    -
    -        Args:
    -            project (Project): The current project
    -            directory (Directory): A virtual directory object to push.
    -
    -        Raises: ArtifactError if no push remotes are configured.
    -        """
    -
    -        if self._has_push_remotes:
    -            push_remotes = [r for r in self._remotes[project] if r.spec.push]
    -        else:
    -            push_remotes = []
    -
    -        if not push_remotes:
    -            raise ArtifactError("CASCache: push_directory was called, but no remote artifact " +
    -                                "servers are configured as push remotes.")
    -
    -        if directory.ref is None:
    -            return
    -
    -        for remote in push_remotes:
    -            remote.init()
    -
    -            self._send_directory(remote, directory.ref)
    +    # push_directory():
    +    #
    +    # Push the given virtual directory to a remote.
    +    #
    +    # Args:
    +    #     remote (CASRemote): The remote to push to
    +    #     directory (Directory): A virtual directory object to push.
    +    #
    +    # Raises:
    +    #     (CASError): if there was an error
    +    #
    +    def push_directory(self, remote, directory):
    +        remote.init()

    -    def push_message(self, project, message):
    +        self._send_directory(remote, directory.ref)

    -        push_remotes = [r for r in self._remotes[project] if r.spec.push]
    +    # push_message():
    +    #
    +    # Push the given protobuf message to a remote.
    +    #
    +    # Args:
    +    #     remote (CASRemote): The remote to push to
    +    #     message (Message): A protobuf message to push.
    +    #
    +    # Raises:
    +    #     (CASError): if there was an error
    +    #
    +    def push_message(self, remote, message):

             message_buffer = message.SerializeToString()
             message_sha = hashlib.sha256(message_buffer)
    @@ -393,17 +342,25 @@ class CASCache(ArtifactCache):
             message_digest.hash = message_sha.hexdigest()
             message_digest.size_bytes = len(message_buffer)

    -        for remote in push_remotes:
    -            remote.init()
    +        remote.init()

    -            with io.BytesIO(message_buffer) as b:
    -                self._send_blob(remote, message_digest, b)
    +        with io.BytesIO(message_buffer) as b:
    +            self._send_blob(remote, message_digest, b)

             return message_digest

    -    def _verify_digest_on_remote(self, remote, digest):
    -        # Check whether ref is already on the server in which case
    -        # there is no need to push the artifact
    +    # verify_digest_on_remote():
    +    #
    +    # Check whether the object is already on the server in which case
    +    # there is no need to upload it.
    +    #
    +    # Args:
    +    #     remote (CASRemote): The remote to check
    +    #     digest (Digest): The object digest.
    +    #
    +    def verify_digest_on_remote(self, remote, digest):
    +        remote.init()
    +
             request = remote_execution_pb2.FindMissingBlobsRequest()
             request.blob_digests.extend([digest])
    
    ... ... @@ -413,24 +370,6 @@ class CASCache(ArtifactCache):
    413 370
     
    
    414 371
             return True
    
    415 372
     
    
    416
    -    def verify_digest_pushed(self, project, digest):
    
    417
    -
    
    418
    -        push_remotes = [r for r in self._remotes[project] if r.spec.push]
    
    419
    -
    
    420
    -        pushed = False
    
    421
    -
    
    422
    -        for remote in push_remotes:
    
    423
    -            remote.init()
    
    424
    -
    
    425
    -            if self._verify_digest_on_remote(remote, digest):
    
    426
    -                pushed = True
    
    427
    -
    
    428
    -        return pushed
    
    429
    -
    
    430
    -    ################################################
    
    431
    -    #                API Private Methods           #
    
    432
    -    ################################################
    
    433
    -
    
    434 373
         # objpath():
    
    435 374
         #
    
    436 375
         # Return the path of an object based on its digest.
    
    ... ... @@ -496,7 +435,7 @@ class CASCache(ArtifactCache):
    496 435
                 pass
    
    497 436
     
    
    498 437
             except OSError as e:
    
    499
    -            raise ArtifactError("Failed to hash object: {}".format(e)) from e
    
    438
    +            raise CASError("Failed to hash object: {}".format(e)) from e
    
    500 439
     
    
    501 440
             return digest
    
    502 441
     
    
    ... ... @@ -537,26 +476,39 @@ class CASCache(ArtifactCache):
    537 476
                     return digest
    
    538 477
     
    
    539 478
             except FileNotFoundError as e:
    
    540
    -            raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
    
    479
    +            raise CASError("Attempt to access unavailable ref: {}".format(e)) from e
    
    541 480
     
    
    542
    -    def update_mtime(self, element, key):
    
    481
    +    # update_mtime()
    
    482
    +    #
    
    483
    +    # Update the mtime of a ref.
    
    484
    +    #
    
    485
    +    # Args:
    
    486
    +    #     ref (str): The ref to update
    
    487
    +    #
    
    488
    +    def update_mtime(self, ref):
    
    543 489
             try:
    
    544
    -            ref = self.get_artifact_fullname(element, key)
    
    545 490
                 os.utime(self._refpath(ref))
    
    546 491
             except FileNotFoundError as e:
    
    547
    -            raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
    
    492
    +            raise CASError("Attempt to access unavailable ref: {}".format(e)) from e
    
    548 493
     
    
    494
    +    # calculate_cache_size()
    
    495
    +    #
    
    496
    +    # Return the real disk usage of the CAS cache.
    
    497
    +    #
    
    498
    +    # Returns:
    
    499
    +    #    (int): The size of the cache.
    
    500
    +    #
    
    549 501
         def calculate_cache_size(self):
    
    550 502
             return utils._get_dir_size(self.casdir)
    
    551 503
     
    
    552
    -    # list_artifacts():
    
    504
    +    # list_refs():
    
    553 505
         #
    
    554
    -    # List cached artifacts in Least Recently Modified (LRM) order.
    
    506
    +    # List refs in Least Recently Modified (LRM) order.
    
    555 507
         #
    
    556 508
         # Returns:
    
    557 509
         #     (list) - A list of refs in LRM order
    
    558 510
         #
    
    559
    -    def list_artifacts(self):
    
    511
    +    def list_refs(self):
    
    560 512
             # string of: /path/to/repo/refs/heads
    
    561 513
             ref_heads = os.path.join(self.casdir, 'refs', 'heads')
    
    562 514
     
    
    ... ... @@ -571,7 +523,7 @@ class CASCache(ArtifactCache):
    571 523
                     mtimes.append(os.path.getmtime(ref_path))
    
    572 524
     
    
    573 525
             # NOTE: Sorted will sort from earliest to latest, thus the
    
    574
    -        # first element of this list will be the file modified earliest.
    
    526
    +        # first ref in this list will be the one modified earliest.
    
    575 527
             return [ref for _, ref in sorted(zip(mtimes, refs))]
    
    576 528
     
    
    577 529
         # remove():
    
    ... ... @@ -590,28 +542,10 @@ class CASCache(ArtifactCache):
    590 542
         #
    
    591 543
         def remove(self, ref, *, defer_prune=False):
    
    592 544
     
    
    593
    -        # Remove extract if not used by other ref
    
    594
    -        tree = self.resolve_ref(ref)
    
    595
    -        ref_name, ref_hash = os.path.split(ref)
    
    596
    -        extract = os.path.join(self.extractdir, ref_name, tree.hash)
    
    597
    -        keys_file = os.path.join(extract, 'meta', 'keys.yaml')
    
    598
    -        if os.path.exists(keys_file):
    
    599
    -            keys_meta = _yaml.load(keys_file)
    
    600
    -            keys = [keys_meta['strong'], keys_meta['weak']]
    
    601
    -            remove_extract = True
    
    602
    -            for other_hash in keys:
    
    603
    -                if other_hash == ref_hash:
    
    604
    -                    continue
    
    605
    -                remove_extract = False
    
    606
    -                break
    
    607
    -
    
    608
    -            if remove_extract:
    
    609
    -                utils._force_rmtree(extract)
    
    610
    -
    
    611 545
             # Remove cache ref
    
    612 546
             refpath = self._refpath(ref)
    
    613 547
             if not os.path.exists(refpath):
    
    614
    -            raise ArtifactError("Could not find artifact for ref '{}'".format(ref))
    
    548
    +            raise CASError("Could not find ref '{}'".format(ref))
    
    615 549
     
    
    616 550
             os.unlink(refpath)
    
    617 551
     
    
    ... ... @@ -721,7 +655,7 @@ class CASCache(ArtifactCache):
    721 655
                     # The process serving the socket can't be cached anyway
    
    722 656
                     pass
    
    723 657
                 else:
    
    724
    -                raise ArtifactError("Unsupported file type for {}".format(full_path))
    
    658
    +                raise CASError("Unsupported file type for {}".format(full_path))
    
    725 659
     
    
    726 660
             return self.add_object(digest=dir_digest,
    
    727 661
                                    buffer=directory.SerializeToString())
    
    ... ... @@ -740,7 +674,7 @@ class CASCache(ArtifactCache):
    740 674
                 if dirnode.name == name:
    
    741 675
                     return dirnode.digest
    
    742 676
     
    
    743
    -        raise ArtifactError("Subdirectory {} not found".format(name))
    
    677
    +        raise CASError("Subdirectory {} not found".format(name))
    
    744 678
     
    
    745 679
         def _diff_trees(self, tree_a, tree_b, *, added, removed, modified, path=""):
    
    746 680
             dir_a = remote_execution_pb2.Directory()
    
    ... ... @@ -812,29 +746,6 @@ class CASCache(ArtifactCache):
    812 746
             for dirnode in directory.directories:
    
    813 747
                 self._reachable_refs_dir(reachable, dirnode.digest)
    
    814 748
     
    
    815
    -    def _initialize_remote(self, remote_spec, q):
    
    816
    -        try:
    
    817
    -            remote = _CASRemote(remote_spec)
    
    818
    -            remote.init()
    
    819
    -
    
    820
    -            request = buildstream_pb2.StatusRequest()
    
    821
    -            response = remote.ref_storage.Status(request)
    
    822
    -
    
    823
    -            if remote_spec.push and not response.allow_updates:
    
    824
    -                q.put('Artifact server does not allow push')
    
    825
    -            else:
    
    826
    -                # No error
    
    827
    -                q.put(None)
    
    828
    -
    
    829
    -        except grpc.RpcError as e:
    
    830
    -            # str(e) is too verbose for errors reported to the user
    
    831
    -            q.put(e.details())
    
    832
    -
    
    833
    -        except Exception as e:               # pylint: disable=broad-except
    
    834
    -            # Whatever happens, we need to return it to the calling process
    
    835
    -            #
    
    836
    -            q.put(str(e))
    
    837
    -
    
    838 749
         def _required_blobs(self, directory_digest):
    
    839 750
             # parse directory, and recursively add blobs
    
    840 751
             d = remote_execution_pb2.Digest()
    
    ... ... @@ -1080,7 +991,7 @@ class CASCache(ArtifactCache):
    1080 991
     
    
    1081 992
     # Represents a single remote CAS cache.
    
    1082 993
     #
    
    1083
    -class _CASRemote():
    
    994
    +class CASRemote():
    
    1084 995
         def __init__(self, spec):
    
    1085 996
             self.spec = spec
    
    1086 997
             self._initialized = False
    
    ... ... @@ -1125,7 +1036,7 @@ class _CASRemote():
    1125 1036
                                                                certificate_chain=client_cert_bytes)
    
    1126 1037
                     self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
    
    1127 1038
                 else:
    
    1128
    -                raise ArtifactError("Unsupported URL: {}".format(self.spec.url))
    
    1039
    +                raise CASError("Unsupported URL: {}".format(self.spec.url))
    
    1129 1040
     
    
    1130 1041
                 self.bytestream = bytestream_pb2_grpc.ByteStreamStub(self.channel)
    
    1131 1042
                 self.cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self.channel)
    
    ... ... @@ -1203,10 +1114,10 @@ class _CASBatchRead():
    1203 1114
     
    
    1204 1115
             for response in batch_response.responses:
    
    1205 1116
                 if response.status.code != code_pb2.OK:
    
    1206
    -                raise ArtifactError("Failed to download blob {}: {}".format(
    
    1117
    +                raise CASError("Failed to download blob {}: {}".format(
    
    1207 1118
                         response.digest.hash, response.status.code))
    
    1208 1119
                 if response.digest.size_bytes != len(response.data):
    
    1209
    -                raise ArtifactError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
    
    1120
    +                raise CASError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
    
    1210 1121
                         response.digest.hash, response.digest.size_bytes, len(response.data)))
    
    1211 1122
     
    
    1212 1123
                 yield (response.digest, response.data)
    
    ... ... @@ -1248,7 +1159,7 @@ class _CASBatchUpdate():
    1248 1159
     
    
    1249 1160
             for response in batch_response.responses:
    
    1250 1161
                 if response.status.code != code_pb2.OK:
    
    1251
    -                raise ArtifactError("Failed to upload blob {}: {}".format(
    
    1162
    +                raise CASError("Failed to upload blob {}: {}".format(
    
    1252 1163
                         response.digest.hash, response.status.code))
    
    1253 1164
     
    
    1254 1165
     
    
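    Taken together, these changes make the push_* methods operate on a single
    CASRemote at a time: remote selection and iteration move to the caller, and
    each method calls remote.init() itself. A minimal sketch of the new calling
    convention, assuming CASCache and CASRemote are imported from this module
    and that `spec`, `directory` and `message` are supplied by the caller
    (these names are illustrative, not part of the diff):

        # Sketch: push one virtual directory and one protobuf message to a
        # single push-capable remote, then confirm the message blob arrived.
        cas = CASCache('/path/to/artifactdir')   # hypothetical local cache root
        remote = CASRemote(spec)                 # spec must have push enabled

        cas.push_directory(remote, directory)    # init() happens inside

        digest = cas.push_message(remote, message)
        if cas.verify_digest_on_remote(remote, digest):
            pass  # the message blob is known to the server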

  • buildstream/_artifactcache/casserver.py
    ... ... @@ -32,8 +32,9 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remo
    32 32
     from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
    
    33 33
     from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
    
    34 34
     
    
    35
    -from .._exceptions import ArtifactError
    
    36
    -from .._context import Context
    
    35
    +from .._exceptions import CASError
    
    36
    +
    
    37
    +from .cascache import CASCache
    
    37 38
     
    
    38 39
     
    
    39 40
     # The default limit for gRPC messages is 4 MiB.
    
    ... ... @@ -55,26 +56,23 @@ class ArtifactTooLargeException(Exception):
    55 56
     #     enable_push (bool): Whether to allow blob uploads and artifact updates
    
    56 57
     #
    
    57 58
     def create_server(repo, *, enable_push):
    
    58
    -    context = Context()
    
    59
    -    context.artifactdir = os.path.abspath(repo)
    
    60
    -
    
    61
    -    artifactcache = context.artifactcache
    
    59
    +    cas = CASCache(os.path.abspath(repo))
    
    62 60
     
    
    63 61
         # Use max_workers default from Python 3.5+
    
    64 62
         max_workers = (os.cpu_count() or 1) * 5
    
    65 63
         server = grpc.server(futures.ThreadPoolExecutor(max_workers))
    
    66 64
     
    
    67 65
         bytestream_pb2_grpc.add_ByteStreamServicer_to_server(
    
    68
    -        _ByteStreamServicer(artifactcache, enable_push=enable_push), server)
    
    66
    +        _ByteStreamServicer(cas, enable_push=enable_push), server)
    
    69 67
     
    
    70 68
         remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(
    
    71
    -        _ContentAddressableStorageServicer(artifactcache, enable_push=enable_push), server)
    
    69
    +        _ContentAddressableStorageServicer(cas, enable_push=enable_push), server)
    
    72 70
     
    
    73 71
         remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
    
    74 72
             _CapabilitiesServicer(), server)
    
    75 73
     
    
    76 74
         buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(
    
    77
    -        _ReferenceStorageServicer(artifactcache, enable_push=enable_push), server)
    
    75
    +        _ReferenceStorageServicer(cas, enable_push=enable_push), server)
    
    78 76
     
    
    79 77
         return server
    
    80 78
     
    
    ... ... @@ -333,7 +331,7 @@ class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
    333 331
     
    
    334 332
                 response.digest.hash = tree.hash
    
    335 333
                 response.digest.size_bytes = tree.size_bytes
    
    336
    -        except ArtifactError:
    
    334
    +        except CASError:
    
    337 335
                 context.set_code(grpc.StatusCode.NOT_FOUND)
    
    338 336
     
    
    339 337
             return response
    
    ... ... @@ -437,7 +435,7 @@ def _clean_up_cache(cas, object_size):
    437 435
             return 0
    
    438 436
     
    
    439 437
         # obtain a list of LRP artifacts
    
    440
    -    LRP_artifacts = cas.list_artifacts()
    
    438
    +    LRP_artifacts = cas.list_refs()
    
    441 439
     
    
    442 440
         removed_size = 0  # in bytes
    
    443 441
         while object_size - removed_size > free_disk_space:
    
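    create_server() now builds its own CASCache directly from the repository
    path instead of constructing a whole Context, so a server can be stood up
    with nothing but a directory. A rough usage sketch (the address and path
    are placeholders, not part of the diff):

        # Sketch: serve a bare CAS repository over gRPC.
        server = create_server('/srv/cas-repo', enable_push=True)
        server.add_insecure_port('localhost:50051')
        server.start()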

  • buildstream/_context.py
    ... ... @@ -31,7 +31,6 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
    31 31
     from ._message import Message, MessageType
    
    32 32
     from ._profile import Topics, profile_start, profile_end
    
    33 33
     from ._artifactcache import ArtifactCache
    
    34
    -from ._artifactcache.cascache import CASCache
    
    35 34
     from ._workspaces import Workspaces
    
    36 35
     from .plugin import _plugin_lookup
    
    37 36
     
    
    ... ... @@ -233,7 +232,7 @@ class Context():
    233 232
         @property
    
    234 233
         def artifactcache(self):
    
    235 234
             if not self._artifactcache:
    
    236
    -            self._artifactcache = CASCache(self)
    
    235
    +            self._artifactcache = ArtifactCache(self)
    
    237 236
     
    
    238 237
             return self._artifactcache
    
    239 238
     
    
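    With this change, context.artifactcache always yields the ArtifactCache
    front end, which in turn owns the CASCache; code that needs raw object
    storage reaches through its .cas attribute, as _casbaseddirectory.py does
    below. Schematically (a sketch, not code from the diff):

        artifacts = context.artifactcache   # ArtifactCache: remotes and policy
        cas = artifacts.cas                 # CASCache: objects, refs, digests
        path = cas.objpath(digest)          # raw object access goes via the CAS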

  • buildstream/_elementfactory.py
    ... ... @@ -47,7 +47,6 @@ class ElementFactory(PluginContext):
    47 47
         # Args:
    
    48 48
         #    context (object): The Context object for processing
    
    49 49
         #    project (object): The project object
    
    50
    -    #    artifacts (ArtifactCache): The artifact cache
    
    51 50
         #    meta (object): The loaded MetaElement
    
    52 51
         #
    
    53 52
         # Returns: A newly created Element object of the appropriate kind
    
    ... ... @@ -56,9 +55,9 @@ class ElementFactory(PluginContext):
    56 55
         #    PluginError (if the kind lookup failed)
    
    57 56
         #    LoadError (if the element itself took issue with the config)
    
    58 57
         #
    
    59
    -    def create(self, context, project, artifacts, meta):
    
    58
    +    def create(self, context, project, meta):
    
    60 59
             element_type, default_config = self.lookup(meta.kind)
    
    61
    -        element = element_type(context, project, artifacts, meta, default_config)
    
    60
    +        element = element_type(context, project, meta, default_config)
    
    62 61
             version = self._format_versions.get(meta.kind, 0)
    
    63 62
             self._assert_plugin_format(element, version)
    
    64 63
             return element

  • buildstream/_exceptions.py
    ... ... @@ -90,6 +90,7 @@ class ErrorDomain(Enum):
    90 90
         APP = 12
    
    91 91
         STREAM = 13
    
    92 92
         VIRTUAL_FS = 14
    
    93
    +    CAS = 15
    
    93 94
     
    
    94 95
     
    
    95 96
     # BstError is an internal base exception class for BuildStream
    
    ... ... @@ -274,6 +275,15 @@ class ArtifactError(BstError):
    274 275
             super().__init__(message, detail=detail, domain=ErrorDomain.ARTIFACT, reason=reason, temporary=True)
    
    275 276
     
    
    276 277
     
    
    278
    +# CASError
    
    279
    +#
    
    280
    +# Raised when errors are encountered in the CAS
    
    281
    +#
    
    282
    +class CASError(BstError):
    
    283
    +    def __init__(self, message, *, detail=None, reason=None, temporary=False):
    
    284
    +        super().__init__(message, detail=detail, domain=ErrorDomain.CAS, reason=reason, temporary=temporary)
    
    285
    +
    
    286
    +
    
    277 287
     # PipelineError
    
    278 288
     #
    
    279 289
     # Raised from pipeline operations
    
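    Low-level CAS failures are now reported as CASError (domain CAS) rather
    than ArtifactError, so artifact-level callers translate at the boundary.
    A hedged sketch of the expected pattern (the surrounding call is
    illustrative):

        # Sketch: convert a CAS-level failure into an artifact-level one.
        try:
            cas.update_mtime(ref)
        except CASError as e:
            raise ArtifactError("Artifact no longer cached: {}".format(e)) from e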

  • buildstream/_loader/loader.py
    ... ... @@ -537,7 +537,7 @@ class Loader():
    537 537
                 raise LoadError(LoadErrorReason.INVALID_DATA,
    
    538 538
                                 "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
    
    539 539
     
    
    540
    -        element = Element._new_from_meta(meta_element, self._context.artifactcache)
    
    540
    +        element = Element._new_from_meta(meta_element)
    
    541 541
             element._preflight()
    
    542 542
     
    
    543 543
             sources = list(element.sources())
    

  • buildstream/_pipeline.py
    ... ... @@ -106,7 +106,7 @@ class Pipeline():
    106 106
     
    
    107 107
             profile_start(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, '-') for t in targets))
    
    108 108
     
    
    109
    -        elements = self._project.load_elements(targets, self._artifacts,
    
    109
    +        elements = self._project.load_elements(targets,
    
    110 110
                                                    rewritable=rewritable,
    
    111 111
                                                    fetch_subprojects=fetch_subprojects)
    
    112 112
     
    

  • buildstream/_project.py
    ... ... @@ -224,18 +224,17 @@ class Project():
    224 224
         # Instantiate and return an element
    
    225 225
         #
    
    226 226
         # Args:
    
    227
    -    #    artifacts (ArtifactCache): The artifact cache
    
    228 227
         #    meta (MetaElement): The loaded MetaElement
    
    229 228
         #    first_pass (bool): Whether to use first pass configuration (for junctions)
    
    230 229
         #
    
    231 230
         # Returns:
    
    232 231
         #    (Element): A newly created Element object of the appropriate kind
    
    233 232
         #
    
    234
    -    def create_element(self, artifacts, meta, *, first_pass=False):
    
    233
    +    def create_element(self, meta, *, first_pass=False):
    
    235 234
             if first_pass:
    
    236
    -            return self.first_pass_config.element_factory.create(self._context, self, artifacts, meta)
    
    235
    +            return self.first_pass_config.element_factory.create(self._context, self, meta)
    
    237 236
             else:
    
    238
    -            return self.config.element_factory.create(self._context, self, artifacts, meta)
    
    237
    +            return self.config.element_factory.create(self._context, self, meta)
    
    239 238
     
    
    240 239
         # create_source()
    
    241 240
         #
    
    ... ... @@ -305,7 +304,6 @@ class Project():
    305 304
         #
    
    306 305
         # Args:
    
    307 306
         #    targets (list): Target names
    
    308
    -    #    artifacts (ArtifactCache): Artifact cache
    
    309 307
         #    rewritable (bool): Whether the loaded files should be rewritable
    
    310 308
         #                       this is a bit more expensive due to deep copies
    
    311 309
         #    fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
    
    ... ... @@ -314,7 +312,7 @@ class Project():
    314 312
         # Returns:
    
    315 313
         #    (list): A list of loaded Element
    
    316 314
         #
    
    317
    -    def load_elements(self, targets, artifacts, *,
    
    315
    +    def load_elements(self, targets, *,
    
    318 316
                           rewritable=False, fetch_subprojects=False):
    
    319 317
             with self._context.timed_activity("Loading elements", silent_nested=True):
    
    320 318
                 meta_elements = self.loader.load(targets, rewritable=rewritable,
    
    ... ... @@ -323,7 +321,7 @@ class Project():
    323 321
     
    
    324 322
             with self._context.timed_activity("Resolving elements"):
    
    325 323
                 elements = [
    
    326
    -                Element._new_from_meta(meta, artifacts)
    
    324
    +                Element._new_from_meta(meta)
    
    327 325
                     for meta in meta_elements
    
    328 326
                 ]
    
    329 327
     
    
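    The dropped `artifacts` parameter here is the same plumbing change that
    runs through the loader, pipeline, element factory and Element below: the
    artifact cache now travels inside the Context that every Element already
    holds. The before/after shape of a load, schematically:

        # Before: the cache was threaded through as an argument.
        # elements = project.load_elements(targets, artifacts, rewritable=False)

        # After: only meta data is threaded through; Element.__init__ reads
        # context.artifactcache itself.
        elements = project.load_elements(targets, rewritable=False)
        element = Element._new_from_meta(meta)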

  • buildstream/element.py
    ... ... @@ -174,7 +174,7 @@ class Element(Plugin):
    174 174
         *Since: 1.4*
    
    175 175
         """
    
    176 176
     
    
    177
    -    def __init__(self, context, project, artifacts, meta, plugin_conf):
    
    177
    +    def __init__(self, context, project, meta, plugin_conf):
    
    178 178
     
    
    179 179
             self.__cache_key_dict = None            # Dict for cache key calculation
    
    180 180
             self.__cache_key = None                 # Our cached cache key
    
    ... ... @@ -199,7 +199,7 @@ class Element(Plugin):
    199 199
             self.__sources = []                     # List of Sources
    
    200 200
             self.__weak_cache_key = None            # Our cached weak cache key
    
    201 201
             self.__strict_cache_key = None          # Our cached cache key for strict builds
    
    202
    -        self.__artifacts = artifacts            # Artifact cache
    
    202
    +        self.__artifacts = context.artifactcache  # Artifact cache
    
    203 203
             self.__consistency = Consistency.INCONSISTENT  # Cached overall consistency state
    
    204 204
             self.__strong_cached = None             # Whether we have a cached artifact
    
    205 205
             self.__weak_cached = None               # Whether we have a cached artifact
    
    ... ... @@ -872,14 +872,13 @@ class Element(Plugin):
    872 872
         # and its dependencies from a meta element.
    
    873 873
         #
    
    874 874
         # Args:
    
    875
    -    #    artifacts (ArtifactCache): The artifact cache
    
    876 875
         #    meta (MetaElement): The meta element
    
    877 876
         #
    
    878 877
         # Returns:
    
    879 878
         #    (Element): A newly created Element instance
    
    880 879
         #
    
    881 880
         @classmethod
    
    882
    -    def _new_from_meta(cls, meta, artifacts):
    
    881
    +    def _new_from_meta(cls, meta):
    
    883 882
     
    
    884 883
             if not meta.first_pass:
    
    885 884
                 meta.project.ensure_fully_loaded()
    
    ... ... @@ -887,7 +886,7 @@ class Element(Plugin):
    887 886
             if meta in cls.__instantiated_elements:
    
    888 887
                 return cls.__instantiated_elements[meta]
    
    889 888
     
    
    890
    -        element = meta.project.create_element(artifacts, meta, first_pass=meta.first_pass)
    
    889
    +        element = meta.project.create_element(meta, first_pass=meta.first_pass)
    
    891 890
             cls.__instantiated_elements[meta] = element
    
    892 891
     
    
    893 892
             # Instantiate sources
    
    ... ... @@ -904,10 +903,10 @@ class Element(Plugin):
    904 903
     
    
    905 904
             # Instantiate dependencies
    
    906 905
             for meta_dep in meta.dependencies:
    
    907
    -            dependency = Element._new_from_meta(meta_dep, artifacts)
    
    906
    +            dependency = Element._new_from_meta(meta_dep)
    
    908 907
                 element.__runtime_dependencies.append(dependency)
    
    909 908
             for meta_dep in meta.build_dependencies:
    
    910
    -            dependency = Element._new_from_meta(meta_dep, artifacts)
    
    909
    +            dependency = Element._new_from_meta(meta_dep)
    
    911 910
                 element.__build_dependencies.append(dependency)
    
    912 911
     
    
    913 912
             return element
    
    ... ... @@ -2057,7 +2056,7 @@ class Element(Plugin):
    2057 2056
                     'sources': [s._get_unique_key(workspace is None) for s in self.__sources],
    
    2058 2057
                     'workspace': '' if workspace is None else workspace.get_key(self._get_project()),
    
    2059 2058
                     'public': self.__public,
    
    2060
    -                'cache': type(self.__artifacts).__name__
    
    2059
    +                'cache': 'CASCache'
    
    2061 2060
                 }
    
    2062 2061
     
    
    2063 2062
                 self.__cache_key_dict['fatal-warnings'] = sorted(project._fatal_warnings)
    
    ... ... @@ -2180,6 +2179,7 @@ class Element(Plugin):
    2180 2179
                                         stderr=stderr,
    
    2181 2180
                                         config=config,
    
    2182 2181
                                         server_url=self.__remote_execution_url,
    
    2182
    +                                    bare_directory=bare_directory,
    
    2183 2183
                                         allow_real_directory=False)
    
    2184 2184
                 yield sandbox
    
    2185 2185
     
    

  • buildstream/plugins/sources/pip.py
    ... ... @@ -96,7 +96,7 @@ _PYTHON_VERSIONS = [
    96 96
     # Names of source distribution archives must be of the form
    
    97 97
     # '%{package-name}-%{version}.%{extension}'.
    
    98 98
     _SDIST_RE = re.compile(
    
    99
    -    r'^([a-zA-Z0-9]+?)-(.+).(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$',
    
    99
    +    r'^([\w.-]+?)-((?:[\d.]+){2,})\.(?:tar|tar.bz2|tar.gz|tar.xz|tar.Z|zip)$',
    
    100 100
         re.IGNORECASE)
    
    101 101
     
    
    102 102
     
    
    ... ... @@ -225,12 +225,27 @@ class PipSource(Source):
    225 225
         def _parse_sdist_names(self, basedir):
    
    226 226
             reqs = []
    
    227 227
             for f in os.listdir(basedir):
    
    228
    -            pkg_match = _SDIST_RE.match(f)
    
    229
    -            if pkg_match:
    
    230
    -                reqs.append(pkg_match.groups())
    
    228
    +            pkg = _match_package_name(f)
    
    229
    +            if pkg is not None:
    
    230
    +                reqs.append(pkg)
    
    231 231
     
    
    232 232
             return sorted(reqs)
    
    233 233
     
    
    234 234
     
    
    235
    +# Extract the package name and version of a source distribution
    
    236
    +#
    
    237
    +# Args:
    
    238
    +#    filename (str): Filename of the source distribution
    
    239
    +#
    
    240
    +# Returns:
    
    241
    +#    (tuple): A tuple of (package_name, version)
    
    242
    +#
    
    243
    +def _match_package_name(filename):
    
    244
    +    pkg_match = _SDIST_RE.match(filename)
    
    245
    +    if pkg_match is None:
    
    246
    +        return None
    
    247
    +    return pkg_match.groups()
    
    248
    +
    
    249
    +
    
    235 250
     def setup():
    
    236 251
         return PipSource
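
    The tightened _SDIST_RE now admits '-', '.' and '_' in package names but
    requires a dotted, numeric version, and _match_package_name() wraps the
    match. A quick sanity check against made-up filenames (example names only,
    results follow from the new pattern):

        _match_package_name('python-package_underscore-1.2.3.tar.gz')
        # -> ('python-package_underscore', '1.2.3')
        _match_package_name('dotted.package-0.9.zip')
        # -> ('dotted.package', '0.9')
        _match_package_name('no-version.tar.gz')
        # -> None, the version component must be numeric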

  • buildstream/storage/_casbaseddirectory.py
    ... ... @@ -30,7 +30,6 @@ See also: :ref:`sandboxing`.
    30 30
     from collections import OrderedDict
    
    31 31
     
    
    32 32
     import os
    
    33
    -import tempfile
    
    34 33
     import stat
    
    35 34
     
    
    36 35
     from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    
    ... ... @@ -51,6 +50,162 @@ class IndexEntry():
    51 50
             self.modified = modified
    
    52 51
     
    
    53 52
     
    
    53
    +class ResolutionException(VirtualDirectoryError):
    
    54
    +    """ Superclass of all exceptions that can be raised by
    
    55
    +    CasBasedDirectory._resolve. Should not be used outside this module. """
    
    56
    +    pass
    
    57
    +
    
    58
    +
    
    59
    +class InfiniteSymlinkException(ResolutionException):
    
    60
    +    """ Raised when an infinite symlink loop is found. """
    
    61
    +    pass
    
    62
    +
    
    63
    +
    
    64
    +class AbsoluteSymlinkException(ResolutionException):
    
    65
    +    """Raised if we try to follow an absolute symlink (i.e. one whose
    
    66
    +    target starts with the path separator) and we have disallowed
    
    67
    +    following such symlinks. """
    
    68
    +    pass
    
    69
    +
    
    70
    +
    
    71
    +class _Resolver():
    
    72
    +    """A class for resolving symlinks inside CAS-based directories. As
    
    73
    +    well as providing a namespace for some functions, this also
    
    74
    +    contains two flags which are constant throughout one resolution
    
    75
    +    operation and the 'seen_objects' list used to detect infinite
    
    76
    +    symlink loops.
    
    77
    +
    
    78
    +    """
    
    79
    +
    
    80
    +    def __init__(self, absolute_symlinks_resolve=True, force_create=False):
    
    81
    +        self.absolute_symlinks_resolve = absolute_symlinks_resolve
    
    82
    +        self.force_create = force_create
    
    83
    +        self.seen_objects = []
    
    84
    +
    
    85
    +    def resolve(self, name, directory):
    
    86
    +        """Resolves any name to an object. If the name points to a symlink in
    
    87
    +        the directory, it returns the thing it points to,
    
    88
    +        recursively.
    
    89
    +
    
    90
    +        Returns a CasBasedDirectory, FileNode or None. None indicates
    
    91
    +        either that 'name' does not exist in this directory, or is a
    
    92
    +        symlink chain which points to a nonexistent name (broken
    
    93
    +        symlink).
    
    94
    +
    
    95
    +        Raises:
    
    96
    +        - InfiniteSymlinkException if 'name' points to an infinite symlink loop.
    
    97
    +        - AbsoluteSymlinkException if 'name' points to an absolute symlink and absolute_symlinks_resolve is False.
    
    98
    +
    
    99
    +        If force_create is set, this will attempt to create directories to make symlinks and directories resolve.
    
    100
    +        Files present in symlink target paths will also be removed and replaced with directories.
    
    101
    +        If force_create is off, this will never alter 'directory'.
    
    102
    +
    
    103
    +        """
    
    104
    +
    
    105
    +        # First check for nonexistent things or 'normal' objects and return them
    
    106
    +        if name not in directory.index:
    
    107
    +            return None
    
    108
    +        index_entry = directory.index[name]
    
    109
    +        if isinstance(index_entry.buildstream_object, Directory):
    
    110
    +            return index_entry.buildstream_object
    
    111
    +        elif isinstance(index_entry.pb_object, remote_execution_pb2.FileNode):
    
    112
    +            return index_entry.pb_object
    
    113
    +
    
    114
    +        # Now we must be dealing with a symlink.
    
    115
    +        assert isinstance(index_entry.pb_object, remote_execution_pb2.SymlinkNode)
    
    116
    +
    
    117
    +        symlink_object = index_entry.pb_object
    
    118
    +        if symlink_object in self.seen_objects:
    
    119
    +            # Infinite symlink loop detected
    
    120
    +            message = ("Infinite symlink loop found during resolution. " +
    
    121
    +                       "First repeated element is {}".format(name))
    
    122
    +            raise InfiniteSymlinkException(message=message)
    
    123
    +
    
    124
    +        self.seen_objects.append(symlink_object)
    
    125
    +
    
    126
    +        components = symlink_object.target.split(CasBasedDirectory._pb2_path_sep)
    
    127
    +        absolute = symlink_object.target.startswith(CasBasedDirectory._pb2_absolute_path_prefix)
    
    128
    +
    
    129
    +        if absolute:
    
    130
    +            if self.absolute_symlinks_resolve:
    
    131
    +                directory = directory.find_root()
    
    132
    +                # Discard the first empty element
    
    133
    +                components.pop(0)
    
    134
    +            else:
    
    135
    +                # Unresolvable absolute symlink
    
    136
    +                message = "{} is an absolute symlink, which was disallowed during resolution".format(name)
    
    137
    +                raise AbsoluteSymlinkException(message=message)
    
    138
    +
    
    139
    +        resolution = directory
    
    140
    +        while components and isinstance(resolution, CasBasedDirectory):
    
    141
    +            c = components.pop(0)
    
    142
    +            directory = resolution
    
    143
    +
    
    144
    +            try:
    
    145
    +                resolution = self._resolve_path_component(c, directory, components)
    
    146
    +            except ResolutionException as original:
    
    147
    +                errormsg = ("Reached a file called {} while trying to resolve a symlink; " +
    
    148
    +                            "cannot proceed. The remaining path components are {}.")
    
    149
    +                raise ResolutionException(errormsg.format(c, components)) from original
    
    150
    +
    
    151
    +        return resolution
    
    152
    +
    
    153
    +    def _resolve_path_component(self, c, directory, components_remaining):
    
    154
    +        if c == ".":
    
    155
    +            resolution = directory
    
    156
    +        elif c == "..":
    
    157
    +            if directory.parent is not None:
    
    158
    +                resolution = directory.parent
    
    159
    +            else:
    
    160
    +                # If directory.parent *is* None, this is an attempt to
    
    161
    +                # access '..' from the root, which is valid under
    
    162
    +                # POSIX; it just returns the root.
    
    163
    +                resolution = directory
    
    164
    +        elif c in directory.index:
    
    165
    +            try:
    
    166
    +                resolution = self._resolve_through_files(c, directory, components_remaining)
    
    167
    +            except ResolutionException as original:
    
    168
    +                errormsg = ("Reached a file called {} while trying to resolve a symlink; " +
    
    169
    +                            "cannot proceed. The remaining path components are {}.")
    
    170
    +                raise ResolutionException(errormsg.format(c, components_remaining)) from original
    
    171
    +        else:
    
    172
    +            # c is not in our index
    
    173
    +            if self.force_create:
    
    174
    +                resolution = directory.descend(c, create=True)
    
    175
    +            else:
    
    176
    +                resolution = None
    
    177
    +        return resolution
    
    178
    +
    
    179
    +    def _resolve_through_files(self, c, directory, require_traversable):
    
    180
    +        """A wrapper to resolve() which deals with files being found
    
    181
    +        in the middle of paths, for example trying to resolve a symlink
    
    182
    +        which points to /usr/lib64/libfoo when 'lib64' is a file.
    
    183
    +
    
    184
    +        require_traversable: If this is True, never return a file
    
    185
    +        node.  Instead, if force_create is set, destroy the file node,
    
    186
    +        then create and return a normal directory in its place. If
    
    187
    +        force_create is off, raises ResolutionException.
    
    188
    +
    
    189
    +        """
    
    190
    +        resolved_thing = self.resolve(c, directory)
    
    191
    +
    
    192
    +        if isinstance(resolved_thing, remote_execution_pb2.FileNode):
    
    193
    +            if require_traversable:
    
    194
    +                # We have components still to resolve, but one of the path components
    
    195
    +                # is a file.
    
    196
    +                if self.force_create:
    
    197
    +                    directory.delete_entry(c)
    
    198
    +                    resolved_thing = directory.descend(c, create=True)
    
    199
    +                else:
    
    200
    +                    # This is a signal that we hit a file, but don't
    
    201
    +                    # have the data to give a proper message, so the
    
    202
    +                    # caller should reraise this with a proper
    
    203
    +                    # description.
    
    204
    +                    raise ResolutionException(message="")
    
    205
    +
    
    206
    +        return resolved_thing
    
    207
    +
    
    208
    +
    
    54 209
     # CasBasedDirectory intentionally doesn't call its superclass constructor,
    
    55 210
     # which is meant to be unimplemented.
    
    56 211
     # pylint: disable=super-init-not-called
    
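    The _Resolver above replaces the old ad-hoc symlink walk with an explicit
    object that carries its policy (the two constructor flags) and its loop
    detection state (seen_objects); CasBasedDirectory._resolve, further down,
    is the intended entry point. A sketch of driving it directly, with `name`
    and `directory` supplied by the caller:

        # Sketch: resolve a name without ever mutating the tree, refusing
        # absolute symlink targets.
        resolver = _Resolver(absolute_symlinks_resolve=False, force_create=False)
        try:
            target = resolver.resolve(name, directory)
        except InfiniteSymlinkException:
            ...   # symlink cycle detected
        except AbsoluteSymlinkException:
            ...   # absolute target disallowed by this policy
        if target is None:
            ...   # nonexistent name or broken symlink chain
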
    ... ... @@ -79,7 +234,7 @@ class CasBasedDirectory(Directory):
    79 234
             self.filename = filename
    
    80 235
             self.common_name = common_name
    
    81 236
             self.pb2_directory = remote_execution_pb2.Directory()
    
    82
    -        self.cas_cache = context.artifactcache
    
    237
    +        self.cas_cache = context.artifactcache.cas
    
    83 238
             if ref:
    
    84 239
                 with open(self.cas_cache.objpath(ref), 'rb') as f:
    
    85 240
                     self.pb2_directory.ParseFromString(f.read())
    
    ... ... @@ -168,29 +323,34 @@ class CasBasedDirectory(Directory):
    168 323
             self.index[name] = IndexEntry(dirnode, buildstream_object=newdir)
    
    169 324
             return newdir
    
    170 325
     
    
    171
    -    def _add_new_file(self, basename, filename):
    
    326
    +    def _add_file(self, basename, filename, modified=False):
    
    172 327
             filenode = self.pb2_directory.files.add()
    
    173 328
             filenode.name = filename
    
    174 329
             self.cas_cache.add_object(digest=filenode.digest, path=os.path.join(basename, filename))
    
    175 330
             is_executable = os.access(os.path.join(basename, filename), os.X_OK)
    
    176 331
             filenode.is_executable = is_executable
    
    177
    -        self.index[filename] = IndexEntry(filenode, modified=(filename in self.index))
    
    332
    +        self.index[filename] = IndexEntry(filenode, modified=modified or filename in self.index)
    
    178 333
     
    
    179
    -    def _add_new_link(self, basename, filename):
    
    180
    -        existing_link = self._find_pb2_entry(filename)
    
    334
    +    def _copy_link_from_filesystem(self, basename, filename):
    
    335
    +        self._add_new_link_direct(filename, os.readlink(os.path.join(basename, filename)))
    
    336
    +
    
    337
    +    def _add_new_link_direct(self, name, target):
    
    338
    +        existing_link = self._find_pb2_entry(name)
    
    181 339
             if existing_link:
    
    182 340
                 symlinknode = existing_link
    
    183 341
             else:
    
    184 342
                 symlinknode = self.pb2_directory.symlinks.add()
    
    185
    -        symlinknode.name = filename
    
    343
    +        assert isinstance(symlinknode, remote_execution_pb2.SymlinkNode)
    
    344
    +        symlinknode.name = name
    
    186 345
             # A symlink node has no digest.
    
    187
    -        symlinknode.target = os.readlink(os.path.join(basename, filename))
    
    188
    -        self.index[filename] = IndexEntry(symlinknode, modified=(existing_link is not None))
    
    346
    +        symlinknode.target = target
    
    347
    +        self.index[name] = IndexEntry(symlinknode, modified=(existing_link is not None))
    
    189 348
     
    
    190 349
         def delete_entry(self, name):
    
    191 350
             for collection in [self.pb2_directory.files, self.pb2_directory.symlinks, self.pb2_directory.directories]:
    
    192
    -            if name in collection:
    
    193
    -                collection.remove(name)
    
    351
    +            for thing in collection:
    
    352
    +                if thing.name == name:
    
    353
    +                    collection.remove(thing)
    
    194 354
             if name in self.index:
    
    195 355
                 del self.index[name]
    
    196 356
     
    
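    The delete_entry() rewrite matters because the pb2 collections are
    protobuf repeated message fields: membership and removal work on node
    objects, not on names, so the old `if name in collection` test never
    matched a plain string and nothing was ever removed. A standalone
    illustration using only the generated protobuf types:

        # Repeated message fields compare whole nodes, not names.
        from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

        d = remote_execution_pb2.Directory()
        node = d.files.add(name='hello.txt')
        'hello.txt' in d.files   # False: a str never equals a FileNode
        node in d.files          # True
        d.files.remove(node)     # removing by node object works
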
    ... ... @@ -231,9 +391,13 @@ class CasBasedDirectory(Directory):
    231 391
                 if isinstance(entry, CasBasedDirectory):
    
    232 392
                     return entry.descend(subdirectory_spec[1:], create)
    
    233 393
                 else:
    
    394
    +                # May be a symlink
    
    395
    +                target = self._resolve(subdirectory_spec[0], force_create=create)
    
    396
    +                if isinstance(target, CasBasedDirectory):
    
    397
    +                    return target
    
    234 398
                     error = "Cannot descend into {}, which is a '{}' in the directory {}"
    
    235 399
                     raise VirtualDirectoryError(error.format(subdirectory_spec[0],
    
    236
    -                                                         type(entry).__name__,
    
    400
    +                                                         type(self.index[subdirectory_spec[0]].pb_object).__name__,
    
    237 401
                                                              self))
    
    238 402
             else:
    
    239 403
                 if create:
    
    ... ... @@ -254,36 +418,9 @@ class CasBasedDirectory(Directory):
    254 418
             else:
    
    255 419
                 return self
    
    256 420
     
    
    257
    -    def _resolve_symlink_or_directory(self, name):
    
    258
    -        """Used only by _import_files_from_directory. Tries to resolve a
    
    259
    -        directory name or symlink name. 'name' must be an entry in this
    
    260
    -        directory. It must be a single symlink or directory name, not a path
    
    261
    -        separated by path separators. If it's an existing directory name, it
    
    262
    -        just returns the Directory object for that. If it's a symlink, it will
    
    263
    -        attempt to find the target of the symlink and return that as a
    
    264
    -        Directory object.
    
    265
    -
    
    266
    -        If a symlink target doesn't exist, it will attempt to create it
    
    267
    -        as a directory as long as it's within this directory tree.
    
    268
    -        """
    
    269
    -
    
    270
    -        if isinstance(self.index[name].buildstream_object, Directory):
    
    271
    -            return self.index[name].buildstream_object
    
    272
    -        # OK then, it's a symlink
    
    273
    -        symlink = self._find_pb2_entry(name)
    
    274
    -        absolute = symlink.target.startswith(CasBasedDirectory._pb2_absolute_path_prefix)
    
    275
    -        if absolute:
    
    276
    -            root = self.find_root()
    
    277
    -        else:
    
    278
    -            root = self
    
    279
    -        directory = root
    
    280
    -        components = symlink.target.split(CasBasedDirectory._pb2_path_sep)
    
    281
    -        for c in components:
    
    282
    -            if c == "..":
    
    283
    -                directory = directory.parent
    
    284
    -            else:
    
    285
    -                directory = directory.descend(c, create=True)
    
    286
    -        return directory
    
    421
    +    def _resolve(self, name, absolute_symlinks_resolve=True, force_create=False):
    
    422
    +        resolver = _Resolver(absolute_symlinks_resolve, force_create)
    
    423
    +        return resolver.resolve(name, self)
    
    287 424
     
    
    288 425
         def _check_replacement(self, name, path_prefix, fileListResult):
    
    289 426
             """ Checks whether 'name' exists, and if so, whether we can overwrite it.
    
    ... ... @@ -297,6 +434,7 @@ class CasBasedDirectory(Directory):
    297 434
                 return True
    
    298 435
             if (isinstance(existing_entry,
    
    299 436
                            (remote_execution_pb2.FileNode, remote_execution_pb2.SymlinkNode))):
    
    437
    +            self.delete_entry(name)
    
    300 438
                 fileListResult.overwritten.append(relative_pathname)
    
    301 439
                 return True
    
    302 440
             elif isinstance(existing_entry, remote_execution_pb2.DirectoryNode):
    
    ... ... @@ -314,23 +452,44 @@ class CasBasedDirectory(Directory):
    314 452
                            .format(name, type(existing_entry)))
    
    315 453
             return False  # In case asserts are disabled
    
    316 454
     
    
    317
    -    def _import_directory_recursively(self, directory_name, source_directory, remaining_path, path_prefix):
    
    318
    -        """ _import_directory_recursively and _import_files_from_directory will be called alternately
    
    319
    -        as a directory tree is descended. """
    
    320
    -        if directory_name in self.index:
    
    321
    -            subdir = self._resolve_symlink_or_directory(directory_name)
    
    322
    -        else:
    
    323
    -            subdir = self._add_directory(directory_name)
    
    324
    -        new_path_prefix = os.path.join(path_prefix, directory_name)
    
    325
    -        subdir_result = subdir._import_files_from_directory(os.path.join(source_directory, directory_name),
    
    326
    -                                                            [os.path.sep.join(remaining_path)],
    
    327
    -                                                            path_prefix=new_path_prefix)
    
    328
    -        return subdir_result
    
    455
    +    def _replace_anything_with_dir(self, name, path_prefix, overwritten_files_list):
    
    456
    +        self.delete_entry(name)
    
    457
    +        subdir = self._add_directory(name)
    
    458
    +        overwritten_files_list.append(os.path.join(path_prefix, name))
    
    459
    +        return subdir
    
    329 460
     
    
    330 461
         def _import_files_from_directory(self, source_directory, files, path_prefix=""):
    
    331
    -        """ Imports files from a traditional directory """
    
    462
    +        """ Imports files from a traditional directory. """
    
    463
    +
    
    464
    +        def _ensure_followable(name, path_prefix):
    
    465
    +            """ Makes sure 'name' is a directory or symlink to a directory which can be descended into. """
    
    466
    +            if isinstance(self.index[name].buildstream_object, Directory):
    
    467
    +                return self.descend(name)
    
    468
    +            try:
    
    469
    +                target = self._resolve(name, force_create=True)
    
    470
    +            except InfiniteSymlinkException:
    
    471
    +                return self._replace_anything_with_dir(name, path_prefix, result.overwritten)
    
    472
    +            if isinstance(target, CasBasedDirectory):
    
    473
    +                return target
    
    474
    +            elif isinstance(target, remote_execution_pb2.FileNode):
    
    475
    +                return self._replace_anything_with_dir(name, path_prefix, result.overwritten)
    
    476
    +            return target
    
    477
    +
    
    478
    +        def _import_directory_recursively(directory_name, source_directory, remaining_path, path_prefix):
    
    479
    +            """ _import_directory_recursively and _import_files_from_directory will be called alternately
    
    480
    +            as a directory tree is descended. """
    
    481
    +            if directory_name in self.index:
    
    482
    +                subdir = _ensure_followable(directory_name, path_prefix)
    
    483
    +            else:
    
    484
    +                subdir = self._add_directory(directory_name)
    
    485
    +            new_path_prefix = os.path.join(path_prefix, directory_name)
    
    486
    +            subdir_result = subdir._import_files_from_directory(os.path.join(source_directory, directory_name),
    
    487
    +                                                                [os.path.sep.join(remaining_path)],
    
    488
    +                                                                path_prefix=new_path_prefix)
    
    489
    +            return subdir_result
    
    490
    +
    
    332 491
             result = FileListResult()
    
    333
    -        for entry in sorted(files):
    
    492
    +        for entry in files:
    
    334 493
                 split_path = entry.split(os.path.sep)
    
    335 494
                 # The actual file on the FS we're importing
    
    336 495
                 import_file = os.path.join(source_directory, entry)
    
    ... ... @@ -338,14 +497,18 @@ class CasBasedDirectory(Directory):
    338 497
                 relative_pathname = os.path.join(path_prefix, entry)
    
    339 498
                 if len(split_path) > 1:
    
    340 499
                     directory_name = split_path[0]
    
    341
    -                # Hand this off to the importer for that subdir. This will only do one file -
    
    342
    -                # a better way would be to hand off all the files in this subdir at once.
    
    343
    -                subdir_result = self._import_directory_recursively(directory_name, source_directory,
    
    344
    -                                                                   split_path[1:], path_prefix)
    
    500
    +                # Hand this off to the importer for that subdir.
    
    501
    +
    
    502
    +                # It would be advantageous to batch these together by
    
    503
    +                # directory_name. However, we can't do it out of
    
    504
    +                # order, since importing symlinks affects the results
    
    505
    +                # of other imports.
    
    506
    +                subdir_result = _import_directory_recursively(directory_name, source_directory,
    
    507
    +                                                              split_path[1:], path_prefix)
    
    345 508
                     result.combine(subdir_result)
    
    346 509
                 elif os.path.islink(import_file):
    
    347 510
                     if self._check_replacement(entry, path_prefix, result):
    
    348
    -                    self._add_new_link(source_directory, entry)
    
    511
    +                    self._copy_link_from_filesystem(source_directory, entry)
    
    349 512
                         result.files_written.append(relative_pathname)
    
    350 513
                 elif os.path.isdir(import_file):
    
    351 514
                     # A plain directory which already exists isn't a problem; just ignore it.
    
    ... ... @@ -353,10 +516,78 @@ class CasBasedDirectory(Directory):
    353 516
                         self._add_directory(entry)
    
    354 517
                 elif os.path.isfile(import_file):
    
    355 518
                     if self._check_replacement(entry, path_prefix, result):
    
    356
    -                    self._add_new_file(source_directory, entry)
    
    519
    +                    self._add_file(source_directory, entry, modified=relative_pathname in result.overwritten)
    
    357 520
                         result.files_written.append(relative_pathname)
    
    358 521
             return result
    
    359 522
     
    
    523
    +    @staticmethod
    
    524
    +    def _files_in_subdir(sorted_files, dirname):
    
    525
    +        """Filters sorted_files and returns only the ones which have
    
    526
    +           'dirname' as a prefix, with that prefix removed.
    
    527
    +
    
    528
    +        """
    
    529
    +        if not dirname.endswith(os.path.sep):
    
    530
    +            dirname += os.path.sep
    
    531
    +        return [f[len(dirname):] for f in sorted_files if f.startswith(dirname)]
    
    532
    +
    
    533
    +    def _partial_import_cas_into_cas(self, source_directory, files, path_prefix="", file_list_required=True):
    
    534
    +        """ Import only the files and symlinks listed in 'files' from source_directory to this one.
    
    535
    +        Args:
    
    536
    +           source_directory (:class:`.CasBasedDirectory`): The directory to import from
    
    537
    +           files ([str]): List of pathnames to import. Must be a list, not a generator.
    
    538
    +           path_prefix (str): Prefix used to add entries to the file list result.
    
    539
    +           file_list_required: Whether to update the file list while processing.
    
    540
    +        """
    
    541
    +        result = FileListResult()
    
    542
    +        processed_directories = set()
    
    543
    +        for f in files:
    
    544
    +            fullname = os.path.join(path_prefix, f)
    
    545
    +            components = f.split(os.path.sep)
    
    546
    +            if len(components) > 1:
    
    547
    +                # We are importing a thing which is in a subdirectory. We may have already seen this dirname
    
    548
    +                # for a previous file.
    
    549
    +                dirname = components[0]
    
    550
    +                if dirname not in processed_directories:
    
    551
    +                    # Now strip off the first directory name and import files recursively.
    
    552
    +                    subcomponents = CasBasedDirectory._files_in_subdir(files, dirname)
    
    553
    +                    # We will fail at this point if there is a file, or a symlink to a file, called 'dirname'.
    
    554
    +                    if dirname in self.index:
    
    555
    +                        resolved_component = self._resolve(dirname, force_create=True)
    
    556
    +                        if isinstance(resolved_component, remote_execution_pb2.FileNode):
    
    557
    +                            dest_subdir = self._replace_anything_with_dir(dirname, path_prefix, result.overwritten)
    
    558
    +                        else:
    
    559
    +                            dest_subdir = resolved_component
    
    560
    +                    else:
    
    561
    +                        dest_subdir = self.descend(dirname, create=True)
    
    562
    +                    src_subdir = source_directory.descend(dirname)
    
    563
    +                    import_result = dest_subdir._partial_import_cas_into_cas(src_subdir, subcomponents,
    
    564
    +                                                                             path_prefix=fullname,
    
    565
    +                                                                             file_list_required=file_list_required)
    
    566
    +                    result.combine(import_result)
    
    567
    +                processed_directories.add(dirname)
    
    568
    +            elif isinstance(source_directory.index[f].buildstream_object, CasBasedDirectory):
    
    569
    +                # The thing in the input file list is a directory on
    
    570
    +                # its own. We don't need to do anything other than create it if it doesn't exist.
    
    571
    +                # If we already have an entry with the same name that isn't a directory, that
    
    572
    +                # will be dealt with when importing files in this directory.
    
    573
    +                if f not in self.index:
    
    574
    +                    self.descend(f, create=True)
    
    575
    +            else:
    
    576
    +                # We're importing a file or symlink - replace anything with the same name.
    
    577
    +                importable = self._check_replacement(f, path_prefix, result)
    
    578
    +                if importable:
    
    579
    +                    item = source_directory.index[f].pb_object
    
    580
    +                    if isinstance(item, remote_execution_pb2.FileNode):
    
    581
    +                        filenode = self.pb2_directory.files.add(digest=item.digest, name=f,
    
    582
    +                                                                is_executable=item.is_executable)
    
    583
    +                        self.index[f] = IndexEntry(filenode, modified=True)
    
    584
    +                    else:
    
    585
    +                        assert isinstance(item, remote_execution_pb2.SymlinkNode)
    
    586
    +                        self._add_new_link_direct(name=f, target=item.target)
    
    587
    +                else:
    
    588
    +                    result.ignored.append(os.path.join(path_prefix, f))
    
    589
    +        return result
    
    590
    +
    
    360 591
         def import_files(self, external_pathspec, *, files=None,
    
    361 592
                          report_written=True, update_utimes=False,
    
    362 593
                          can_link=False):
    
    ... ... @@ -378,28 +609,27 @@ class CasBasedDirectory(Directory):
    378 609
     
    
    379 610
             can_link (bool): Ignored, since hard links do not have any meaning within CAS.
    
    380 611
             """
    
    381
    -        if isinstance(external_pathspec, FileBasedDirectory):
    
    382
    -            source_directory = external_pathspec._get_underlying_directory()
    
    383
    -        elif isinstance(external_pathspec, CasBasedDirectory):
    
    384
    -            # TODO: This transfers from one CAS to another via the
    
    385
    -            # filesystem, which is very inefficient. Alter this so it
    
    386
    -            # transfers refs across directly.
    
    387
    -            with tempfile.TemporaryDirectory(prefix="roundtrip") as tmpdir:
    
    388
    -                external_pathspec.export_files(tmpdir)
    
    389
    -                if files is None:
    
    390
    -                    files = list_relative_paths(tmpdir)
    
    391
    -                result = self._import_files_from_directory(tmpdir, files=files)
    
    392
    -            return result
    
    393
    -        else:
    
    394
    -            source_directory = external_pathspec
    
    395 612
     
    
    396 613
             if files is None:
    
    397
    -            files = list_relative_paths(source_directory)
    
    614
    +            if isinstance(external_pathspec, str):
    
    615
    +                files = list_relative_paths(external_pathspec)
    
    616
    +            else:
    
    617
    +                assert isinstance(external_pathspec, Directory)
    
    618
    +                files = external_pathspec.list_relative_paths()
    
    619
    +
    
    620
    +        if isinstance(external_pathspec, FileBasedDirectory):
    
    621
    +            source_directory = external_pathspec.get_underlying_directory()
    
    622
    +            result = self._import_files_from_directory(source_directory, files=files)
    
    623
    +        elif isinstance(external_pathspec, str):
    
    624
    +            source_directory = external_pathspec
    
    625
    +            result = self._import_files_from_directory(source_directory, files=files)
    
    626
    +        else:
    
    627
    +            assert isinstance(external_pathspec, CasBasedDirectory)
    
    628
    +            result = self._partial_import_cas_into_cas(external_pathspec, files=list(files))
    
    398 629
     
    
    399 630
             # TODO: No notice is taken of report_written, update_utimes or can_link.
    
    400 631
             # Current behaviour is to fully populate the report, which is inefficient,
    
    401 632
             # but still correct.
    
    402
    -        result = self._import_files_from_directory(source_directory, files=files)
    
    403 633
     
    
    404 634
             # We need to recalculate and store the hashes of all directories both
    
    405 635
             # up and down the tree; we have changed our directory by importing files
    
    ... ... @@ -511,6 +741,28 @@ class CasBasedDirectory(Directory):
    511 741
             else:
    
    512 742
                 self._mark_directory_unmodified()
    
    513 743
     
    
    744
    +    def _lightweight_resolve_to_index(self, path):
    
    745
    +        """A lightweight function for transforming paths into IndexEntry
    
    746
    +        objects. This does not follow symlinks.
    
    747
    +
    
    748
    +        path: The string to resolve. This should be a series of path
    
    749
    +        components separated by the protocol buffer path separator
    
    750
    +        _pb2_path_sep.
    
    751
    +
    
    752
    +        Returns: the IndexEntry found, or None if any of the path components were not present.
    
    753
    +
    
    754
    +        """
    
    755
    +        directory = self
    
    756
    +        path_components = path.split(CasBasedDirectory._pb2_path_sep)
    
    757
    +        for component in path_components[:-1]:
    
    758
    +            if component not in directory.index:
    
    759
    +                return None
    
    760
    +            if isinstance(directory.index[component].buildstream_object, CasBasedDirectory):
    
    761
    +                directory = directory.index[component].buildstream_object
    
    762
    +            else:
    
    763
    +                return None
    
    764
    +        return directory.index.get(path_components[-1], None)
    
    765
    +
    
    514 766
         def list_modified_paths(self):
    
    515 767
             """Provide a list of relative paths which have been modified since the
    
    516 768
             last call to mark_unmodified.
    
    ... ... @@ -518,29 +770,43 @@ class CasBasedDirectory(Directory):
    518 770
             Return value: List(str) - list of modified paths
    
    519 771
             """
    
    520 772
     
    
    521
    -        filelist = []
    
    522
    -        for (k, v) in self.index.items():
    
    523
    -            if isinstance(v.buildstream_object, CasBasedDirectory):
    
    524
    -                filelist.extend([k + os.path.sep + x for x in v.buildstream_object.list_modified_paths()])
    
    525
    -            elif isinstance(v.pb_object, remote_execution_pb2.FileNode) and v.modified:
    
    526
    -                filelist.append(k)
    
    527
    -        return filelist
    
    773
    +        for p in self.list_relative_paths():
    
    774
    +            i = self._lightweight_resolve_to_index(p)
    
    775
    +            if i and i.modified:
    
    776
    +                yield p
    
    528 777
     
    
    529
    -    def list_relative_paths(self):
    
    778
    +    def list_relative_paths(self, relpath=""):
    
    530 779
             """Provide a list of all relative paths.
    
    531 780
     
    
    532
    -        NOTE: This list is not in the same order as utils.list_relative_paths.
    
    533
    -
    
    534 781
             Return value: List(str) - list of all paths
    
    535 782
             """
    
    536 783
     
    
    537
    -        filelist = []
    
    538
    -        for (k, v) in self.index.items():
    
    539
    -            if isinstance(v.buildstream_object, CasBasedDirectory):
    
    540
    -                filelist.extend([k + os.path.sep + x for x in v.buildstream_object.list_relative_paths()])
    
    541
    -            elif isinstance(v.pb_object, remote_execution_pb2.FileNode):
    
    542
    -                filelist.append(k)
    
    543
    -        return filelist
    
    784
    +        symlink_list = filter(lambda i: isinstance(i[1].pb_object, remote_execution_pb2.SymlinkNode),
    
    785
    +                              self.index.items())
    
    786
    +        file_list = list(filter(lambda i: isinstance(i[1].pb_object, remote_execution_pb2.FileNode),
    
    787
    +                                self.index.items()))
    
    788
    +        directory_list = filter(lambda i: isinstance(i[1].buildstream_object, CasBasedDirectory),
    
    789
    +                                self.index.items())
    
    790
    +
    
    791
    +        # We need to mimic the behaviour of os.walk, in which symlinks
    
    792
    +        # to directories count as directories and symlinks to files or
    
    793
    +        # broken symlinks count as files. os.walk doesn't follow
    
    794
    +        # symlinks, so we don't recurse.
    
    795
    +        for (k, v) in sorted(symlink_list):
    
    796
    +            target = self._resolve(k, absolute_symlinks_resolve=True)
    
    797
    +            if isinstance(target, CasBasedDirectory):
    
    798
    +                yield os.path.join(relpath, k)
    
    799
    +            else:
    
    800
    +                file_list.append((k, v))
    
    801
    +
    
    802
    +        if file_list == [] and relpath != "":
    
    803
    +            yield relpath
    
    804
    +        else:
    
    805
    +            for (k, v) in sorted(file_list):
    
    806
    +                yield os.path.join(relpath, k)
    
    807
    +
    
    808
    +        for (k, v) in sorted(directory_list):
    
    809
    +            yield from v.buildstream_object.list_relative_paths(relpath=os.path.join(relpath, k))
    
    544 810
     
    
    545 811
         def recalculate_hash(self):
    
    546 812
             """ Recalcuates the hash for this directory and store the results in
    

  • buildstream/utils.py
    ... ... @@ -634,7 +634,7 @@ def _parse_size(size, volume):
    634 634
     
    
    635 635
     # _pretty_size()
    
    636 636
     #
    
    637
    -# Converts a number of bytes into a string representation in KB, MB, GB, TB
    
    637
    +# Converts a number of bytes into a string representation in KiB, MiB, GiB, TiB
    
    638 638
     # represented as K, M, G, T etc.
    
    639 639
     #
    
    640 640
     # Args:
    
    ... ... @@ -646,10 +646,11 @@ def _parse_size(size, volume):
    646 646
     def _pretty_size(size, dec_places=0):
    
    647 647
         psize = size
    
    648 648
         unit = 'B'
    
    649
    -    for unit in ('B', 'K', 'M', 'G', 'T'):
    
    649
    +    units = ('B', 'K', 'M', 'G', 'T')
    
    650
    +    for unit in units:
    
    650 651
             if psize < 1024:
    
    651 652
                 break
    
    652
    -        else:
    
    653
    +        elif unit != units[-1]:
    
    653 654
                 psize /= 1024
    
    654 655
         return "{size:g}{unit}".format(size=round(psize, dec_places), unit=unit)
    
    655 656
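
    The _pretty_size fix matters for sizes of 1024 TiB and over: the old loop
    divided even on the final 'T' iteration, so 1025 TiB was printed as "1T".
    A standalone check of the fixed logic (same body as above):

        def pretty_size(size, dec_places=0):
            psize = size
            unit = 'B'
            units = ('B', 'K', 'M', 'G', 'T')
            for unit in units:
                if psize < 1024:
                    break
                elif unit != units[-1]:
                    psize /= 1024   # never divide past the largest unit
            return "{size:g}{unit}".format(size=round(psize, dec_places), unit=unit)

        print(pretty_size(1025 * 1024 ** 4))   # "1025T"; the old code printed "1T"
        print(pretty_size(2048))               # "2K"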
     
    

  • dev-requirements.txt
    1 1
     coverage == 4.4.0
    
    2 2
     pep8
    
    3 3
     pylint == 2.1.1
    
    4
    -pytest >= 3.7
    
    4
    +pytest >= 3.8
    
    5 5
     pytest-cov >= 2.5.0
    
    6 6
     pytest-datafiles
    
    7 7
     pytest-env
    

  • tests/artifactcache/pull.py
    ... ... @@ -90,7 +90,7 @@ def test_pull(cli, tmpdir, datafiles):
    90 90
             cas = context.artifactcache
    
    91 91
     
    
    92 92
             # Assert that the element's artifact is **not** cached
    
    93
    -        element = project.load_elements(['target.bst'], cas)[0]
    
    93
    +        element = project.load_elements(['target.bst'])[0]
    
    94 94
             element_key = cli.get_element_key(project_dir, 'target.bst')
    
    95 95
             assert not cas.contains(element, element_key)
    
    96 96
     
    
    ... ... @@ -132,7 +132,7 @@ def _test_pull(user_config_file, project_dir, artifact_dir,
    132 132
         cas = context.artifactcache
    
    133 133
     
    
    134 134
         # Load the target element
    
    135
    -    element = project.load_elements([element_name], cas)[0]
    
    135
    +    element = project.load_elements([element_name])[0]
    
    136 136
     
    
    137 137
         # Manually setup the CAS remote
    
    138 138
         cas.setup_remotes(use_config=True)
    
    ... ... @@ -190,15 +190,16 @@ def test_pull_tree(cli, tmpdir, datafiles):
    190 190
             # Load the project and CAS cache
    
    191 191
             project = Project(project_dir, context)
    
    192 192
             project.ensure_fully_loaded()
    
    193
    -        cas = context.artifactcache
    
    193
    +        artifactcache = context.artifactcache
    
    194
    +        cas = artifactcache.cas
    
    194 195
     
    
    195 196
             # Assert that the element's artifact is cached
    
    196
    -        element = project.load_elements(['target.bst'], cas)[0]
    
    197
    +        element = project.load_elements(['target.bst'])[0]
    
    197 198
             element_key = cli.get_element_key(project_dir, 'target.bst')
    
    198
    -        assert cas.contains(element, element_key)
    
    199
    +        assert artifactcache.contains(element, element_key)
    
    199 200
     
    
    200 201
             # Retrieve the Directory object from the cached artifact
    
    201
    -        artifact_ref = cas.get_artifact_fullname(element, element_key)
    
    202
    +        artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
    
    202 203
             artifact_digest = cas.resolve_ref(artifact_ref)
    
    203 204
     
    
    204 205
             queue = multiprocessing.Queue()
    
    ... ... @@ -268,12 +269,13 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
    268 269
         project.ensure_fully_loaded()
    
    269 270
     
    
    270 271
         # Create a local CAS cache handle
    
    271
    -    cas = context.artifactcache
    
    272
    +    artifactcache = context.artifactcache
    
    273
    +    cas = artifactcache.cas
    
    272 274
     
    
    273 275
         # Manually setup the CAS remote
    
    274
    -    cas.setup_remotes(use_config=True)
    
    276
    +    artifactcache.setup_remotes(use_config=True)
    
    275 277
     
    
    276
    -    if cas.has_push_remotes():
    
    278
    +    if artifactcache.has_push_remotes():
    
    277 279
             directory = remote_execution_pb2.Directory()
    
    278 280
     
    
    279 281
             with open(cas.objpath(artifact_digest), 'rb') as f:
    
    ... ... @@ -284,7 +286,7 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
    284 286
             tree_maker(cas, tree, directory)
    
    285 287
     
    
    286 288
             # Push the Tree as a regular message
    
    287
    -        tree_digest = cas.push_message(project, tree)
    
    289
    +        tree_digest = artifactcache.push_message(project, tree)
    
    288 290
     
    
    289 291
             queue.put((tree_digest.hash, tree_digest.size_bytes))
    
    290 292
         else:
    

  • tests/artifactcache/push.py
    ... ... @@ -69,7 +69,7 @@ def test_push(cli, tmpdir, datafiles):
    69 69
             cas = context.artifactcache
    
    70 70
     
    
    71 71
             # Assert that the element's artifact is cached
    
    72
    -        element = project.load_elements(['target.bst'], cas)[0]
    
    72
    +        element = project.load_elements(['target.bst'])[0]
    
    73 73
             element_key = cli.get_element_key(project_dir, 'target.bst')
    
    74 74
             assert cas.contains(element, element_key)
    
    75 75
     
    
    ... ... @@ -111,7 +111,7 @@ def _test_push(user_config_file, project_dir, artifact_dir,
    111 111
         cas = context.artifactcache
    
    112 112
     
    
    113 113
         # Load the target element
    
    114
    -    element = project.load_elements([element_name], cas)[0]
    
    114
    +    element = project.load_elements([element_name])[0]
    
    115 115
     
    
    116 116
         # Manually setup the CAS remote
    
    117 117
         cas.setup_remotes(use_config=True)
    
    ... ... @@ -165,20 +165,21 @@ def test_push_directory(cli, tmpdir, datafiles):
    165 165
             # Load the project and CAS cache
    
    166 166
             project = Project(project_dir, context)
    
    167 167
             project.ensure_fully_loaded()
    
    168
    -        cas = context.artifactcache
    
    168
    +        artifactcache = context.artifactcache
    
    169
    +        cas = artifactcache.cas
    
    169 170
     
    
    170 171
             # Assert that the element's artifact is cached
    
    171
    -        element = project.load_elements(['target.bst'], cas)[0]
    
    172
    +        element = project.load_elements(['target.bst'])[0]
    
    172 173
             element_key = cli.get_element_key(project_dir, 'target.bst')
    
    173
    -        assert cas.contains(element, element_key)
    
    174
    +        assert artifactcache.contains(element, element_key)
    
    174 175
     
    
    175 176
             # Manually setup the CAS remote
    
    176
    -        cas.setup_remotes(use_config=True)
    
    177
    -        cas.initialize_remotes()
    
    178
    -        assert cas.has_push_remotes(element=element)
    
    177
    +        artifactcache.setup_remotes(use_config=True)
    
    178
    +        artifactcache.initialize_remotes()
    
    179
    +        assert artifactcache.has_push_remotes(element=element)
    
    179 180
     
    
    180 181
             # Recreate the CasBasedDirectory object from the cached artifact
    
    181
    -        artifact_ref = cas.get_artifact_fullname(element, element_key)
    
    182
    +        artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
    
    182 183
             artifact_digest = cas.resolve_ref(artifact_ref)
    
    183 184
     
    
    184 185
             queue = multiprocessing.Queue()
    

  • tests/integration/pip_source.py
    ... ... @@ -4,6 +4,7 @@ import pytest
    4 4
     from buildstream import _yaml
    
    5 5
     
    
    6 6
     from tests.testutils import cli_integration as cli
    
    7
    +from tests.testutils.python_repo import setup_pypi_repo
    
    7 8
     from tests.testutils.integration import assert_contains
    
    8 9
     
    
    9 10
     
    
    ... ... @@ -17,12 +18,21 @@ DATA_DIR = os.path.join(
    17 18
     
    
    18 19
     
    
    19 20
     @pytest.mark.datafiles(DATA_DIR)
    
    20
    -def test_pip_source_import(cli, tmpdir, datafiles):
    
    21
    +def test_pip_source_import(cli, tmpdir, datafiles, setup_pypi_repo):
    
    21 22
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    22 23
         checkout = os.path.join(cli.directory, 'checkout')
    
    23 24
         element_path = os.path.join(project, 'elements')
    
    24 25
         element_name = 'pip/hello.bst'
    
    25 26
     
    
    27
    +    # check that exotically named packages are imported correctly
    
    28
    +    myreqs_packages = ['hellolib']
    
    29
    +    packages = ['app2', 'app.3', 'app-4', 'app_5', 'app.no.6', 'app-no-7', 'app_no_8']
    
    30
    +
    
    31
    +    # create mock pypi repository
    
    32
    +    pypi_repo = os.path.join(project, 'files', 'pypi-repo')
    
    33
    +    os.makedirs(pypi_repo, exist_ok=True)
    
    34
    +    setup_pypi_repo(myreqs_packages + packages, pypi_repo)
    
    35
    +
    
    26 36
         element = {
    
    27 37
             'kind': 'import',
    
    28 38
             'sources': [
    
    ... ... @@ -32,9 +42,9 @@ def test_pip_source_import(cli, tmpdir, datafiles):
    32 42
                 },
    
    33 43
                 {
    
    34 44
                     'kind': 'pip',
    
    35
    -                'url': 'file://{}'.format(os.path.realpath(os.path.join(project, 'files', 'pypi-repo'))),
    
    45
    +                'url': 'file://{}'.format(os.path.realpath(pypi_repo)),
    
    36 46
                     'requirements-files': ['myreqs.txt'],
    
    37
    -                'packages': ['app2']
    
    47
    +                'packages': packages
    
    38 48
                 }
    
    39 49
             ]
    
    40 50
         }
    
    ... ... @@ -51,16 +61,31 @@ def test_pip_source_import(cli, tmpdir, datafiles):
    51 61
         assert result.exit_code == 0
    
    52 62
     
    
    53 63
         assert_contains(checkout, ['/.bst_pip_downloads',
    
    54
    -                               '/.bst_pip_downloads/HelloLib-0.1.tar.gz',
    
    55
    -                               '/.bst_pip_downloads/App2-0.1.tar.gz'])
    
    64
    +                               '/.bst_pip_downloads/hellolib-0.1.tar.gz',
    
    65
    +                               '/.bst_pip_downloads/app2-0.1.tar.gz',
    
    66
    +                               '/.bst_pip_downloads/app.3-0.1.tar.gz',
    
    67
    +                               '/.bst_pip_downloads/app-4-0.1.tar.gz',
    
    68
    +                               '/.bst_pip_downloads/app_5-0.1.tar.gz',
    
    69
    +                               '/.bst_pip_downloads/app.no.6-0.1.tar.gz',
    
    70
    +                               '/.bst_pip_downloads/app-no-7-0.1.tar.gz',
    
    71
    +                               '/.bst_pip_downloads/app_no_8-0.1.tar.gz'])
    
    56 72
     
    
    57 73
     
    
    58 74
     @pytest.mark.datafiles(DATA_DIR)
    
    59
    -def test_pip_source_build(cli, tmpdir, datafiles):
    
    75
    +def test_pip_source_build(cli, tmpdir, datafiles, setup_pypi_repo):
    
    60 76
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    61 77
         element_path = os.path.join(project, 'elements')
    
    62 78
         element_name = 'pip/hello.bst'
    
    63 79
     
    
    80
    +    # check that exotically named packages are imported correctly
    
    81
    +    myreqs_packages = ['hellolib']
    
    82
    +    packages = ['app2', 'app.3', 'app-4', 'app_5', 'app.no.6', 'app-no-7', 'app_no_8']
    
    83
    +
    
    84
    +    # create mock pypi repository
    
    85
    +    pypi_repo = os.path.join(project, 'files', 'pypi-repo')
    
    86
    +    os.makedirs(pypi_repo, exist_ok=True)
    
    87
    +    setup_pypi_repo(myreqs_packages + packages, pypi_repo)
    
    88
    +
    
    64 89
         element = {
    
    65 90
             'kind': 'manual',
    
    66 91
             'depends': ['base.bst'],
    
    ... ... @@ -71,16 +96,15 @@ def test_pip_source_build(cli, tmpdir, datafiles):
    71 96
                 },
    
    72 97
                 {
    
    73 98
                     'kind': 'pip',
    
    74
    -                'url': 'file://{}'.format(os.path.realpath(os.path.join(project, 'files', 'pypi-repo'))),
    
    99
    +                'url': 'file://{}'.format(os.path.realpath(pypi_repo)),
    
    75 100
                     'requirements-files': ['myreqs.txt'],
    
    76
    -                'packages': ['app2']
    
    101
    +                'packages': packages
    
    77 102
                 }
    
    78 103
             ],
    
    79 104
             'config': {
    
    80 105
                 'install-commands': [
    
    81 106
                     'pip3 install --no-index --prefix %{install-root}/usr .bst_pip_downloads/*.tar.gz',
    
    82
    -                'chmod +x app1.py',
    
    83
    -                'install app1.py  %{install-root}/usr/bin/'
    
    107
    +                'install app1.py %{install-root}/usr/bin/'
    
    84 108
                 ]
    
    85 109
             }
    
    86 110
         }
    
    ... ... @@ -95,5 +119,4 @@ def test_pip_source_build(cli, tmpdir, datafiles):
    95 119
     
    
    96 120
         result = cli.run(project=project, args=['shell', element_name, '/usr/bin/app1.py'])
    
    97 121
         assert result.exit_code == 0
    
    98
    -    assert result.output == """Hello App1!
    
    99
    -"""
    122
    +    assert result.output == "Hello App1! This is hellolib\n"
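
    The new expected output ties app1.py to the generated hellolib package:
    INIT_TEMPLATE in tests/testutils/python_repo.py (later in this diff) gives
    every generated package a hello(actor) that prints "Hello {actor}! This is
    {name}". app1.py itself is not part of this diff; a plausible
    reconstruction, for context only:

        #!/usr/bin/env python3
        # Hypothetical sketch of the project's app1.py -- the asserted output
        # implies it calls hello('App1') from the generated hellolib package.
        from hellolib import hello

        def main():
            hello('App1')   # prints: Hello App1! This is hellolib

        if __name__ == '__main__':
            main()

    The separate "chmod +x app1.py" command could be dropped because install(1)
    sets the execute bits by default.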

  • tests/integration/project/files/pypi-repo/app2/App2-0.1.tar.gz deleted
  • tests/integration/project/files/pypi-repo/app2/index.html deleted
    1
    -<html>
    
    2
    -  <head>
    
    3
    -    <title>Links for app1</title>
    
    4
    -  </head>
    
    5
    -  <body>
    
    6
    -    <a href="">'App2-0.1.tar.gz'>App2-0.1.tar.gz</a><br />
    
    7
    -  </body>
    
    8
    -</html>

  • tests/integration/project/files/pypi-repo/hellolib/HelloLib-0.1.tar.gz deleted
  • tests/integration/project/files/pypi-repo/hellolib/index.html deleted
    1
    -<html>
    
    2
    -  <head>
    
    3
    -    <title>Links for app1</title>
    
    4
    -  </head>
    
    5
    -  <body>
    
    6
    -    <a href="">'HelloLib-0.1.tar.gz'>HelloLib-0.1.tar.gz</a><br />
    
    7
    -  </body>
    
    8
    -</html>

  • tests/sources/pip.py
    ... ... @@ -3,6 +3,7 @@ import pytest
    3 3
     
    
    4 4
     from buildstream._exceptions import ErrorDomain
    
    5 5
     from buildstream import _yaml
    
    6
    +from buildstream.plugins.sources.pip import _match_package_name
    
    6 7
     from tests.testutils import cli
    
    7 8
     
    
    8 9
     DATA_DIR = os.path.join(
    
    ... ... @@ -45,3 +46,22 @@ def test_no_packages(cli, tmpdir, datafiles):
    45 46
             'show', 'target.bst'
    
    46 47
         ])
    
    47 48
         result.assert_main_error(ErrorDomain.SOURCE, None)
    
    49
    +
    
    50
    +
    
    51
    +# Test that the pip source parses tarball names correctly for the ref
    
    52
    +@pytest.mark.parametrize(
    
    53
    +    'tarball, expected_name, expected_version',
    
    54
    +    [
    
    55
    +        ('dotted.package-0.9.8.tar.gz', 'dotted.package', '0.9.8'),
    
    56
    +        ('hyphenated-package-2.6.0.tar.gz', 'hyphenated-package', '2.6.0'),
    
    57
    +        ('underscore_pkg-3.1.0.tar.gz', 'underscore_pkg', '3.1.0'),
    
    58
    +        ('numbers2and5-1.0.1.tar.gz', 'numbers2and5', '1.0.1'),
    
    59
    +        ('multiple.dots.package-5.6.7.tar.gz', 'multiple.dots.package', '5.6.7'),
    
    60
    +        ('multiple-hyphens-package-1.2.3.tar.gz', 'multiple-hyphens-package', '1.2.3'),
    
    61
    +        ('multiple_underscore_pkg-3.4.5.tar.gz', 'multiple_underscore_pkg', '3.4.5'),
    
    62
    +        ('shortversion-1.0.tar.gz', 'shortversion', '1.0'),
    
    63
    +        ('longversion-1.2.3.4.tar.gz', 'longversion', '1.2.3.4')
    
    64
    +    ])
    
    65
    +def test_match_package_name(tarball, expected_name, expected_version):
    
    66
    +    name, version = _match_package_name(tarball)
    
    67
    +    assert (expected_name, expected_version) == (name, version)
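
    One regex that satisfies every case above, shown only as an illustration of
    the split rule (the real _match_package_name in plugins/sources/pip.py may
    be written differently): the version is the trailing run of dot-separated
    integers, and the name is everything before the dash that introduces it,
    which is how '-', '.' and '_' can all survive in package names.

        import re

        _SDIST_RE = re.compile(r'^(.+)-(\d+(?:\.\d+)*)\.tar\.gz$')

        def match_package_name(tarball):
            # Greedy (.+) claims as much as it can, so the split lands on the
            # last '-' that is followed by a valid dotted version.
            m = _SDIST_RE.match(tarball)
            return m.group(1), m.group(2)

        assert match_package_name('multiple-hyphens-package-1.2.3.tar.gz') == \
            ('multiple-hyphens-package', '1.2.3')
        assert match_package_name('app.no.6-0.1.tar.gz') == ('app.no.6', '0.1')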

  • tests/storage/virtual_directory_import.py
    1
    +from hashlib import sha256
    
    2
    +import os
    
    3
    +import pytest
    
    4
    +import random
    
    5
    +import tempfile
    
    6
    +from tests.testutils import cli
    
    7
    +
    
    8
    +from buildstream.storage._casbaseddirectory import CasBasedDirectory
    
    9
    +from buildstream.storage._filebaseddirectory import FileBasedDirectory
    
    10
    +from buildstream._artifactcache import ArtifactCache
    
    11
    +from buildstream._artifactcache.cascache import CASCache
    
    12
    +from buildstream import utils
    
    13
    +
    
    14
    +
    
    15
    +# These are comparative tests that check that FileBasedDirectory and
    
    16
    +# CasBasedDirectory act identically.
    
    17
    +
    
    18
    +
    
    19
    +class FakeContext():
    
    20
    +    def __init__(self):
    
    21
    +        self.config_cache_quota = "65536"
    
    22
    +        self.artifactdir = ""
    
    23
    +
    
    24
    +    def get_projects(self):
    
    25
    +        return []
    
    26
    +
    
    27
    +# This is a set of example file system contents. It's a set of trees
    
    28
    +# which are either expected to be problematic or were found to be
    
    29
    +# problematic during random testing.
    
    30
    +
    
    31
    +# The test attempts to import each on top of each other to test
    
    32
    +# importing works consistently.  Each tuple is defined as (<filename>,
    
    33
    +# <type>, <content>). Type can be 'F' (file), 'S' (symlink) or 'D'
    
    34
    +# (directory) with content being the contents for a file or the
    
    35
    +# destination for a symlink.
    
    36
    +root_filesets = [
    
    37
    +    [('a/b/c/textfile1', 'F', 'This is textfile 1\n')],
    
    38
    +    [('a/b/c/textfile1', 'F', 'This is the replacement textfile 1\n')],
    
    39
    +    [('a/b/d', 'D', '')],
    
    40
    +    [('a/b/c', 'S', '/a/b/d')],
    
    41
    +    [('a/b/d', 'S', '/a/b/c')],
    
    42
    +    [('a/b/d', 'D', ''), ('a/b/c', 'S', '/a/b/d')],
    
    43
    +    [('a/b/c', 'D', ''), ('a/b/d', 'S', '/a/b/c')],
    
    44
    +    [('a/b', 'F', 'This is textfile 1\n')],
    
    45
    +    [('a/b/c', 'F', 'This is textfile 1\n')],
    
    46
    +    [('a/b/c', 'D', '')]
    
    47
    +]
    
    48
    +
    
    49
    +empty_hash_ref = sha256().hexdigest()
    
    50
    +RANDOM_SEED = 69105
    
    51
    +NUM_RANDOM_TESTS = 10
    
    52
    +
    
    53
    +
    
    54
    +def generate_import_roots(rootno, directory):
    
    55
    +    rootname = "root{}".format(rootno)
    
    56
    +    rootdir = os.path.join(directory, "content", rootname)
    
    57
    +    if os.path.exists(rootdir):
    
    58
    +        return
    
    59
    +    for (path, typesymbol, content) in root_filesets[rootno - 1]:
    
    60
    +        if typesymbol == 'F':
    
    61
    +            (dirnames, filename) = os.path.split(path)
    
    62
    +            os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
    
    63
    +            with open(os.path.join(rootdir, dirnames, filename), "wt") as f:
    
    64
    +                f.write(content)
    
    65
    +        elif typesymbol == 'D':
    
    66
    +            os.makedirs(os.path.join(rootdir, path), exist_ok=True)
    
    67
    +        elif typesymbol == 'S':
    
    68
    +            (dirnames, filename) = os.path.split(path)
    
    69
    +            os.makedirs(os.path.join(rootdir, dirnames), exist_ok=True)
    
    70
    +            os.symlink(content, os.path.join(rootdir, path))
    
    71
    +
    
    72
    +
    
    73
    +def generate_random_root(rootno, directory):
    
    74
    +    random.seed(RANDOM_SEED + rootno)
    
    75
    +    rootname = "root{}".format(rootno)
    
    76
    +    rootdir = os.path.join(directory, "content", rootname)
    
    77
    +    if os.path.exists(rootdir):
    
    78
    +        return
    
    79
    +    things = []
    
    80
    +    locations = ['.']
    
    81
    +    os.makedirs(rootdir)
    
    82
    +    for i in range(0, 100):
    
    83
    +        location = random.choice(locations)
    
    84
    +        thingname = "node{}".format(i)
    
    85
    +        thing = random.choice(['dir', 'link', 'file'])
    
    86
    +        target = os.path.join(rootdir, location, thingname)
    
    87
    +        if thing == 'dir':
    
    88
    +            os.makedirs(target)
    
    89
    +            locations.append(os.path.join(location, thingname))
    
    90
    +        elif thing == 'file':
    
    91
    +            with open(target, "wt") as f:
    
    92
    +                f.write("This is node {}\n".format(i))
    
    93
    +        elif thing == 'link':
    
    94
    +            # TODO: Make some relative symlinks
    
    95
    +            if random.randint(1, 3) == 1 or not things:
    
    96
    +                os.symlink("/broken", target)
    
    97
    +            else:
    
    98
    +                symlink_destination = random.choice(things)
    
    99
    +                os.symlink(symlink_destination, target)
    
    100
    +        things.append(os.path.join(location, thingname))
    
    101
    +
    
    102
    +
    
    103
    +def file_contents(path):
    
    104
    +    with open(path, "r") as f:
    
    105
    +        result = f.read()
    
    106
    +    return result
    
    107
    +
    
    108
    +
    
    109
    +def file_contents_are(path, contents):
    
    110
    +    return file_contents(path) == contents
    
    111
    +
    
    112
    +
    
    113
    +def create_new_casdir(root_number, fake_context, tmpdir):
    
    114
    +    d = CasBasedDirectory(fake_context)
    
    115
    +    d.import_files(os.path.join(tmpdir, "content", "root{}".format(root_number)))
    
    116
    +    assert d.ref.hash != empty_hash_ref
    
    117
    +    return d
    
    118
    +
    
    119
    +
    
    120
    +def create_new_filedir(root_number, tmpdir):
    
    121
    +    root = os.path.join(tmpdir, "vdir")
    
    122
    +    os.makedirs(root)
    
    123
    +    d = FileBasedDirectory(root)
    
    124
    +    d.import_files(os.path.join(tmpdir, "content", "root{}".format(root_number)))
    
    125
    +    return d
    
    126
    +
    
    127
    +
    
    128
    +def combinations(integer_range):
    
    129
    +    for x in integer_range:
    
    130
    +        for y in integer_range:
    
    131
    +            yield (x, y)
    
    132
    +
    
    133
    +
    
    134
    +def resolve_symlinks(path, root):
    
    135
    +    """ A function to resolve symlinks inside 'path' components apart from the last one.
    
    136
    +        For example, resolve_symlinks('a/b/c/d', '/root')

    137
    +        will return '/root/a/b/f/d' if '/root/a/b/c' is a symlink to '/a/b/f'. The final component of
    
    138
    +        'path' is not resolved, because we typically want to inspect the symlink found
    
    139
    +        at that path, not its target.
    
    140
    +
    
    141
    +    """
    
    142
    +    components = path.split(os.path.sep)
    
    143
    +    location = root
    
    144
    +    for i in range(0, len(components) - 1):
    
    145
    +        location = os.path.join(location, components[i])
    
    146
    +        if os.path.islink(location):
    
    147
    +            # Resolve the link, add on all the remaining components
    
    148
    +            target = os.readlink(location)
    
    149
    +            tail = os.path.sep.join(components[i + 1:])
    
    150
    +
    
    151
    +            if target.startswith(os.path.sep):
    
    152
    +                # Absolute link - relative to root
    
    153
    +                location = os.path.join(root, target.lstrip(os.path.sep), tail)
    
    154
    +            else:
    
    155
    +                # Relative link - relative to the directory containing the symlink

    156
    +                location = os.path.join(os.path.dirname(location), target)
    
    157
    +            return resolve_symlinks(location, root)
    
    158
    +    # If we got here, no symlinks were found. Add on the final component and return.
    
    159
    +    location = os.path.join(location, components[-1])
    
    160
    +    return location
    
    161
    +
    
    162
    +
    
    163
    +def directory_not_empty(path):
    
    164
    +    return os.listdir(path)
    
    165
    +
    
    166
    +
    
    167
    +def _import_test(tmpdir, original, overlay, generator_function, verify_contents=False):
    
    168
    +    fake_context = FakeContext()
    
    169
    +    fake_context.artifactdir = tmpdir
    
    170
    +    fake_context.artifactcache = CASCache(fake_context)
    
    171
    +    # Create some fake content
    
    172
    +    generator_function(original, tmpdir)
    
    173
    +    if original != overlay:
    
    174
    +        generator_function(overlay, tmpdir)
    
    175
    +
    
    176
    +    d = create_new_casdir(original, fake_context, tmpdir)
    
    177
    +
    
    178
    +    duplicate_cas = create_new_casdir(original, fake_context, tmpdir)
    
    179
    +
    
    180
    +    assert duplicate_cas.ref.hash == d.ref.hash
    
    181
    +
    
    182
    +    d2 = create_new_casdir(overlay, fake_context, tmpdir)
    
    183
    +    d.import_files(d2)
    
    184
    +    export_dir = os.path.join(tmpdir, "output-{}-{}".format(original, overlay))
    
    185
    +    roundtrip_dir = os.path.join(tmpdir, "roundtrip-{}-{}".format(original, overlay))
    
    186
    +    d2.export_files(roundtrip_dir)
    
    187
    +    d.export_files(export_dir)
    
    188
    +
    
    189
    +    if verify_contents:
    
    190
    +        for item in root_filesets[overlay - 1]:
    
    191
    +            (path, typename, content) = item
    
    192
    +            realpath = resolve_symlinks(path, export_dir)
    
    193
    +            if typename == 'F':
    
    194
    +                if os.path.isdir(realpath) and directory_not_empty(realpath):
    
    195
    +                    # The file should not have overwritten the directory in this case.
    
    196
    +                    pass
    
    197
    +                else:
    
    198
    +                    assert os.path.isfile(realpath), "{} did not exist in the combined virtual directory".format(path)
    
    199
    +                    assert file_contents_are(realpath, content)
    
    200
    +            elif typename == 'S':
    
    201
    +                if os.path.isdir(realpath) and directory_not_empty(realpath):
    
    202
    +                    # The symlink should not have overwritten the directory in this case.
    
    203
    +                    pass
    
    204
    +                else:
    
    205
    +                    assert os.path.islink(realpath)
    
    206
    +                    assert os.readlink(realpath) == content
    
    207
    +            elif typename == 'D':
    
    208
    +                # We can't do any more tests than this because it
    
    209
    +                # depends on things present in the original. Blank
    
    210
    +                # directories here will be ignored and the original
    
    211
    +                # left in place.
    
    212
    +                assert os.path.lexists(realpath)
    
    213
    +
    
    214
    +    # Now do the same thing with filebaseddirectories and check the contents match
    
    215
    +
    
    216
    +    files = list(utils.list_relative_paths(roundtrip_dir))
    
    217
    +    duplicate_cas._import_files_from_directory(roundtrip_dir, files=files)
    
    218
    +    duplicate_cas._recalculate_recursing_down()
    
    219
    +    if duplicate_cas.parent:
    
    220
    +        duplicate_cas.parent._recalculate_recursing_up(duplicate_cas)
    
    221
    +
    
    222
    +    assert duplicate_cas.ref.hash == d.ref.hash
    
    223
    +
    
    224
    +
    
    225
    +# It's possible to parameterize on both original and overlay values,
    
    226
    +# but this leads to more tests being listed in the output than are
    
    227
    +# comfortable.
    
    228
    +@pytest.mark.parametrize("original", range(1, len(root_filesets) + 1))
    
    229
    +def test_fixed_cas_import(cli, tmpdir, original):
    
    230
    +    for overlay in range(1, len(root_filesets) + 1):
    
    231
    +        _import_test(str(tmpdir), original, overlay, generate_import_roots, verify_contents=True)
    
    232
    +
    
    233
    +
    
    234
    +@pytest.mark.parametrize("original", range(1, NUM_RANDOM_TESTS + 1))
    
    235
    +def test_random_cas_import(cli, tmpdir, original):
    
    236
    +    for overlay in range(1, NUM_RANDOM_TESTS + 1):
    
    237
    +        _import_test(str(tmpdir), original, overlay, generate_random_root, verify_contents=False)
    
    238
    +
    
    239
    +
    
    240
    +def _listing_test(tmpdir, root, generator_function):
    
    241
    +    fake_context = FakeContext()
    
    242
    +    fake_context.artifactdir = tmpdir
    
    243
    +    fake_context.artifactcache = CASCache(fake_context)
    
    244
    +    # Create some fake content
    
    245
    +    generator_function(root, tmpdir)
    
    246
    +
    
    247
    +    d = create_new_filedir(root, tmpdir)
    
    248
    +    filelist = list(d.list_relative_paths())
    
    249
    +
    
    250
    +    d2 = create_new_casdir(root, fake_context, tmpdir)
    
    251
    +    filelist2 = list(d2.list_relative_paths())
    
    252
    +
    
    253
    +    assert filelist == filelist2
    
    254
    +
    
    255
    +
    
    256
    +@pytest.mark.parametrize("root", range(1, 11))
    
    257
    +def test_random_directory_listing(cli, tmpdir, root):
    
    258
    +    _listing_test(str(tmpdir), root, generate_random_root)
    
    259
    +
    
    260
    +
    
    261
    +@pytest.mark.parametrize("root", [1, 2, 3, 4, 5])
    
    262
    +def test_fixed_directory_listing(cli, tmpdir, root):
    
    263
    +    _listing_test(str(tmpdir), root, generate_import_roots)
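
    Stated compactly, the property that _import_test above verifies is (names
    are the function's locals):

        # d             : CAS tree of 'original', with the CAS tree of
        #                 'overlay' imported on top, CAS-to-CAS.
        # duplicate_cas : CAS tree of 'original', with the overlay exported to
        #                 a real filesystem and re-imported from there.
        #
        # Both routes must produce identical content:
        #     assert duplicate_cas.ref.hash == d.ref.hash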

  • tests/testutils/__init__.py
    ... ... @@ -29,3 +29,4 @@ from .artifactshare import create_artifact_share
    29 29
     from .element_generators import create_element_size, update_element_size
    
    30 30
     from .junction import generate_junction
    
    31 31
     from .runner_integration import wait_for_cache_granularity
    
    32
    +from .python_repo import setup_pypi_repo

  • tests/testutils/artifactshare.py
    ... ... @@ -13,7 +13,7 @@ import pytest_cov
    13 13
     from buildstream import _yaml
    
    14 14
     from buildstream._artifactcache.casserver import create_server
    
    15 15
     from buildstream._context import Context
    
    16
    -from buildstream._exceptions import ArtifactError
    
    16
    +from buildstream._exceptions import CASError
    
    17 17
     from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    
    18 18
     
    
    19 19
     
    
    ... ... @@ -48,7 +48,7 @@ class ArtifactShare():
    48 48
             context = Context()
    
    49 49
             context.artifactdir = self.repodir
    
    50 50
     
    
    51
    -        self.cas = context.artifactcache
    
    51
    +        self.cas = context.artifactcache.cas
    
    52 52
     
    
    53 53
             self.total_space = total_space
    
    54 54
             self.free_space = free_space
    
    ... ... @@ -135,7 +135,7 @@ class ArtifactShare():
    135 135
             try:
    
    136 136
                 tree = self.cas.resolve_ref(artifact_key)
    
    137 137
                 return True
    
    138
    -        except ArtifactError:
    
    138
    +        except CASError:
    
    139 139
                 return False
    
    140 140
     
    
    141 141
         # close():
    

  • tests/testutils/mock_os.py
    1
    +from contextlib import contextmanager
    
    2
    +import os
    
    3
    +
    
    4
    +
    
    5
    +# MockAttributeResult
    
    6
    +#
    
    7
    +# A class to take a dictionary of kwargs and make them accessible via
    
    8
    +# attributes of the object.
    
    9
    +#
    
    10
    +class MockAttributeResult(dict):
    
    11
    +    __getattr__ = dict.get
    
    12
    +
    
    13
    +
    
    14
    +# mock_statvfs():
    
    15
    +#
    
    16
    +# Gets a function which mocks statvfs and returns a statvfs result with the kwargs accessible.
    
    17
    +#
    
    18
    +# Returns:
    
    19
    +#    func(path) -> object: object will have all the kwargs accessible via object.kwarg
    
    20
    +#
    
    21
    +# Example:
    
    22
    +#    statvfs = mock_statvfs(f_blocks=10)
    
    23
    +#    result = statvfs("regardless/of/path")
    
    24
    +#    assert result.f_blocks == 10 # True
    
    25
    +def mock_statvfs(**kwargs):
    
    26
    +    def statvfs(path):
    
    27
    +        return MockAttributeResult(kwargs)
    
    28
    +    return statvfs
    
    29
    +
    
    30
    +
    
    31
    +# monkey_patch()
    
    32
    +#
    
    33
    +# with monkey_patch("statvfs", custom_statvfs):
    
    34
    +#    assert os.statvfs == custom_statvfs # True
    
    35
    +# assert os.statvfs == custom_statvfs # False
    
    36
    +#
    
    37
    +@contextmanager
    
    38
    +def monkey_patch(to_patch, patched_func):
    
    39
    +    orig = getattr(os, to_patch)
    
    40
    +    setattr(os, to_patch, patched_func)
    
    41
    +    try:
    
    42
    +        yield
    
    43
    +    finally:
    
    44
    +        setattr(os, to_patch, orig)
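
    A short usage example combining the two helpers (the import path assumes
    the tests/ layout added in this commit):

        import os
        from tests.testutils import mock_os

        # Pretend every filesystem has 10 free 4 KiB blocks.
        statvfs = mock_os.mock_statvfs(f_bavail=10, f_bsize=4096)
        with mock_os.monkey_patch("statvfs", statvfs):
            result = os.statvfs("/any/path")
            print(result.f_bavail * result.f_bsize)   # 40960
        # os.statvfs is restored here, even if the block raised.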

  • tests/testutils/python_repo.py
    1
    +from setuptools.sandbox import run_setup
    
    2
    +import os
    
    3
    +import pytest
    
    4
    +import re
    
    5
    +import shutil
    
    6
    +
    
    7
    +
    
    8
    +SETUP_TEMPLATE = '''\
    
    9
    +from setuptools import setup
    
    10
    +
    
    11
    +setup(
    
    12
    +    name='{name}',
    
    13
    +    version='{version}',
    
    14
    +    description='{name}',
    
    15
    +    packages=['{pkgdirname}'],
    
    16
    +    entry_points={{
    
    17
    +        'console_scripts': [
    
    18
    +            '{pkgdirname}={pkgdirname}:main'
    
    19
    +        ]
    
    20
    +    }}
    
    21
    +)
    
    22
    +'''
    
    23
    +
    
    24
    +# All packages generated via generate_pip_package will have the functions below
    
    25
    +INIT_TEMPLATE = '''\
    
    26
    +def main():
    
    27
    +    print('This is {name}')
    
    28
    +
    
    29
    +def hello(actor='world'):
    
    30
    +    print('Hello {{}}! This is {name}'.format(actor))
    
    31
    +'''
    
    32
    +
    
    33
    +HTML_TEMPLATE = '''\
    
    34
    +<html>
    
    35
    +  <head>
    
    36
    +    <title>Links for {name}</title>
    
    37
    +  </head>
    
    38
    +  <body>
    
    39
    +    <a href=''>{name}-{version}.tar.gz</a><br />
    
    40
    +  </body>
    
    41
    +</html>
    
    42
    +'''
    
    43
    +
    
    44
    +
    
    45
    +# Creates a simple python source distribution and copies this into a specified
    
    46
    +# directory which is to serve as a mock python repository
    
    47
    +#
    
    48
    +# Args:
    
    49
    +#    tmpdir (str): Directory in which the source files will be created
    
    50
    +#    pypi (str): Directory serving as a mock python repository
    
    51
    +#    name (str): The name of the package to be created
    
    52
    +#    version (str): The version of the package to be created
    
    53
    +#
    
    54
    +# Returns:
    
    55
    +#    None
    
    56
    +#
    
    57
    +def generate_pip_package(tmpdir, pypi, name, version='0.1'):
    
    58
    +    # check if package already exists in pypi
    
    59
    +    pypi_package = os.path.join(pypi, re.sub('[^0-9a-zA-Z]+', '-', name))
    
    60
    +    if os.path.exists(pypi_package):
    
    61
    +        return
    
    62
    +
    
    63
    +    # create the package source files in tmpdir resulting in a directory
    
    64
    +    # tree resembling the following structure:
    
    65
    +    #
    
    66
    +    # tmpdir
    
    67
    +    # |-- setup.py
    
    68
    +    # `-- package
    
    69
    +    #     `-- __init__.py
    
    70
    +    #
    
    71
    +    setup_file = os.path.join(tmpdir, 'setup.py')
    
    72
    +    pkgdirname = re.sub('[^0-9a-zA-Z]+', '', name)
    
    73
    +    with open(setup_file, 'w') as f:
    
    74
    +        f.write(
    
    75
    +            SETUP_TEMPLATE.format(
    
    76
    +                name=name,
    
    77
    +                version=version,
    
    78
    +                pkgdirname=pkgdirname
    
    79
    +            )
    
    80
    +        )
    
    81
    +    os.chmod(setup_file, 0o755)
    
    82
    +
    
    83
    +    package = os.path.join(tmpdir, pkgdirname)
    
    84
    +    os.makedirs(package)
    
    85
    +
    
    86
    +    main_file = os.path.join(package, '__init__.py')
    
    87
    +    with open(main_file, 'w') as f:
    
    88
    +        f.write(INIT_TEMPLATE.format(name=name))
    
    89
    +    os.chmod(main_file, 0o644)
    
    90
    +
    
    91
    +    run_setup(setup_file, ['sdist'])
    
    92
    +
    
    93
    +    # create directory for this package in pypi resulting in a directory
    
    94
    +    # tree resembling the following structure:
    
    95
    +    #
    
    96
    +    # pypi
    
    97
    +    # `-- pypi_package
    
    98
    +    #     |-- index.html
    
    99
    +    #     `-- foo-0.1.tar.gz
    
    100
    +    #
    
    101
    +    os.makedirs(pypi_package)
    
    102
    +
    
    103
    +    # add an index html page
    
    104
    +    index_html = os.path.join(pypi_package, 'index.html')
    
    105
    +    with open(index_html, 'w') as f:
    
    106
    +        f.write(HTML_TEMPLATE.format(name=name, version=version))
    
    107
    +
    
    108
    +    # copy generated tarfile to pypi package
    
    109
    +    dist_dir = os.path.join(tmpdir, 'dist')
    
    110
    +    for tar in os.listdir(dist_dir):
    
    111
    +        tarpath = os.path.join(dist_dir, tar)
    
    112
    +        shutil.copy(tarpath, pypi_package)
    
    113
    +
    
    114
    +
    
    115
    +@pytest.fixture
    
    116
    +def setup_pypi_repo(tmpdir):
    
    117
    +    def create_pkgdir(package):
    
    118
    +        pkgdirname = re.sub('[^0-9a-zA-Z]+', '', package)
    
    119
    +        pkgdir = os.path.join(str(tmpdir), pkgdirname)
    
    120
    +        os.makedirs(pkgdir)
    
    121
    +        return pkgdir
    
    122
    +
    
    123
    +    def add_packages(packages, pypi_repo):
    
    124
    +        for package in packages:
    
    125
    +            pkgdir = create_pkgdir(package)
    
    126
    +            generate_pip_package(pkgdir, pypi_repo, package)
    
    127
    +
    
    128
    +    return add_packages
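
    Usage mirrors tests/integration/pip_source.py above: request the fixture in
    a test, then point it at the directory that should act as the index.

        import os

        def test_uses_mock_pypi(tmpdir, setup_pypi_repo):
            pypi_repo = os.path.join(str(tmpdir), 'pypi-repo')
            os.makedirs(pypi_repo, exist_ok=True)
            setup_pypi_repo(['hellolib', 'app2'], pypi_repo)
            # pypi_repo now holds one directory per package, each containing
            # an index.html and a <name>-0.1.tar.gz sdist, ready to be served
            # (or referenced via file://) as a pip index.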

  • tests/utils/misc.py
    1
    +from buildstream import _yaml
    
    2
    +from ..testutils import mock_os
    
    3
    +from ..testutils.runcli import cli
    
    4
    +
    
    5
    +import os
    
    6
    +import pytest
    
    7
    +
    
    8
    +
    
    9
    +KiB = 1024
    
    10
    +MiB = (KiB * 1024)
    
    11
    +GiB = (MiB * 1024)
    
    12
    +TiB = (GiB * 1024)
    
    13
    +
    
    14
    +
    
    15
    +def test_parse_size_over_1024T(cli, tmpdir):
    
    16
    +    BLOCK_SIZE = 4096
    
    17
    +    cli.configure({
    
    18
    +        'cache': {
    
    19
    +            'quota': 2048 * TiB
    
    20
    +        }
    
    21
    +    })
    
    22
    +    project = tmpdir.join("main")
    
    23
    +    os.makedirs(str(project))
    
    24
    +    _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
    
    25
    +
    
    26
    +    bavail = (1025 * TiB) / BLOCK_SIZE
    
    27
    +    patched_statvfs = mock_os.mock_statvfs(f_bavail=bavail, f_bsize=BLOCK_SIZE)
    
    28
    +    with mock_os.monkey_patch("statvfs", patched_statvfs):
    
    29
    +        result = cli.run(project, args=["build", "file.bst"])
    
    30
    +        assert "1025T of available system storage" in result.stderr


