[Notes] [Git][BuildStream/buildstream][jmac/cas_virtual_directory] 14 commits: element.py: Raise an exception on unbuilt element checkout

Jim MacArthur pushed to branch jmac/cas_virtual_directory at BuildStream / buildstream

Commits:

14 changed files:

Changes:

  • buildstream/_artifactcache/artifactcache.py
    @@ -21,7 +21,7 @@ import os
     import string
     from collections import Mapping, namedtuple
     
    -from ..element import _KeyStrength
    +from ..element_enums import KeyStrength
     from .._exceptions import ArtifactError, ImplError, LoadError, LoadErrorReason
     from .._message import Message, MessageType
     from .. import utils
    @@ -201,8 +201,8 @@ class ArtifactCache():
             # user inconvenience.
     
             for element in elements:
    -            strong_key = element._get_cache_key(strength=_KeyStrength.STRONG)
    -            weak_key = element._get_cache_key(strength=_KeyStrength.WEAK)
    +            strong_key = element._get_cache_key(strength=KeyStrength.STRONG)
    +            weak_key = element._get_cache_key(strength=KeyStrength.WEAK)
     
                 for key in (strong_key, weak_key):
                     if key and key not in self.required_artifacts:

  • buildstream/_stream.py
    @@ -434,7 +434,7 @@ class Stream():
     
             except BstError as e:
                 raise StreamError("Error while staging dependencies into a sandbox"
    -                              ": '{}'".format(e), reason=e.reason) from e
    +                              ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
     
         # workspace_open
         #

  • buildstream/element.py
    @@ -78,7 +78,6 @@ import stat
     import copy
     from collections import Mapping, OrderedDict
     from contextlib import contextmanager
    -from enum import Enum
     import tempfile
     import shutil
     
    @@ -97,41 +96,9 @@ from ._platform import Platform
     from .sandbox._config import SandboxConfig
     
     from .storage.directory import Directory
    -from .storage._filebaseddirectory import FileBasedDirectory, VirtualDirectoryError
    -
    -
    -# _KeyStrength():
    -#
    -# Strength of cache key
    -#
    -class _KeyStrength(Enum):
    -
    -    # Includes strong cache keys of all build dependencies and their
    -    # runtime dependencies.
    -    STRONG = 1
    -
    -    # Includes names of direct build dependencies but does not include
    -    # cache keys of dependencies.
    -    WEAK = 2
    -
    -
    -class Scope(Enum):
    -    """Types of scope for a given element"""
    -
    -    ALL = 1
    -    """All elements which the given element depends on, following
    -    all elements required for building. Including the element itself.
    -    """
    -
    -    BUILD = 2
    -    """All elements required for building the element, including their
    -    respective run dependencies. Not including the given element itself.
    -    """
    -
    -    RUN = 3
    -    """All elements required for running the element. Including the element
    -    itself.
    -    """
    +from .storage._filebaseddirectory import FileBasedDirectory
    +from .storage.directory import VirtualDirectoryError
    +from .element_enums import KeyStrength, Scope
     
     
     class ElementError(BstError):
    @@ -623,6 +590,12 @@ class Element(Plugin):
                   dep.stage_artifact(sandbox)
             """
     
    +        if not self._cached():
    +            detail = "No artifacts have been cached yet for that element\n" + \
    +                     "Try building the element first with `bst build`\n"
    +            raise ElementError("No artifacts to stage",
    +                               detail=detail, reason="uncached-checkout-attempt")
    +
             if update_mtimes is None:
                 update_mtimes = []
     
    @@ -1020,7 +993,7 @@
                 # if the pull job is still pending as the remote cache may have an artifact
                 # that matches the strict cache key, which is preferred over a locally
                 # cached artifact with a weak cache key match.
    -            if not dependency._cached_success() or not dependency._get_cache_key(strength=_KeyStrength.STRONG):
    +            if not dependency._cached_success() or not dependency._get_cache_key(strength=KeyStrength.STRONG):
                     return False
     
             if not self.__assemble_scheduled:
    @@ -1033,15 +1006,15 @@
         # Returns the cache key
         #
         # Args:
    -    #    strength (_KeyStrength): Either STRONG or WEAK key strength
    +    #    strength (KeyStrength): Either STRONG or WEAK key strength
         #
         # Returns:
         #    (str): A hex digest cache key for this Element, or None
         #
         # None is returned if information for the cache key is missing.
         #
    -    def _get_cache_key(self, strength=_KeyStrength.STRONG):
    -        if strength == _KeyStrength.STRONG:
    +    def _get_cache_key(self, strength=KeyStrength.STRONG):
    +        if strength == KeyStrength.STRONG:
                 return self.__cache_key
             else:
                 return self.__weak_cache_key
    @@ -1101,7 +1074,7 @@
                 # but does not include keys of dependencies.
                 if self.BST_STRICT_REBUILD:
                     dependencies = [
    -                    e._get_cache_key(strength=_KeyStrength.WEAK)
    +                    e._get_cache_key(strength=KeyStrength.WEAK)
                         for e in self.dependencies(Scope.BUILD)
                     ]
                 else:
    @@ -1126,7 +1099,7 @@
                 # are sufficient. However, don't update the `cached` attributes
                 # until the full cache query below.
                 if (not self.__assemble_scheduled and not self.__assemble_done and
    -                    not self.__cached_success(keystrength=_KeyStrength.WEAK) and
    +                    not self.__cached_success(keystrength=KeyStrength.WEAK) and
                         not self._pull_pending() and self._is_required()):
                     self._schedule_assemble()
                     return
    @@ -1618,7 +1591,7 @@
                         # Store keys.yaml
                         _yaml.dump(_yaml.node_sanitize({
                             'strong': self._get_cache_key(),
    -                        'weak': self._get_cache_key(_KeyStrength.WEAK),
    +                        'weak': self._get_cache_key(KeyStrength.WEAK),
                         }), os.path.join(metadir, 'keys.yaml'))
     
                         # Store dependencies.yaml
    @@ -1687,7 +1660,7 @@
             self._update_state()
     
         def _pull_strong(self, *, progress=None):
    -        weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
    +        weak_key = self._get_cache_key(strength=KeyStrength.WEAK)
     
             key = self.__strict_cache_key
             if not self.__artifacts.pull(self, key, progress=progress):
    @@ -1699,7 +1672,7 @@
             return True
     
         def _pull_weak(self, *, progress=None):
    -        weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
    +        weak_key = self._get_cache_key(strength=KeyStrength.WEAK)
     
             if not self.__artifacts.pull(self, weak_key, progress=progress):
                 return False
    @@ -1708,7 +1681,7 @@
             self._pull_done()
     
             # create tag for strong cache key
    -        key = self._get_cache_key(strength=_KeyStrength.STRONG)
    +        key = self._get_cache_key(strength=KeyStrength.STRONG)
             self.__artifacts.link_key(self, weak_key, key)
     
             return True
    @@ -2079,13 +2052,13 @@
             if keystrength is None:
                 return self.__cached
     
    -        return self.__strong_cached if keystrength == _KeyStrength.STRONG else self.__weak_cached
    +        return self.__strong_cached if keystrength == KeyStrength.STRONG else self.__weak_cached
     
         # __assert_cached()
         #
         # Raises an error if the artifact is not cached.
         #
    -    def __assert_cached(self, keystrength=_KeyStrength.STRONG):
    +    def __assert_cached(self, keystrength=KeyStrength.STRONG):
             assert self.__is_cached(keystrength=keystrength), "{}: Missing artifact {}".format(
                 self, self._get_brief_display_key())
     
    @@ -2409,7 +2382,7 @@
                 # Use weak cache key, if artifact is missing for strong cache key
                 # and the context allows use of weak cache keys
                 if not context.get_strict() and not self.__artifacts.contains(self, key):
    -                key = self._get_cache_key(strength=_KeyStrength.WEAK)
    +                key = self._get_cache_key(strength=KeyStrength.WEAK)
     
             return (self.__artifacts.extract(self, key), key)
     
    @@ -2552,7 +2525,7 @@
             self.__assert_cached(keystrength=keystrength)
             assert self.__build_result is None
     
    -        artifact_base, _ = self.__extract(key=self.__weak_cache_key if keystrength is _KeyStrength.WEAK
    +        artifact_base, _ = self.__extract(key=self.__weak_cache_key if keystrength is KeyStrength.WEAK
                                               else self.__strict_cache_key)
     
             metadir = os.path.join(artifact_base, 'meta')
    @@ -2566,7 +2539,7 @@
     
         def __get_build_result(self, keystrength):
             if keystrength is None:
    -            keystrength = _KeyStrength.STRONG if self._get_context().get_strict() else _KeyStrength.WEAK
    +            keystrength = KeyStrength.STRONG if self._get_context().get_strict() else KeyStrength.WEAK
     
             if self.__build_result is None:
                 self.__load_build_result(keystrength)
    @@ -2584,10 +2557,10 @@
             keys = []
     
             # tag with strong cache key based on dependency versions used for the build
    -        keys.append(self._get_cache_key(strength=_KeyStrength.STRONG))
    +        keys.append(self._get_cache_key(strength=KeyStrength.STRONG))
     
             # also store under weak cache key
    -        keys.append(self._get_cache_key(strength=_KeyStrength.WEAK))
    +        keys.append(self._get_cache_key(strength=KeyStrength.WEAK))
     
             return utils._deduplicate(keys)
     

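With the new guard, staging or checking out an element that has never been built fails up front with reason "uncached-checkout-attempt" instead of producing an empty or partial result. A rough sketch of how calling code could observe this; the element and sandbox objects are assumed, and the surrounding context only suggests (does not prove) that the guard sits at the top of Element.stage_artifact():

    from buildstream import ElementError

    try:
        element.stage_artifact(sandbox)   # assumed entry point; the guard runs before any staging
    except ElementError as e:
        assert e.reason == "uncached-checkout-attempt"
        print(e.detail)                   # "No artifacts have been cached yet for that element ..."
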
  • buildstream/element_enums.py
    +#
    +#  Copyright (C) 2018 Bloomberg LLC
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +#
    +#  Authors:
    +#        Tristan Van Berkom <tristan vanberkom codethink co uk>
    +#        Jim MacArthur <jim macarthur codethink co uk>
    +
    +"""
    +Element - Globally visible enumerations
    +=======================================
    +
    +"""
    +
    +from enum import Enum
    +
    +
    +# KeyStrength():
    +#
    +# Strength of cache key
    +#
    +class KeyStrength(Enum):
    +
    +    # Includes strong cache keys of all build dependencies and their
    +    # runtime dependencies.
    +    STRONG = 1
    +
    +    # Includes names of direct build dependencies but does not include
    +    # cache keys of dependencies.
    +    WEAK = 2
    +
    +
    +class Scope(Enum):
    +    """Types of scope for a given element"""
    +
    +    ALL = 1
    +    """All elements which the given element depends on, following
    +    all elements required for building. Including the element itself.
    +    """
    +
    +    BUILD = 2
    +    """All elements required for building the element, including their
    +    respective run dependencies. Not including the given element itself.
    +    """
    +
    +    RUN = 3
    +    """All elements required for running the element. Including the element
    +    itself.
    +    """

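Since both enumerations now live in their own module, callers import them from buildstream.element_enums rather than from element.py, and the previously private _KeyStrength becomes the public KeyStrength. A short sketch of the new spelling, mirroring the call sites updated above (the element variable is illustrative):

    from buildstream.element_enums import KeyStrength, Scope

    # The public Scope values keep working exactly as before:
    build_deps = list(element.dependencies(Scope.BUILD))

    # Core code now spells cache key queries with the un-underscored enum:
    strong_key = element._get_cache_key(strength=KeyStrength.STRONG)
    weak_key = element._get_cache_key(strength=KeyStrength.WEAK)
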
  • buildstream/plugins/elements/import.py
    @@ -36,6 +36,10 @@ from buildstream import Element, BuildElement, ElementError
     
     # Element implementation for the 'import' kind.
     class ImportElement(BuildElement):
    +
    +    # This plugin has been modified to avoid the use of Sandbox.get_directory
    +    BST_VIRTUAL_DIRECTORY = True
    +
         # pylint: disable=attribute-defined-outside-init
     
         # This plugin has been modified to avoid the use of Sandbox.get_directory

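BST_VIRTUAL_DIRECTORY is a class-level opt-in: a plugin that sets it promises to go through Sandbox.get_virtual_directory() and never call Sandbox.get_directory(). A hedged sketch of what an opted-in element plugin might look like; MyElement, the staged paths and the source path are illustrative, and the other plugin methods are omitted:

    import os
    from buildstream import Element

    class MyElement(Element):

        # Promise the core that this plugin only uses the virtual directory API
        BST_VIRTUAL_DIRECTORY = True

        def assemble(self, sandbox):
            # Work against the Directory abstraction, which may be file- or CAS-backed
            vdir = sandbox.get_virtual_directory()
            install_vdir = vdir.descend(['output'], create=True)
            install_vdir.import_files('/path/to/staged/files')   # illustrative source path
            return os.path.join(os.sep, 'output')
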
  • buildstream/sandbox/_mount.py
    @@ -33,7 +33,7 @@ class Mount():
         def __init__(self, sandbox, mount_point, safe_hardlinks):
             scratch_directory = sandbox._get_scratch_directory()
             # Getting external_directory here is acceptable as we're part of the sandbox code.
    -        root_directory = sandbox.get_virtual_directory().external_directory
    +        root_directory = sandbox.get_virtual_directory().get_underlying_directory()
     
             self.mount_point = mount_point
             self.safe_hardlinks = safe_hardlinks

  • buildstream/sandbox/_sandboxbwrap.py
    @@ -58,7 +58,7 @@ class SandboxBwrap(Sandbox):
             stdout, stderr = self._get_output()
     
             # Allowable access to underlying storage as we're part of the sandbox
    -        root_directory = self.get_virtual_directory().external_directory
    +        root_directory = self.get_virtual_directory().get_underlying_directory()
     
             # Fallback to the sandbox default settings for
             # the cwd and env.
    @@ -243,6 +243,7 @@
                             # a bug, bwrap mounted a tempfs here and when it exits, that better be empty.
                             pass
     
    +        self._vdir.mark_changed()
             return exit_code
     
         def run_bwrap(self, argv, stdin, stdout, stderr, env, interactive):

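Both call sites above stop reaching into the FileBasedDirectory-specific .external_directory attribute and go through get_underlying_directory(), which only succeeds for directories genuinely backed by the host filesystem. A sketch of the resulting pattern for sandbox-internal code (hypothetical, not an actual call site in this push; the sandbox variable is assumed):

    from buildstream.storage.directory import VirtualDirectoryError

    root = sandbox.get_virtual_directory()
    try:
        host_path = root.get_underlying_directory()   # real path for FileBasedDirectory
    except VirtualDirectoryError:
        host_path = None                              # CAS-backed roots have no host path
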
  • buildstream/sandbox/_sandboxchroot.py
    @@ -106,6 +106,7 @@ class SandboxChroot(Sandbox):
             status = self.chroot(rootfs, command, stdin, stdout,
                                  stderr, cwd, env, flags)
     
    +        self._vdir.mark_changed()
             return status
     
         # chroot()

  • buildstream/sandbox/sandbox.py
    @@ -31,6 +31,7 @@ See also: :ref:`sandboxing`.
     import os
     from .._exceptions import ImplError, BstError
     from ..storage._filebaseddirectory import FileBasedDirectory
    +from ..storage._casbaseddirectory import CasBasedDirectory
     
     
     class SandboxFlags():
    @@ -105,6 +106,7 @@ class Sandbox():
             self.__scratch = os.path.join(self.__directory, 'scratch')
             for directory_ in [self._root, self.__scratch]:
                 os.makedirs(directory_, exist_ok=True)
    +        self._vdir = None
     
         def get_directory(self):
             """Fetches the sandbox root directory
    @@ -133,8 +135,12 @@
                (str): The sandbox root directory
     
             """
    -        # For now, just create a new Directory every time we're asked
    -        return FileBasedDirectory(self._root)
    +        if not self._vdir:
    +            if 'BST_CAS_DIRECTORIES' in os.environ:
    +                self._vdir = CasBasedDirectory(self.__context, ref=None)
    +            else:
    +                self._vdir = FileBasedDirectory(self._root)
    +        return self._vdir
     
         def set_environment(self, environment):
             """Sets the environment variables for the sandbox

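get_virtual_directory() now caches a single Directory for the lifetime of the sandbox in self._vdir, and the backend is chosen once: if BST_CAS_DIRECTORIES is present in the environment the sandbox root becomes a CasBasedDirectory built against the context's CAS, otherwise the existing FileBasedDirectory over the sandbox root is used. A rough sketch of opting in during development (the sandbox variable is assumed; only membership in os.environ is checked, so any value works):

    import os

    # Opt into CAS-backed virtual directories before the sandbox root is first requested
    os.environ['BST_CAS_DIRECTORIES'] = '1'

    vdir = sandbox.get_virtual_directory()             # now a CasBasedDirectory
    assert vdir is sandbox.get_virtual_directory()     # cached on self._vdir, same object returned
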
  • buildstream/storage/_casbaseddirectory.py
    +#!/usr/bin/env python3
    +#
    +#  Copyright (C) 2018 Bloomberg LLC
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +#
    +#  Authors:
    +#        Jim MacArthur <jim macarthur codethink co uk>
    +
    +"""
    +CasBasedDirectory
    +=================
    +
    +Implementation of the Directory class which backs onto a Merkle-tree based content
    +addressable storage system.
    +
    +See also: :ref:`sandboxing`.
    +"""
    +
    +from collections import OrderedDict
    +
    +import os
    +import tempfile
    +import stat
    +
    +from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    +from .._exceptions import BstError
    +from .directory import Directory, VirtualDirectoryError
    +from ._filebaseddirectory import FileBasedDirectory
    +from ..utils import FileListResult, safe_copy, list_relative_paths
    +from .._artifactcache.cascache import CASCache
    +
    +
    +class IndexEntry():
    +    """ Used in our index of names to objects to store the 'modified' flag
    +    for directory entries. Because we need both the remote_execution_pb2 object
    +    and our own Directory object for directory entries, we store both. For files
    +    and symlinks, only pb2_object is used. """
    +    def __init__(self, pb2_object, buildstream_object=None, modified=False):
    +        self.pb2_object = pb2_object
    +        self.buildstream_object = buildstream_object
    +        self.modified = modified
    +
    +
    +# CasBasedDirectory intentionally doesn't call its superclass constructor,
    +# which is meant to be unimplemented.
    +# pylint: disable=super-init-not-called
    +
    +class CasBasedDirectory(Directory):
    +    """
    +    CAS-based directories can have two names; one is a 'common name' which has no effect
    +    on functionality, and the 'filename'. If a CasBasedDirectory has a parent, then 'filename'
    +    must be the name of an entry in the parent directory's index which points to this object.
    +    This is used to inform a parent directory that it must update the given hash for this
    +    object when this object changes.
    +
    +    Typically a top-level CasBasedDirectory will have a common_name and no filename, and
    +    subdirectories will have a filename and no common_name. common_name can be used to identify
    +    CasBasedDirectory objects in a log file, since they have no unique position in a file
    +    system.
    +    """
    +    def __init__(self, context, ref=None, parent=None, common_name="untitled", filename=None):
    +        self.context = context
    +        self.cas_directory = os.path.join(context.artifactdir, 'cas')
    +        self.filename = filename
    +        self.common_name = common_name
    +        self.pb2_directory = remote_execution_pb2.Directory()
    +        self.cas_cache = CASCache(context)
    +        if ref:
    +            with open(self.cas_cache.objpath(ref), 'rb') as f:
    +                self.pb2_directory.ParseFromString(f.read())
    +
    +        self.ref = ref
    +        self.index = OrderedDict()
    +        self.parent = parent
    +        self._directory_read = False
    +        self._populate_index()
    +
    +    def _populate_index(self):
    +        if self._directory_read:
    +            return
    +        for entry in self.pb2_directory.directories:
    +            buildStreamDirectory = CasBasedDirectory(self.context, ref=entry.digest,
    +                                                     parent=self, filename=entry.name)
    +            self.index[entry.name] = IndexEntry(entry, buildstream_object=buildStreamDirectory)
    +        for entry in self.pb2_directory.files:
    +            self.index[entry.name] = IndexEntry(entry)
    +        for entry in self.pb2_directory.symlinks:
    +            self.index[entry.name] = IndexEntry(entry)
    +        self._directory_read = True
    +
    +    def _recalculate_recursing_up(self, caller=None):
    +        """Recalculate the hash for this directory and store the results in
    +        the cache.  If this directory has a parent, tell it to
    +        recalculate (since changing this directory changes an entry in
    +        the parent).
    +
    +        """
    +        self.ref = self.cas_cache.add_object(buffer=self.pb2_directory.SerializeToString())
    +        if caller:
    +            old_dir = self._find_pb2_entry(caller.filename)
    +            self.cas_cache.add_object(digest=old_dir.digest, buffer=caller.pb2_directory.SerializeToString())
    +        if self.parent:
    +            self.parent._recalculate_recursing_up(self)
    +
    +    def _recalculate_recursing_down(self, parent=None):
    +        """Recalculate the hash for this directory and any
    +        subdirectories. Hashes for subdirectories should be calculated
    +        and stored after a significant operation (e.g. an
    +        import_files() call) but not after adding each file, as that
    +        is extremely wasteful.
    +
    +        """
    +        for entry in self.pb2_directory.directories:
    +            self.index[entry.name].buildstream_object._recalculate_recursing_down(entry)
    +
    +        if parent:
    +            self.ref = self.cas_cache.add_object(digest=parent.digest, buffer=self.pb2_directory.SerializeToString())
    +        else:
    +            self.ref = self.cas_cache.add_object(buffer=self.pb2_directory.SerializeToString())
    +        # We don't need to do anything more than that; files were already added earlier, and symlinks are
    +        # part of the directory structure.
    +
    +    def _find_pb2_entry(self, name):
    +        if name in self.index:
    +            return self.index[name].pb2_object
    +        return None
    +
    +    def _add_directory(self, name):
    +        if name in self.index:
    +            newdir = self.index[name].buildstream_object
    +            if not isinstance(newdir, CasBasedDirectory):
    +                # TODO: This may not be an actual error; it may actually overwrite it
    +                raise VirtualDirectoryError("New directory {} in {} would overwrite existing non-directory of type {}"
    +                                            .format(name, str(self), type(newdir)))
    +            dirnode = self._find_pb2_entry(name)
    +        else:
    +            newdir = CasBasedDirectory(self.context, parent=self, filename=name)
    +            dirnode = self.pb2_directory.directories.add()
    +
    +        dirnode.name = name
    +
    +        # Calculate the hash for an empty directory
    +        new_directory = remote_execution_pb2.Directory()
    +        self.cas_cache.add_object(digest=dirnode.digest, buffer=new_directory.SerializeToString())
    +        self.index[name] = IndexEntry(dirnode, buildstream_object=newdir)
    +        return newdir
    +
    +    def _add_new_file(self, basename, filename):
    +        filenode = self.pb2_directory.files.add()
    +        filenode.name = filename
    +        self.cas_cache.add_object(digest=filenode.digest, path=os.path.join(basename, filename))
    +        is_executable = os.access(os.path.join(basename, filename), os.X_OK)
    +        filenode.is_executable = is_executable
    +        self.index[filename] = IndexEntry(filenode, modified=(filename in self.index))
    +
    +    def _add_new_link(self, basename, filename):
    +        existing_link = self._find_pb2_entry(filename)
    +        if existing_link:
    +            symlinknode = existing_link
    +        else:
    +            symlinknode = self.pb2_directory.symlinks.add()
    +        symlinknode.name = filename
    +        # A symlink node has no digest.
    +        symlinknode.target = os.readlink(os.path.join(basename, filename))
    +        self.index[filename] = IndexEntry(symlinknode, modified=(existing_link is not None))
    +
    +    def delete_entry(self, name):
    +        for collection in [self.pb2_directory.files, self.pb2_directory.symlinks, self.pb2_directory.directories]:
    +            if name in collection:
    +                collection.remove(name)
    +        if name in self.index:
    +            del self.index[name]
    +
    +    def descend(self, subdirectory_spec, create=False):
    +        """Descend one or more levels of directory hierarchy and return a new
    +        Directory object for that directory.
    +
    +        Arguments:
    +        * subdirectory_spec (list of strings): A list of strings which are all directory
    +          names.
    +        * create (boolean): If this is true, the directories will be created if
    +          they don't already exist.
    +
    +        Note: At the moment, creating a directory by descending does
    +        not update this object in the CAS cache. However, performing
    +        an import_files() into a subdirectory of any depth obtained by
    +        descending from this object *will* cause this directory to be
    +        updated and stored.
    +
    +        """
    +
    +        # It's very common to send a directory name instead of a list and this causes
    +        # bizarre errors, so check for it here
    +        if not isinstance(subdirectory_spec, list):
    +            subdirectory_spec = [subdirectory_spec]
    +        if not subdirectory_spec:
    +            return self
    +        # Because of the way split works, it's common to get a list which begins with
    +        # an empty string. Detect these and remove them, then start again.
    +        if subdirectory_spec[0] == "":
    +            return self.descend(subdirectory_spec[1:], create)
    +
    +        if subdirectory_spec[0] in self.index:
    +            entry = self.index[subdirectory_spec[0]].buildstream_object
    +            if isinstance(entry, CasBasedDirectory):
    +                return entry.descend(subdirectory_spec[1:], create)
    +            else:
    +                error = "Cannot descend into {}, which is a '{}' in the directory {}"
    +                raise VirtualDirectoryError(error.format(subdirectory_spec[0],
    +                                                         type(entry).__name__,
    +                                                         self))
    +        else:
    +            if create:
    +                newdir = self._add_directory(subdirectory_spec[0])
    +                return newdir.descend(subdirectory_spec[1:], create)
    +            else:
    +                error = "No entry called '{}' found in {}. There are directories called {}."
    +                directory_list = ",".join([entry.name for entry in self.pb2_directory.directories])
    +                raise VirtualDirectoryError(error.format(subdirectory_spec[0], str(self),
    +                                                         directory_list))
    +        return None
    +
    +    def find_root(self):
    +        """ Finds the root of this directory tree by following 'parent' until there is
    +        no parent. """
    +        if self.parent:
    +            return self.parent.find_root()
    +        else:
    +            return self
    +
    +    def _resolve_symlink_or_directory(self, name):
    +        """Used only by _import_files_from_directory. Tries to resolve a
    +        directory name or symlink name. 'name' must be an entry in this
    +        directory. It must be a single symlink or directory name, not a path
    +        separated by path separators. If it's an existing directory name, it
    +        just returns the Directory object for that. If it's a symlink, it will
    +        attempt to find the target of the symlink and return that as a
    +        Directory object.
    +
    +        If a symlink target doesn't exist, it will attempt to create it
    +        as a directory as long as it's within this directory tree.
    +        """
    +
    +        if isinstance(self.index[name].buildstream_object, Directory):
    +            return self.index[name].buildstream_object
    +        # OK then, it's a symlink
    +        symlink = self._find_pb2_entry(name)
    +        absolute = symlink.target.startswith(os.path.sep)
    +        if absolute:
    +            root = self.find_root()
    +        else:
    +            root = self
    +        directory = root
    +        components = symlink.target.split(os.path.sep)
    +        for c in components:
    +            if c == "..":
    +                directory = directory.parent
    +            else:
    +                directory = directory.descend(c, create=True)
    +        return directory
    +
    +    def _check_replacement(self, name, path_prefix, fileListResult):
    +        """ Checks whether 'name' exists, and if so, whether we can overwrite it.
    +        If we can, add the name to 'overwritten_files' and delete the existing entry.
    +        Returns 'True' if the import should go ahead.
    +        fileListResult.overwritten and fileListResult.ignore are updated depending
    +        on the result. """
    +        existing_entry = self._find_pb2_entry(name)
    +        relative_pathname = os.path.join(path_prefix, name)
    +        if existing_entry is None:
    +            return True
    +        if (isinstance(existing_entry,
    +                       (remote_execution_pb2.FileNode, remote_execution_pb2.SymlinkNode))):
    +            self.delete_entry(name)
    +            fileListResult.overwritten.append(relative_pathname)
    +            return True
    +        elif isinstance(existing_entry, remote_execution_pb2.DirectoryNode):
    +            # If 'name' maps to a DirectoryNode, then there must be an entry in index
    +            # pointing to another Directory.
    +            if self.index[name].buildstream_object.is_empty():
    +                self.delete_entry(name)
    +                fileListResult.overwritten.append(relative_pathname)
    +                return True
    +            else:
    +                # We can't overwrite a non-empty directory, so we just ignore it.
    +                fileListResult.ignored.append(relative_pathname)
    +                return False
    +        raise VirtualDirectoryError("Entry '{}' is not a recognised file/link/directory and not None; it is {}"
    +                                    .format(name, type(existing_entry)))
    +
    +    def _import_directory_recursively(self, directory_name, source_directory, remaining_path, path_prefix):
    +        """ _import_directory_recursively and _import_files_from_directory will be called alternately
    +        as a directory tree is descended. """
    +        if directory_name in self.index:
    +            subdir = self._resolve_symlink_or_directory(directory_name)
    +        else:
    +            subdir = self._add_directory(directory_name)
    +        new_path_prefix = os.path.join(path_prefix, directory_name)
    +        subdir_result = subdir._import_files_from_directory(os.path.join(source_directory, directory_name),
    +                                                            [os.path.sep.join(remaining_path)],
    +                                                            path_prefix=new_path_prefix)
    +        return subdir_result
    +
    +    def _import_files_from_directory(self, source_directory, files, path_prefix=""):
    +        """ Imports files from a traditional directory """
    +        result = FileListResult()
    +        for entry in sorted(files):
    +            split_path = entry.split(os.path.sep)
    +            # The actual file on the FS we're importing
    +            import_file = os.path.join(source_directory, entry)
    +            # The destination filename, relative to the root where the import started
    +            relative_pathname = os.path.join(path_prefix, entry)
    +            if len(split_path) > 1:
    +                directory_name = split_path[0]
    +                # Hand this off to the importer for that subdir. This will only do one file -
    +                # a better way would be to hand off all the files in this subdir at once.
    +                subdir_result = self._import_directory_recursively(directory_name, source_directory,
    +                                                                   split_path[1:], path_prefix)
    +                result.combine(subdir_result)
    +            elif os.path.islink(import_file):
    +                if self._check_replacement(entry, path_prefix, result):
    +                    self._add_new_link(source_directory, entry)
    +                    result.files_written.append(relative_pathname)
    +            elif os.path.isdir(import_file):
    +                # A plain directory which already exists isn't a problem; just ignore it.
    +                if entry not in self.index:
    +                    self._add_directory(entry)
    +            elif os.path.isfile(import_file):
    +                if self._check_replacement(entry, path_prefix, result):
    +                    self._add_new_file(source_directory, entry)
    +                    result.files_written.append(relative_pathname)
    +        return result
    +
    +    def import_files(self, external_pathspec, *, files=None,
    +                     report_written=True, update_utimes=False,
    +                     can_link=False):
    +        """Imports some or all files from external_path into this directory.
    +
    +        Keyword arguments: external_pathspec: Either a string
    +        containing a pathname, or a Directory object, to use as the
    +        source.
    +
    +        files (list of strings): A list of all the files relative to
    +        the external_pathspec to copy. If 'None' is supplied, all
    +        files are copied.
    +
    +        report_written (bool): Return the full list of files
    +        written. Defaults to true. If false, only a list of
    +        overwritten files is returned.
    +
    +        update_utimes (bool): Currently ignored, since CAS does not store utimes.
    +
    +        can_link (bool): Ignored, since hard links do not have any meaning within CAS.
    +        """
    +        if isinstance(external_pathspec, FileBasedDirectory):
    +            source_directory = external_pathspec.get_underlying_directory()
    +        elif isinstance(external_pathspec, CasBasedDirectory):
    +            # TODO: This transfers from one CAS to another via the
    +            # filesystem, which is very inefficient. Alter this so it
    +            # transfers refs across directly.
    +            with tempfile.TemporaryDirectory(prefix="roundtrip") as tmpdir:
    +                external_pathspec.export_files(tmpdir)
    +                if files is None:
    +                    files = list_relative_paths(tmpdir)
    +                result = self._import_files_from_directory(tmpdir, files=files)
    +            return result
    +        else:
    +            source_directory = external_pathspec
    +
    +        if files is None:
    +            files = list_relative_paths(source_directory)
    +
    +        # TODO: No notice is taken of report_written, update_utimes or can_link.
    +        # Current behaviour is to fully populate the report, which is inefficient,
    +        # but still correct.
    +        result = self._import_files_from_directory(source_directory, files=files)
    +
    +        # We need to recalculate and store the hashes of all directories both
    +        # up and down the tree; we have changed our directory by importing files
    +        # which changes our hash and all our parents' hashes of us. The trees
    +        # lower down need to be stored in the CAS as they are not automatically
    +        # added during construction.
    +        self._recalculate_recursing_down()
    +        if self.parent:
    +            self.parent._recalculate_recursing_up(self)
    +        return result
    +
    +    def set_deterministic_mtime(self):
    +        """ Sets a static modification time for all regular files in this directory.
    +        Since we don't store any modification time, we don't need to do anything.
    +        """
    +        pass
    +
    +    def set_deterministic_user(self):
    +        """ Sets all files in this directory to the current user's euid/egid.
    +        We also don't store user data, so this can be ignored.
    +        """
    +        pass
    +
    +    def export_files(self, to_directory, *, can_link=False, can_destroy=False):
    +        """Copies everything from this into to_directory, which must be the name
    +        of a traditional filesystem directory.
    +
    +        Arguments:
    +
    +        to_directory (string): a path outside this directory object
    +        where the contents will be copied to.
    +
    +        can_link (bool): Whether we can create hard links in to_directory
    +        instead of copying.
    +
    +        can_destroy (bool): Whether we can destroy elements in this
    +        directory to export them (e.g. by renaming them as the
    +        target).
    +
    +        """
    +
    +        if not os.path.exists(to_directory):
    +            os.mkdir(to_directory)
    +
    +        for entry in self.pb2_directory.directories:
    +            if entry.name not in self.index:
    +                raise VirtualDirectoryError("CasDir {} contained {} in directories but not in the index"
    +                                            .format(str(self), entry.name))
    +            if not self._directory_read:
    +                raise VirtualDirectoryError("CasDir {} has not been indexed yet".format(str(self)))
    +            dest_dir = os.path.join(to_directory, entry.name)
    +            if not os.path.exists(dest_dir):
    +                os.mkdir(dest_dir)
    +            target = self.descend([entry.name])
    +            target.export_files(dest_dir)
    +        for entry in self.pb2_directory.files:
    +            # Extract the entry to a single file
    +            dest_name = os.path.join(to_directory, entry.name)
    +            src_name = self.cas_cache.objpath(entry.digest)
    +            safe_copy(src_name, dest_name)
    +            if entry.is_executable:
    +                os.chmod(dest_name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
    +                         stat.S_IRGRP | stat.S_IXGRP |
    +                         stat.S_IROTH | stat.S_IXOTH)
    +        for entry in self.pb2_directory.symlinks:
    +            src_name = os.path.join(to_directory, entry.name)
    +            target_name = entry.target
    +            try:
    +                os.symlink(target_name, src_name)
    +            except FileExistsError as e:
    +                raise BstError(("Cannot create a symlink named {} pointing to {}." +
    +                                " The original error was: {}").
    +                               format(src_name, entry.target, e))
    +
    +    def export_to_tar(self, tarfile, destination_dir, mtime=0):
    +        raise NotImplementedError()
    +
    +    def mark_changed(self):
    +        """ It should not be possible to externally modify a CAS-based
    +        directory at the moment."""
    +        raise NotImplementedError()
    +
    +    def is_empty(self):
    +        """ Return true if this directory has no files, subdirectories or links in it.
    +        """
    +        return len(self.index) == 0
    +
    +    def mark_unmodified(self):
    +        """ Marks all files in this directory (recursively) as unmodified.
    +        """
    +        # TODO: We don't actually mark our own directory unmodified
    +        # here, because we can't get to the containing IndexEntry -
    +        # just the objects we contain.
    +        for i in self.index.values():
    +            i.modified = False
    +            if isinstance(i.buildstream_object, CasBasedDirectory):
    +                i.buildstream_object.mark_unmodified()
    +
    +    def list_modified_paths(self):
    +        """Provide a list of relative paths which have been modified since the
    +        last call to mark_unmodified.
    +
    +        Return value: List(str) - list of modified paths
    +        """
    +
    +        filelist = []
    +        for (k, v) in self.index.items():
    +            if isinstance(v.buildstream_object, CasBasedDirectory):
    +                filelist.extend([k + "/" + x for x in v.buildstream_object.list_relative_paths()])
    +            elif isinstance(v.pb2_object, remote_execution_pb2.FileNode) and v.modified:
    +                filelist.append(k)
    +        return filelist
    +
    +    def list_relative_paths(self):
    +        """Provide a list of all relative paths.
    +
    +        NOTE: This list is not in the same order as utils.list_relative_paths.
    +
    +        Return value: List(str) - list of all paths
    +        """
    +
    +        filelist = []
    +        for (k, v) in self.index.items():
    +            if isinstance(v.buildstream_object, CasBasedDirectory):
    +                filelist.extend([k + "/" + x for x in v.buildstream_object.list_relative_paths()])
    +            elif isinstance(v.pb2_object, remote_execution_pb2.FileNode):
    +                filelist.append(k)
    +        return filelist
    +
    +    def _get_identifier(self):
    +        path = ""
    +        if self.parent:
    +            path = self.parent._get_identifier()
    +        if self.filename:
    +            path += "/" + self.filename
    +        else:
    +            path += "/" + self.common_name
    +        return path
    +
    +    def __str__(self):
    +        return "[CAS:{}]".format(self._get_identifier())
    +
    +    def get_underlying_directory(self):
    +        """ There is no underlying directory for a CAS-backed directory, so
    +        throw an exception. """
    +        raise VirtualDirectoryError("get_underlying_directory was called on a CAS-backed directory," +
    +                                    " which has no underlying directory.")

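A minimal usage sketch of the new class, assuming a configured BuildStream Context (so context.artifactdir points at a valid artifact directory); the host paths are illustrative:

    from buildstream.storage._casbaseddirectory import CasBasedDirectory

    vdir = CasBasedDirectory(context, ref=None)           # empty root, no existing directory digest
    result = vdir.import_files('/path/to/source')         # hashes the files into the local CAS
    print(result.files_written)                           # relative paths that were imported

    subdir = vdir.descend(['usr', 'bin'], create=True)    # create-on-descend
    vdir.export_files('/path/to/checkout')                # materialise the tree back onto the filesystem
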
  • buildstream/storage/_filebaseddirectory.py
    @@ -29,25 +29,12 @@ See also: :ref:`sandboxing`.
     
     import os
     import time
    -from .._exceptions import BstError, ErrorDomain
    -from .directory import Directory
    +from .directory import Directory, VirtualDirectoryError
     from ..utils import link_files, copy_files, list_relative_paths, _get_link_mtime, _magic_timestamp
     from ..utils import _set_deterministic_user, _set_deterministic_mtime
     
    -
    -class VirtualDirectoryError(BstError):
    -    """Raised by Directory functions when system calls fail.
    -    This will be handled internally by the BuildStream core,
    -    if you need to handle this error, then it should be reraised,
    -    or either of the :class:`.ElementError` or :class:`.SourceError`
    -    exceptions should be raised from this error.
    -    """
    -    def __init__(self, message, reason=None):
    -        super().__init__(message, domain=ErrorDomain.VIRTUAL_FS, reason=reason)
    -
    -
     # FileBasedDirectory intentionally doesn't call its superclass constuctor,
    -# which is mean to be unimplemented.
    +# which is meant to be unimplemented.
     # pylint: disable=super-init-not-called
     
     
    @@ -108,7 +95,8 @@ class FileBasedDirectory(Directory):
                 if create:
                     new_path = os.path.join(self.external_directory, subdirectory_spec[0])
                     os.makedirs(new_path, exist_ok=True)
    -                return FileBasedDirectory(new_path).descend(subdirectory_spec[1:], create)
    +                self.index[subdirectory_spec[0]] = FileBasedDirectory(new_path).descend(subdirectory_spec[1:], create)
    +                return self.index[subdirectory_spec[0]]
                 else:
                     error = "No entry called '{}' found in the directory rooted at {}"
                     raise VirtualDirectoryError(error.format(subdirectory_spec[0], self.external_directory))
    @@ -134,8 +122,12 @@
     
                 for f in import_result.files_written:
                     os.utime(os.path.join(self.external_directory, f), times=(cur_time, cur_time))
    +        self.mark_changed()
             return import_result
     
    +    def mark_changed(self):
    +        self._directory_read = False
    +
         def set_deterministic_mtime(self):
             _set_deterministic_mtime(self.external_directory)
     
    @@ -214,3 +206,8 @@
             # which exposes the sandbox directory; we will have to assume for the time being
             # that people will not abuse __str__.
             return self.external_directory
    +
    +    def get_underlying_directory(self) -> str:
    +        """ Returns the underlying (real) file system directory this
    +        object refers to. """
    +        return self.external_directory

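A rough usage sketch of the patched FileBasedDirectory follows; the temporary directory and the ['a', 'b'] path components are invented for illustration and are not part of the commit:

    # Sketch only; assumes this branch is installed and importable.
    import tempfile
    from buildstream.storage._filebaseddirectory import FileBasedDirectory

    root = FileBasedDirectory(tempfile.mkdtemp())

    # With the descend() change above, the newly created subdirectory object
    # is stored in root.index before being returned, so later descends can
    # reuse it instead of rebuilding it.
    sub = root.descend(['a', 'b'], True)

    # get_underlying_directory() exposes the real path backing this object.
    print(sub.get_underlying_directory())  # ends in .../a/b

    # mark_changed() records that the backing directory was modified outside
    # this API (normally by a sandbox run), so it will be re-read later.
    root.mark_changed()
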
  • buildstream/storage/directory.py
    ... ... @@ -31,6 +31,19 @@ See also: :ref:`sandboxing`.
     
     """
     
    +from .._exceptions import BstError, ErrorDomain
    +
    +
    +class VirtualDirectoryError(BstError):
    +    """Raised by Directory functions when system calls fail.
    +    This will be handled internally by the BuildStream core,
    +    if you need to handle this error, then it should be reraised,
    +    or either of the :class:`.ElementError` or :class:`.SourceError`
    +    exceptions should be raised from this error.
    +    """
    +    def __init__(self, message, reason=None):
    +        super().__init__(message, domain=ErrorDomain.VIRTUAL_FS, reason=reason)
    +
     
     class Directory():
         def __init__(self, external_directory=None):
    ... ... @@ -111,6 +124,14 @@ class Directory():
             """
             raise NotImplementedError()
     
    +    def mark_changed(self):
    +        """ Mark this directory as having been changed outside this API. This
    +        normally can only happen by calling the Sandbox's `run`
    +        method.
    +
    +        """
    +        raise NotImplementedError()
    +
         # Convenience functions
         def is_empty(self):
             """ Return true if this directory has no files, subdirectories or links in it.
    ... ... @@ -153,3 +174,9 @@
     
             """
             raise NotImplementedError()
    +
    +    def get_underlying_directory(self) -> str:
    +        """ Returns the underlying (real) file system directory this
    +        object refers to. This will throw an exception if there isn't
    +        a real directory behind the object. """
    +        raise NotImplementedError()

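With VirtualDirectoryError now defined next to the Directory base class, callers that need a real filesystem path can treat the exception as the signal that a directory has no backing on disk. An illustrative helper (the function name is made up for this example, not part of the patch):

    # Works with any Directory implementation from this branch:
    # FileBasedDirectory returns its external directory, while a CAS-backed
    # directory raises VirtualDirectoryError (see the hunks above).
    from buildstream.storage.directory import VirtualDirectoryError

    def real_path_or_none(vdir):
        try:
            return vdir.get_underlying_directory()
        except VirtualDirectoryError:
            return None
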
  • doc/source/install_linux_distro.rst
    ... ... @@ -114,6 +114,30 @@ For the default plugins::
           bzr git lzip patch ostree python3-arpy python3-gobject
     
     
    +Ubuntu
    +++++++
    +
    +Ubuntu 18.04 LTS or later
    +^^^^^^^^^^^^^^^^^^^^^^^^^
    +Install the dependencies with::
    +
    +  sudo apt install \
    +      python3 fuse bubblewrap \
    +      python3-pip python3-dev git
    +
    +For the default plugins::
    +
    +  sudo apt install \
    +      bzr gir1.2-ostree-1.0 git lzip ostree patch python3-arpy python3-gi
    +
    +Ubuntu 16.04 LTS
    +^^^^^^^^^^^^^^^^
    +On Ubuntu 16.04, neither `bubblewrap <https://github.com/projectatomic/bubblewrap/>`_
    +nor `ostree <https://github.com/ostreedev/ostree>`_ is available in the official repositories.
    +You will need to install them in whichever way you see fit. Refer to the upstream documentation
    +for advice on this.
    +
    +
     Installing
     ~~~~~~~~~~
     Once you have the base system dependencies, you can clone the BuildStream

  • tests/frontend/buildcheckout.py
    ... ... @@ -96,6 +96,16 @@ def test_build_checkout_deps(datafiles, cli, deps):
             assert not os.path.exists(filename)
     
     
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_build_checkout_unbuilt(datafiles, cli):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    checkout = os.path.join(cli.directory, 'checkout')
    +
    +    # Check that checking out an unbuilt element fails nicely
    +    result = cli.run(project=project, args=['checkout', 'target.bst', checkout])
    +    result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
    +
    +
     @pytest.mark.datafiles(DATA_DIR)
     def test_build_checkout_tarball(datafiles, cli):
         project = os.path.join(datafiles.dirname, datafiles.basename)


