[Notes] [Git][BuildStream/buildstream][raoul/440-source-cache] 17 commits: tests: Migrated cache quota test into artifactcache/cache_size.py

Raoul Hidalgo Charman pushed to branch raoul/440-source-cache at BuildStream / buildstream

Commits:

30 changed files:

Changes:

  • buildstream/_artifactcache.py
    @@ -20,8 +20,8 @@
     import multiprocessing
     import os
     import string
    -from collections.abc import Mapping

    +from ._basecache import BaseCache
     from .types import _KeyStrength
     from ._exceptions import ArtifactError, CASError, LoadError, LoadErrorReason
     from ._message import Message, MessageType

    @@ -51,7 +51,12 @@ class ArtifactCacheSpec(CASRemoteSpec):
     # Args:
     #     context (Context): The BuildStream context
     #
    -class ArtifactCache():
    +class ArtifactCache(BaseCache):
    +
    +    spec_class = ArtifactCacheSpec
    +    spec_name = "artifact_cache_specs"
    +    config_node_name = "artifacts"
    +
         def __init__(self, context):
             self.context = context
             self.extractdir = os.path.join(context.artifactdir, 'extract')

    @@ -133,44 +138,13 @@ class ArtifactCache():
                     has_remote_caches = True
             if use_config:
                 for project in self.context.get_projects():
    -                artifact_caches = _configured_remote_artifact_cache_specs(self.context, project)
    +                artifact_caches = self._configured_remote_cache_specs(self.context, project)
                     if artifact_caches:  # artifact_caches is a list of ArtifactCacheSpec instances
                         self._set_remotes(artifact_caches, project=project)
                         has_remote_caches = True
             if has_remote_caches:
                 self._initialize_remotes()

    -    # specs_from_config_node()
    -    #
    -    # Parses the configuration of remote artifact caches from a config block.
    -    #
    -    # Args:
    -    #   config_node (dict): The config block, which may contain the 'artifacts' key
    -    #   basedir (str): The base directory for relative paths
    -    #
    -    # Returns:
    -    #   A list of ArtifactCacheSpec instances.
    -    #
    -    # Raises:
    -    #   LoadError, if the config block contains invalid keys.
    -    #
    -    @staticmethod
    -    def specs_from_config_node(config_node, basedir=None):
    -        cache_specs = []
    -
    -        artifacts = config_node.get('artifacts', [])
    -        if isinstance(artifacts, Mapping):
    -            cache_specs.append(ArtifactCacheSpec._new_from_config_node(artifacts, basedir))
    -        elif isinstance(artifacts, list):
    -            for spec_node in artifacts:
    -                cache_specs.append(ArtifactCacheSpec._new_from_config_node(spec_node, basedir))
    -        else:
    -            provenance = _yaml.node_get_provenance(config_node, key='artifacts')
    -            raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
    -                                  "%s: 'artifacts' must be a single 'url:' mapping, or a list of mappings" %
    -                                  (str(provenance)))
    -        return cache_specs
    -
         # mark_required_elements():
         #
         # Mark elements whose artifacts are required for the current run.

    @@ -882,16 +856,16 @@ class ArtifactCache():
             else:
                 available = utils._pretty_size(available_space)

    -            raise LoadError(LoadErrorReason.INVALID_DATA,
    -                            ("Your system does not have enough available " +
    -                             "space to support the cache quota specified.\n" +
    -                             "\nYou have specified a quota of {quota} total disk space.\n" +
    -                             "- The filesystem containing {local_cache_path} only " +
    -                             "has: {available_size} available.")
    -                            .format(
    -                                quota=self.context.config_cache_quota,
    -                                local_cache_path=self.context.artifactdir,
    -                                available_size=available))
    +            raise ArtifactError("Your system does not have enough available " +
    +                                "space to support the cache quota specified.",
    +                                detail=("You have specified a quota of {quota} total disk space.\n" +
    +                                        "The filesystem containing {local_cache_path} only " +
    +                                        "has {available_size} available.")
    +                                .format(
    +                                    quota=self.context.config_cache_quota,
    +                                    local_cache_path=self.context.artifactdir,
    +                                    available_size=available),
    +                                reason='insufficient-storage-for-quota')

             # Place a slight headroom (2e9 (2GB) on the cache_quota) into
             # cache_quota to try and avoid exceptions.

    @@ -902,37 +876,3 @@ class ArtifactCache():
             #
             self._cache_quota = cache_quota - headroom
             self._cache_lower_threshold = self._cache_quota / 2
    -
    -    # _get_volume_space_info_for
    -    #
    -    # Get the available space and total space for the given volume
    -    #
    -    # Args:
    -    #     volume: volume for which to get the size
    -    #
    -    # Returns:
    -    #     A tuple containing first the availabe number of bytes on the requested
    -    #     volume, then the total number of bytes of the volume.
    -    def _get_volume_space_info_for(self, volume):
    -        stat = os.statvfs(volume)
    -        return stat.f_bsize * stat.f_bavail, stat.f_bsize * stat.f_blocks
    -
    -
    -# _configured_remote_artifact_cache_specs():
    -#
    -# Return the list of configured artifact remotes for a given project, in priority
    -# order. This takes into account the user and project configuration.
    -#
    -# Args:
    -#     context (Context): The BuildStream context
    -#     project (Project): The BuildStream project
    -#
    -# Returns:
    -#   A list of ArtifactCacheSpec instances describing the remote artifact caches.
    -#
    -def _configured_remote_artifact_cache_specs(context, project):
    -    project_overrides = context.get_overrides(project.name)
    -    project_extra_specs = ArtifactCache.specs_from_config_node(project_overrides)
    -
    -    return list(utils._deduplicate(
    -        project_extra_specs + project.artifact_cache_specs + context.artifact_cache_specs))

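With this change the failed quota check surfaces as an ArtifactError carrying a machine-readable reason, instead of a generic LoadError. A minimal sketch of what calling code can now key on, assuming BstError exposes its detail/reason keywords as attributes (configure_quota() is a hypothetical stand-in for whatever invokes the check above, not a name from this push):

    from buildstream._exceptions import ArtifactError

    try:
        configure_quota(context)  # hypothetical caller of the quota check
    except ArtifactError as e:
        if e.reason == 'insufficient-storage-for-quota':
            print("quota exceeds free space on the cache volume:", e.detail)
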
  • buildstream/_basecache.py (new file)
    +#  Copyright (C) 2019 Bloomberg Finance LP
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +#
    +#  Authors:
    +#        Raoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>
    +#
    +from collections.abc import Mapping
    +import os
    +
    +from . import utils
    +from . import _yaml
    +
    +
    +# Base Cache for Caches to derive from
    +#
    +class BaseCache():
    +
    +    # None of these should ever be called in the base class, but this appeases
    +    # pylint
    +    spec_class = None
    +    spec_name = None
    +    config_node_name = None
    +
    +    # specs_from_config_node()
    +    #
    +    # Parses the configuration of remote artifact caches from a config block.
    +    #
    +    # Args:
    +    #   config_node (dict): The config block, which may contain the 'artifacts' key
    +    #   basedir (str): The base directory for relative paths
    +    #
    +    # Returns:
    +    #   A list of ArtifactCacheSpec instances.
    +    #
    +    # Raises:
    +    #   LoadError, if the config block contains invalid keys.
    +    #
    +    @classmethod
    +    def specs_from_config_node(cls, config_node, basedir=None):
    +        cache_specs = []
    +
    +        artifacts = config_node.get(cls.config_node_name, [])
    +        if isinstance(artifacts, Mapping):
    +            cache_specs.append(cls.spec_class._new_from_config_node(artifacts, basedir))
    +        elif isinstance(artifacts, list):
    +            for spec_node in artifacts:
    +                cache_specs.append(cls.spec_class._new_from_config_node(spec_node, basedir))
    +        else:
    +            provenance = _yaml.node_get_provenance(config_node, key='artifacts')
    +            raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
    +                                  "%s: 'artifacts' must be a single 'url:' mapping, or a list of mappings" %
    +                                  (str(provenance)))
    +        return cache_specs
    +
    +    # _configured_remote_artifact_cache_specs():
    +    #
    +    # Return the list of configured remotes for a given project, in priority
    +    # order. This takes into account the user and project configuration.
    +    #
    +    # Args:
    +    #     context (Context): The BuildStream context
    +    #     project (Project): The BuildStream project
    +    #
    +    # Returns:
    +    #   A list of ArtifactCacheSpec instances describing the remote artifact caches.
    +    #
    +    @classmethod
    +    def _configured_remote_cache_specs(cls, context, project):
    +        project_overrides = context.get_overrides(project.name)
    +        project_extra_specs = cls.specs_from_config_node(project_overrides)
    +
    +        project_specs = getattr(project, cls.spec_name)
    +        context_specs = getattr(context, cls.spec_name)
    +
    +        return list(utils._deduplicate(
    +            project_extra_specs + project_specs + context_specs))
    +
    +    # _get_volume_space_info_for
    +    #
    +    # Get the available space and total space for the given volume
    +    #
    +    # Args:
    +    #     volume: volume for which to get the size
    +    #
    +    # Returns:
    +    #     A tuple containing first the available number of bytes on the requested
    +    #     volume, then the total number of bytes of the volume.
    +    @staticmethod
    +    def _get_volume_space_info_for(volume):
    +        stat = os.statvfs(volume)
    +        return stat.f_bsize * stat.f_bavail, stat.f_bsize * stat.f_blocks

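A subclass only has to fill in the three class attributes for specs_from_config_node() and _configured_remote_cache_specs() to work; this is exactly how ArtifactCache is wired above. An illustrative sketch (ExampleCache and its names are hypothetical, not from the push):

    from buildstream._basecache import BaseCache
    from buildstream._cas import CASRemoteSpec


    class ExampleCacheSpec(CASRemoteSpec):
        pass


    class ExampleCache(BaseCache):

        spec_class = ExampleCacheSpec        # spec type built from each config entry
        spec_name = "example_cache_specs"    # attribute looked up on Project/Context
        config_node_name = "examples"        # key parsed out of the config block

    # specs = ExampleCache.specs_from_config_node(config_node, basedir)
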
  • buildstream/_context.py
    @@ -31,6 +31,7 @@ from ._exceptions import LoadError, LoadErrorReason, BstError
     from ._message import Message, MessageType
     from ._profile import Topics, profile_start, profile_end
     from ._artifactcache import ArtifactCache
    +from ._sourcecache import SourceCache
     from ._cas import CASCache
     from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
     from .plugin import _plugin_lookup

    @@ -58,12 +59,24 @@ class Context():
             # Filename indicating which configuration file was used, or None for the defaults
             self.config_origin = None

    +        # The directory under which other directories are based
    +        self.rootcachedir = None
    +
             # The directory where various sources are stored
             self.sourcedir = None

    +        # specs for source cache remotes
    +        self.source_cache_specs = None
    +
             # The directory where build sandboxes will be created
             self.builddir = None

    +        # The directory for CAS
    +        self.casdir = None
    +
    +        # The directory for temporary files
    +        self.tmpdir = None
    +
             # Default root location for workspaces
             self.workspacedir = None

    @@ -144,6 +157,7 @@ class Context():
             self._message_handler = None
             self._message_depth = deque()
             self._artifactcache = None
    +        self._sourcecache = None
             self._projects = []
             self._project_overrides = {}
             self._workspaces = None

    @@ -160,6 +174,7 @@ class Context():
         # Args:
         #    config (filename): The user specified configuration file, if any
         #
    +
         # Raises:
         #   LoadError
         #

    @@ -188,13 +203,30 @@ class Context():
                 user_config = _yaml.load(config)
                 _yaml.composite(defaults, user_config)

    +        # Give deprecation warnings
    +        if defaults.get('builddir'):
    +            print("builddir is deprecated, use rootcachedir")
    +        else:
    +            defaults['builddir'] = os.path.join(defaults['rootcachedir'], 'build')
    +
    +        if defaults.get('artifactdir'):
    +            print("artifactdir is deprecated, use rootcachedir")
    +        else:
    +            defaults['artifactdir'] = os.path.join(defaults['rootcachedir'], 'artifacts')
    +
             _yaml.node_validate(defaults, [
    -            'sourcedir', 'builddir', 'artifactdir', 'logdir',
    +            'rootcachedir', 'sourcedir', 'builddir', 'artifactdir', 'logdir',
                 'scheduler', 'artifacts', 'logging', 'projects',
    -            'cache', 'prompt', 'workspacedir', 'remote-execution'
    +            'cache', 'prompt', 'workspacedir', 'remote-execution',
             ])

    -        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
    +        # add directories not set by users
    +        defaults['tmpdir'] = os.path.join(defaults['rootcachedir'], 'tmp')
    +        defaults['casdir'] = os.path.join(defaults['rootcachedir'], 'cas')
    +
    +        for directory in ['rootcachedir', 'sourcedir', 'builddir',
    +                          'artifactdir', 'logdir', 'workspacedir', 'casdir',
    +                          'tmpdir']:
                 # Allow the ~ tilde expansion and any environment variables in
                 # path specification in the config files.
                 #

    @@ -216,6 +248,9 @@ class Context():
             # Load artifact share configuration
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)

    +        # Load source cache config
    +        self.source_cache_specs = SourceCache.specs_from_config_node(defaults)
    +
             self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)

             # Load pull build trees configuration

    @@ -289,6 +324,13 @@ class Context():

             return self._artifactcache

    +    @property
    +    def sourcecache(self):
    +        if not self._sourcecache:
    +            self._sourcecache = SourceCache(self)
    +
    +        return self._sourcecache
    +
         # add_project():
         #
         # Add a project to the context.

    @@ -654,7 +696,7 @@ class Context():

         def get_cascache(self):
             if self._cascache is None:
    -            self._cascache = CASCache(self.artifactdir)
    +            self._cascache = CASCache(self.rootcachedir)
             return self._cascache

         # guess_element()
    

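The practical upshot: a user (or test) configuration now only needs one cache location, with build/, artifacts/, cas/ and tmp/ derived from it; the deprecated builddir/artifactdir keys still work but print a warning. A minimal config sketch mirroring what the updated tests below pass to Context.load() (paths are placeholders):

    user_config = {
        'rootcachedir': '/some/cache/dir',   # build/, artifacts/, cas/, tmp/ hang off this
        'sourcedir': '/some/source/dir',     # still configured separately
        'logdir': '/some/log/dir',
    }
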
  • buildstream/_project.py
    @@ -30,6 +30,7 @@ from ._profile import Topics, profile_start, profile_end
     from ._exceptions import LoadError, LoadErrorReason
     from ._options import OptionPool
     from ._artifactcache import ArtifactCache
    +from ._sourcecache import SourceCache
     from .sandbox import SandboxRemote
     from ._elementfactory import ElementFactory
     from ._sourcefactory import SourceFactory

    @@ -133,6 +134,7 @@ class Project():
             self._shell_host_files = []   # A list of HostMount objects

             self.artifact_cache_specs = None
    +        self.source_cache_specs = None
             self.remote_execution_specs = None
             self._sandbox = None
             self._splits = None

    @@ -232,7 +234,7 @@ class Project():
                 'artifacts', 'options',
                 'fail-on-overlap', 'shell', 'fatal-warnings',
                 'ref-storage', 'sandbox', 'mirrors', 'remote-execution',
    -            'sources', '(@)'
    +            'sources', 'source-caches', '(@)'
             ])

         # create_element()

    @@ -506,6 +508,9 @@ class Project():
             # Load artifacts pull/push configuration for this project
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)

    +        # Load source caches with pull/push config
    +        self.source_cache_specs = SourceCache.specs_from_config_node(config, self.directory)
    +
             # Load remote-execution configuration for this project
             project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
             override_specs = SandboxRemote.specs_from_config_node(

  • buildstream/_sourcecache.py (new file)
    +#  Copyright (C) 2019 Bloomberg Finance LP
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +#
    +#  Authors:
    +#        Raoul Hidalgo Charman <raoul.hidalgocharman@codethink.co.uk>
    +
    +
    +from collections.abc import Mapping
    +from .types import Consistency
    +from . import _yaml
    +from ._cas import CASRemoteSpec, CASRemote
    +
    +
    +class SourceCacheSpec(CASRemoteSpec):
    +    """Holds configuration for a remote used for the source cache.
    +
    +    Args:
    +        url (str): Location of the remote artifact cache
    +        push (bool): Whether we should attempt to push artifacts to this cache,
    +                    in addition to pulling from it.
    +        instance-name (str): Name, if any, of the server instance
    +    """
    +
    +
    +class SourceCache():
    +
    +    def __init__(self, context):
    +        self.cas = context.get_cascache()
    +
    +        self.casremotes = [CASRemote(spec) for spec in context.source_cache_specs]
    +
    +    # TODO: pretty identical to the method in artifact cache, so would be best
    +    # to move it to the CASRemoteSpec level to avoid duplication
    +    @staticmethod
    +    def specs_from_config_node(config_node, basedir=None):
    +        source_specs = []
    +
    +        sources = config_node.get('source-caches', [])
    +        if isinstance(sources, Mapping):
    +            source_specs.append(SourceCacheSpec._new_from_config_node(sources, basedir))
    +        elif isinstance(sources, list):
    +            for source_spec in sources:
    +                source_specs.append(SourceCacheSpec._new_from_config_node(source_spec, basedir))
    +        else:
    +            provenance = _yaml.node_get_provenance(config_node, key='source-caches')
    +            raise _yaml.LoadError(
    +                _yaml.LoadErrorReason.INVALID_DATA,
    +                "%s: 'source-caches' must be a single 'url:' mapping, or a list of mappings" %
    +                (str(provenance)))
    +
    +        return source_specs
    +
    +    def check_cas(self, source):
    +        ref = source._get_unique_key()
    +        return self.cas.contains(ref)
    +
    +    def fetch(self, source, previous_sources, *, progress=None):
    +        # if not staged
    +        if source.get_consistency() < Consistency.STAGED:
    +            source.info("Trying to fetch from remote source cache")
    +            ref = source._get_unique_key()
    +            if self.cas.pull(ref, self.casremotes):
    +                source.info("Fetched to local CAS")
    +            else:
    +                source.info("Falling back to upstream")
    +                source._fetch(previous_sources)
    +
    +    def stage(self, source, vdir):
    +        pass
    +
    +    def push(self, source):
    +        pass
    +
    +    def init_workspace(self, source):
    +        pass

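The TODO above notes that specs_from_config_node() duplicates the artifact cache logic; given the new BaseCache, a natural follow-up (a sketch only, not part of this push) would be to derive SourceCache from it and drop the hand-rolled parser:

    from ._basecache import BaseCache


    class SourceCache(BaseCache):

        spec_class = SourceCacheSpec
        spec_name = "source_cache_specs"     # matches the attribute added to Context/Project
        config_node_name = "source-caches"
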
  • buildstream/data/userconfig.yaml
    @@ -13,11 +13,8 @@
     # Location to store sources
     sourcedir: ${XDG_CACHE_HOME}/buildstream/sources

    -# Location to perform builds
    -builddir: ${XDG_CACHE_HOME}/buildstream/build
    -
    -# Location to store local binary artifacts
    -artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
    +# Root location for other directories in the cache
    +rootcachedir: ${XDG_CACHE_HOME}/buildstream

     # Location to store build logs
     logdir: ${XDG_CACHE_HOME}/buildstream/logs
    

  • buildstream/element.py
    @@ -2028,7 +2028,7 @@ class Element(Plugin):
         def _fetch(self):
             previous_sources = []
             for source in self.sources():
    -            if source._get_consistency() < Consistency.CACHED:
    +            if source._get_consistency() < Consistency.STAGED:
                     source._fetch(previous_sources)
                 previous_sources.append(source)
    

  • buildstream/source.py
    @@ -288,6 +288,8 @@ class Source(Plugin):
             super().__init__("{}-{}".format(meta.element_name, meta.element_index),
                              context, project, provenance, "source")

    +        self.__source_cache = context.sourcecache
    +
             self.__element_name = meta.element_name         # The name of the element owning this source
             self.__element_index = meta.element_index       # The index of the source in the owning element's source list
             self.__element_kind = meta.element_kind         # The kind of the element owning this source
    

  • buildstream/types.py
    @@ -73,7 +73,14 @@ class Consistency():
         be fetched, however they cannot be staged.
         """

    -    CACHED = 2
    +    STAGED = 2
    +    """STAGED
    +
    +    Sources are staged in the local CAS, but are not present unstaged in the
    +    source cache.
    +    """
    +
    +    CACHED = 3
         """Cached

         Cached sources have a reference which is present in the local

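Together with the element.py change above, the states now order INCONSISTENT < RESOLVED < STAGED < CACHED, so fetching is skipped once a source is at least staged in the local CAS. A quick sanity check (assuming the usual INCONSISTENT = 0 and RESOLVED = 1 values, which are not shown in this diff):

    from buildstream.types import Consistency

    assert (Consistency.INCONSISTENT < Consistency.RESOLVED
            < Consistency.STAGED < Consistency.CACHED)
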
  • conftest.py
    @@ -53,15 +53,15 @@ def pytest_runtest_setup(item):
     class IntegrationCache():

         def __init__(self, cache):
    -        cache = os.path.abspath(cache)
    +        self.root = os.path.abspath(cache)

             # Use the same sources every time
    -        self.sources = os.path.join(cache, 'sources')
    +        self.sources = os.path.join(self.root, 'sources')

             # Create a temp directory for the duration of the test for
             # the artifacts directory
             try:
    -            self.artifacts = tempfile.mkdtemp(dir=cache, prefix='artifacts-')
    +            self.artifacts = tempfile.mkdtemp(dir=self.root, prefix='artifacts-')
             except OSError as e:
                 raise AssertionError("Unable to create test directory !") from e

    @@ -86,6 +86,10 @@ def integration_cache(request):
             shutil.rmtree(cache.artifacts)
         except FileNotFoundError:
             pass
    +    try:
    +        shutil.rmtree(os.path.join(cache.root, 'cas'))
    +    except FileNotFoundError:
    +        pass


     #################################################
    

  • doc/bst2html.py
    @@ -194,10 +194,9 @@ def workdir(source_cache=None):

             bst_config_file = os.path.join(tempdir, 'buildstream.conf')
             config = {
    +            'rootcachedir': tempdir,
                 'sourcedir': source_cache,
    -            'artifactdir': os.path.join(tempdir, 'artifacts'),
                 'logdir': os.path.join(tempdir, 'logs'),
    -            'builddir': os.path.join(tempdir, 'build'),
             }
             _yaml.dump(config, bst_config_file)

    @@ -411,12 +410,10 @@ def run_session(description, tempdir, source_cache, palette, config_file, force)
             # Encode and save the output if that was asked for
             output = _yaml.node_get(command, str, 'output', default_value=None)
             if output is not None:
    -
                 # Convert / Generate a nice <div>
                 converted = generate_html(command_out, directory, config_file,
                                           source_cache, tempdir, palette,
                                           command_str, command_fake_output is not None)
    -
                 # Save it
                 filename = os.path.join(desc_dir, output)
                 filename = os.path.realpath(filename)
    

  • doc/sessions/running-commands.run
    @@ -2,7 +2,7 @@
     commands:
     # Make it fetch first
     - directory: ../examples/running-commands
    -  command: fetch hello.bst
    +  command: source fetch hello.bst

     # Capture a show output
     - directory: ../examples/running-commands
    

  • tests/artifactcache/cache_size.py
    @@ -1,8 +1,10 @@
     import os
     import pytest
    +from unittest import mock

     from buildstream import _yaml
     from buildstream._artifactcache import CACHE_SIZE_FILE
    +from buildstream._exceptions import ErrorDomain

     from tests.testutils import cli, create_element_size

    @@ -60,3 +62,29 @@ def test_cache_size_write(cli, tmpdir):
         with open(sizefile, "r") as f:
             size_data = f.read()
         size = int(size_data)
    +
    +
    +def test_quota_over_1024T(cli, tmpdir):
    +    KiB = 1024
    +    MiB = (KiB * 1024)
    +    GiB = (MiB * 1024)
    +    TiB = (GiB * 1024)
    +
    +    cli.configure({
    +        'cache': {
    +            'quota': 2048 * TiB
    +        }
    +    })
    +    project = tmpdir.join("main")
    +    os.makedirs(str(project))
    +    _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
    +
    +    volume_space_patch = mock.patch(
    +        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
    +        autospec=True,
    +        return_value=(1025 * TiB, 1025 * TiB)
    +    )
    +
    +    with volume_space_patch:
    +        result = cli.run(project, args=["build", "file.bst"])
    +        result.assert_main_error(ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')

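The arithmetic behind the new test: the patched volume reports 1025 TiB free of 1025 TiB total, while the configured quota is 2048 TiB, so the quota can never fit and setup must fail with 'insufficient-storage-for-quota':

    TiB = 1024 ** 4
    quota, available = 2048 * TiB, 1025 * TiB
    assert quota > available   # hence the expected ARTIFACT error
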
  • tests/artifactcache/expiry.py
    @@ -66,8 +66,9 @@ def test_artifact_expires(cli, datafiles, tmpdir):
         res.assert_success()

         # Check that the correct element remains in the cache
    -    assert cli.get_element_state(project, 'target.bst') != 'cached'
    -    assert cli.get_element_state(project, 'target2.bst') == 'cached'
    +    states = cli.get_element_states(project, ['target.bst', 'target2.bst'])
    +    assert states['target.bst'] != 'cached'
    +    assert states['target2.bst'] == 'cached'


     # Ensure that we don't end up deleting the whole cache (or worse) if

    @@ -144,9 +145,11 @@ def test_expiry_order(cli, datafiles, tmpdir):
         # have been removed.
         # Note that buildstream will reduce the cache to 50% of the
         # original size - we therefore remove multiple elements.
    -
    -    assert (tuple(cli.get_element_state(project, element) for element in
    -                  ('unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst')) ==
    +    check_elements = [
    +        'unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst'
    +    ]
    +    states = cli.get_element_states(project, check_elements)
    +    assert (tuple(states[element] for element in check_elements) ==
                 ('buildable', 'buildable', 'buildable', 'cached', 'cached', ))

    @@ -176,8 +179,9 @@ def test_keep_dependencies(cli, datafiles, tmpdir):
         res.assert_success()

         # Check that the correct element remains in the cache
    -    assert cli.get_element_state(project, 'dependency.bst') == 'cached'
    -    assert cli.get_element_state(project, 'unrelated.bst') == 'cached'
    +    states = cli.get_element_states(project, ['dependency.bst', 'unrelated.bst'])
    +    assert states['dependency.bst'] == 'cached'
    +    assert states['unrelated.bst'] == 'cached'

         # We try to build an element which depends on the LRU artifact,
         # and could therefore fail if we didn't make sure dependencies

    @@ -192,9 +196,10 @@ def test_keep_dependencies(cli, datafiles, tmpdir):
         res = cli.run(project=project, args=['build', 'target.bst'])
         res.assert_success()

    -    assert cli.get_element_state(project, 'unrelated.bst') != 'cached'
    -    assert cli.get_element_state(project, 'dependency.bst') == 'cached'
    -    assert cli.get_element_state(project, 'target.bst') == 'cached'
    +    states = cli.get_element_states(project, ['target.bst', 'unrelated.bst'])
    +    assert states['target.bst'] == 'cached'
    +    assert states['dependency.bst'] == 'cached'
    +    assert states['unrelated.bst'] != 'cached'


     # Assert that we never delete a dependency required for a build tree

    @@ -239,11 +244,11 @@ def test_never_delete_required(cli, datafiles, tmpdir):
         # life there may potentially be N-builders cached artifacts
         # which exceed the quota
         #
    -    assert cli.get_element_state(project, 'dep1.bst') == 'cached'
    -    assert cli.get_element_state(project, 'dep2.bst') == 'cached'
    -
    -    assert cli.get_element_state(project, 'dep3.bst') != 'cached'
    -    assert cli.get_element_state(project, 'target.bst') != 'cached'
    +    states = cli.get_element_states(project, ['target.bst'])
    +    assert states['dep1.bst'] == 'cached'
    +    assert states['dep2.bst'] == 'cached'
    +    assert states['dep3.bst'] != 'cached'
    +    assert states['target.bst'] != 'cached'


     # Assert that we never delete a dependency required for a build tree,

    @@ -275,10 +280,11 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):
         res.assert_success()

         # They should all be cached
    -    assert cli.get_element_state(project, 'dep1.bst') == 'cached'
    -    assert cli.get_element_state(project, 'dep2.bst') == 'cached'
    -    assert cli.get_element_state(project, 'dep3.bst') == 'cached'
    -    assert cli.get_element_state(project, 'target.bst') == 'cached'
    +    states = cli.get_element_states(project, ['target.bst'])
    +    assert states['dep1.bst'] == 'cached'
    +    assert states['dep2.bst'] == 'cached'
    +    assert states['dep3.bst'] == 'cached'
    +    assert states['target.bst'] == 'cached'

         # Now increase the size of all the elements
         #

    @@ -296,28 +302,37 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):

         # Expect the same result that we did in test_never_delete_required()
         #
    -    assert cli.get_element_state(project, 'dep1.bst') == 'cached'
    -    assert cli.get_element_state(project, 'dep2.bst') == 'cached'
    -    assert cli.get_element_state(project, 'dep3.bst') != 'cached'
    -    assert cli.get_element_state(project, 'target.bst') != 'cached'
    +    states = cli.get_element_states(project, ['target.bst'])
    +    assert states['dep1.bst'] == 'cached'
    +    assert states['dep2.bst'] == 'cached'
    +    assert states['dep3.bst'] != 'cached'
    +    assert states['target.bst'] != 'cached'


     # Ensure that only valid cache quotas make it through the loading
     # process.
    -@pytest.mark.parametrize("quota,success", [
    -    ("1", True),
    -    ("1K", True),
    -    ("50%", True),
    -    ("infinity", True),
    -    ("0", True),
    -    ("-1", False),
    -    ("pony", False),
    -    ("7K", False),
    -    ("70%", False),
    -    ("200%", False)
    +#
    +# This test virtualizes the condition to assume a storage volume
    +# has 10K total disk space, and 6K of it is already in use (not
    +# including any space used by the artifact cache).
    +#
    +@pytest.mark.parametrize("quota,err_domain,err_reason", [
    +    # Valid configurations
    +    ("1", 'success', None),
    +    ("1K", 'success', None),
    +    ("50%", 'success', None),
    +    ("infinity", 'success', None),
    +    ("0", 'success', None),
    +    # Invalid configurations
    +    ("-1", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
    +    ("pony", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
    +    ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
    +    # Not enough space for these caches
    +    ("7K", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota'),
    +    ("70%", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')
     ])
     @pytest.mark.datafiles(DATA_DIR)
    -def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
    +def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reason):
         project = os.path.join(datafiles.dirname, datafiles.basename)
         os.makedirs(os.path.join(project, 'elements'))

    @@ -356,10 +371,10 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
         with volume_space_patch, cache_size_patch:
             res = cli.run(project=project, args=['workspace', 'list'])

    -    if success:
    +    if err_domain == 'success':
             res.assert_success()
         else:
    -        res.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    +        res.assert_main_error(err_domain, err_reason)


     @pytest.mark.datafiles(DATA_DIR)

    @@ -403,7 +418,7 @@ def test_extract_expiry(cli, datafiles, tmpdir):
         # Now we should have a directory for the cached target2.bst, which
         # replaced target.bst in the cache, we should not have a directory
         # for the target.bst
    -    refsdir = os.path.join(project, 'cache', 'artifacts', 'cas', 'refs', 'heads')
    +    refsdir = os.path.join(project, 'cache', 'cas', 'refs', 'heads')
         refsdirtest = os.path.join(refsdir, 'test')
         refsdirtarget = os.path.join(refsdirtest, 'target')
         refsdirtarget2 = os.path.join(refsdirtest, 'target2')
    

  • tests/artifactcache/junctions.py
    @@ -68,8 +68,8 @@ def test_push_pull(cli, tmpdir, datafiles):
             # Now we've pushed, delete the user's local artifact cache
             # directory and try to redownload it from the share
             #
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    -        shutil.rmtree(artifacts)
    +        cas = os.path.join(cli.directory, 'cas')
    +        shutil.rmtree(cas)

             # Assert that nothing is cached locally anymore
             state = cli.get_element_state(project, 'target.bst')
    

  • tests/artifactcache/pull.py
    @@ -56,7 +56,7 @@ def test_pull(cli, tmpdir, datafiles):
         # Set up an artifact cache.
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
             # Configure artifact share
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    +        cache_dir = os.path.join(str(tmpdir), 'cache')
             user_config_file = str(tmpdir.join('buildstream.conf'))
             user_config = {
                 'scheduler': {

    @@ -65,7 +65,8 @@
                 'artifacts': {
                     'url': share.repo,
                     'push': True,
    -            }
    +            },
    +            'rootcachedir': cache_dir
             }

             # Write down the user configuration file

    @@ -92,7 +93,6 @@
             # Fake minimal context
             context = Context()
             context.load(config=user_config_file)
    -        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
             context.set_message_handler(message_handler)

             # Load the project and CAS cache

    @@ -102,7 +102,10 @@

             # Assert that the element's artifact is **not** cached
             element = project.load_elements(['target.bst'])[0]
    +        print(element)
             element_key = cli.get_element_key(project_dir, 'target.bst')
    +        print(context.casdir)
    +        print(cas.get_artifact_fullname(element, element_key))
             assert not cas.contains(element, element_key)

             queue = multiprocessing.Queue()

    @@ -110,7 +113,7 @@
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_pull, queue, user_config_file, project_dir,
    -                                                artifact_dir, 'target.bst', element_key))
    +                                                cache_dir, 'target.bst', element_key))

             try:
                 # Keep SIGINT blocked in the child process

    @@ -127,12 +130,14 @@
             assert cas.contains(element, element_key)


    -def _test_pull(user_config_file, project_dir, artifact_dir,
    +def _test_pull(user_config_file, project_dir, cache_dir,
                    element_name, element_key, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
    +    context.rootcachedir = cache_dir
    +    context.casdir = os.path.join(cache_dir, 'cas')
    +    context.tmpdir = os.path.join(cache_dir, 'tmp')
         context.set_message_handler(message_handler)

         # Load the project manually

    @@ -165,7 +170,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
         # Set up an artifact cache.
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
             # Configure artifact share
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
             user_config_file = str(tmpdir.join('buildstream.conf'))
             user_config = {
                 'scheduler': {

    @@ -174,7 +179,8 @@
                 'artifacts': {
                     'url': share.repo,
                     'push': True,
    -            }
    +            },
    +            'rootcachedir': rootcache_dir
             }

             # Write down the user configuration file

    @@ -195,7 +201,6 @@
             # Fake minimal context
             context = Context()
             context.load(config=user_config_file)
    -        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
             context.set_message_handler(message_handler)

             # Load the project and CAS cache

    @@ -218,7 +223,7 @@
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_push_tree, queue, user_config_file, project_dir,
    -                                                artifact_dir, artifact_digest))
    +                                                artifact_digest))

             try:
                 # Keep SIGINT blocked in the child process

    @@ -246,7 +251,7 @@
             # Use subprocess to avoid creation of gRPC threads in main BuildStream process
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_pull_tree, queue, user_config_file, project_dir,
    -                                                artifact_dir, tree_digest))
    +                                                tree_digest))

             try:
                 # Keep SIGINT blocked in the child process

    @@ -268,11 +273,10 @@
             assert os.path.exists(cas.objpath(directory_digest))


    -def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
    +def _test_push_tree(user_config_file, project_dir, artifact_digest, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
         context.set_message_handler(message_handler)

         # Load the project manually

    @@ -304,11 +308,10 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
             queue.put("No remote configured")


    -def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
    +def _test_pull_tree(user_config_file, project_dir, artifact_digest, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
         context.set_message_handler(message_handler)

         # Load the project manually
    

  • tests/artifactcache/push.py
    @@ -51,7 +51,7 @@ def test_push(cli, tmpdir, datafiles):
         # Set up an artifact cache.
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
             # Configure artifact share
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
             user_config_file = str(tmpdir.join('buildstream.conf'))
             user_config = {
                 'scheduler': {

    @@ -60,7 +60,8 @@
                 'artifacts': {
                     'url': share.repo,
                     'push': True,
    -            }
    +            },
    +            'rootcachedir': rootcache_dir
             }

             # Write down the user configuration file

    @@ -69,7 +70,6 @@
             # Fake minimal context
             context = Context()
             context.load(config=user_config_file)
    -        context.artifactdir = artifact_dir
             context.set_message_handler(message_handler)

             # Load the project manually

    @@ -89,7 +89,7 @@
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
             process = multiprocessing.Process(target=_queue_wrapper,
                                               args=(_test_push, queue, user_config_file, project_dir,
    -                                                artifact_dir, 'target.bst', element_key))
    +                                                'target.bst', element_key))

             try:
                 # Keep SIGINT blocked in the child process

    @@ -106,12 +106,10 @@
             assert share.has_artifact('test', 'target.bst', element_key)


    -def _test_push(user_config_file, project_dir, artifact_dir,
    -               element_name, element_key, queue):
    +def _test_push(user_config_file, project_dir, element_name, element_key, queue):
         # Fake minimal context
         context = Context()
         context.load(config=user_config_file)
    -    context.artifactdir = artifact_dir
         context.set_message_handler(message_handler)

         # Load the project manually

    @@ -152,7 +150,7 @@ def test_push_directory(cli, tmpdir, datafiles):
         # Set up an artifact cache.
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
             # Configure artifact share
    -        artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
             user_config_file = str(tmpdir.join('buildstream.conf'))
             user_config = {
                 'scheduler': {

    @@ -161,7 +159,8 @@
                 'artifacts': {
                     'url': share.repo,
                     'push': True,
    -            }
    +            },
    +            'rootcachedir': rootcache_dir
             }

             # Write down the user configuration file

    @@ -170,7 +169,6 @@
             # Fake minimal context
             context = Context()
             context.load(config=user_config_file)
    -        context.artifactdir = os.path.join(str(tmpdir), 'cache', 'artifacts')
             context.set_message_handler(message_handler)

             # Load the project and CAS cache

    @@ -182,6 +180,7 @@
             # Assert that the element's artifact is cached
             element = project.load_elements(['target.bst'])[0]
             element_key = cli.get_element_key(project_dir, 'target.bst')
    +        print(context.casdir)
             assert artifactcache.contains(element, element_key)

             # Manually setup the CAS remote

    @@ -198,7 +197,7 @@
    198 197
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    199 198
             process = multiprocessing.Process(target=_queue_wrapper,
    
    200 199
                                               args=(_test_push_directory, queue, user_config_file,
    
    201
    -                                                project_dir, artifact_dir, artifact_digest))
    
    200
    +                                                project_dir, artifact_digest))
    
    202 201
     
    
    203 202
             try:
    
    204 203
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -216,11 +215,10 @@ def test_push_directory(cli, tmpdir, datafiles):
    216 215
             assert share.has_object(artifact_digest)
    
    217 216
     
    
    218 217
     
    
    219
    -def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_digest, queue):
    
    218
    +def _test_push_directory(user_config_file, project_dir, artifact_digest, queue):
    
    220 219
         # Fake minimal context
    
    221 220
         context = Context()
    
    222 221
         context.load(config=user_config_file)
    
    223
    -    context.artifactdir = artifact_dir
    
    224 222
         context.set_message_handler(message_handler)
    
    225 223
     
    
    226 224
         # Load the project manually
    
    ... ... @@ -254,6 +252,7 @@ def test_push_message(cli, tmpdir, datafiles):
    254 252
         with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
    
    255 253
             # Configure artifact share
    
    256 254
             artifact_dir = os.path.join(str(tmpdir), 'cache', 'artifacts')
    
    255
    +        rootcache_dir = os.path.join(str(tmpdir), 'cache')
    
    257 256
             user_config_file = str(tmpdir.join('buildstream.conf'))
    
    258 257
             user_config = {
    
    259 258
                 'scheduler': {
    
    ... ... @@ -262,7 +261,8 @@ def test_push_message(cli, tmpdir, datafiles):
    262 261
                 'artifacts': {
    
    263 262
                     'url': share.repo,
    
    264 263
                     'push': True,
    
    265
    -            }
    
    264
    +            },
    
    265
    +            'rootcachedir': rootcache_dir
    
    266 266
             }
    
    267 267
     
    
    268 268
             # Write down the user configuration file
    
    ... ... @@ -273,7 +273,7 @@ def test_push_message(cli, tmpdir, datafiles):
    273 273
             # See https://github.com/grpc/grpc/blob/master/doc/fork_support.md for details
    
    274 274
             process = multiprocessing.Process(target=_queue_wrapper,
    
    275 275
                                               args=(_test_push_message, queue, user_config_file,
    
    276
    -                                                project_dir, artifact_dir))
    
    276
    +                                                project_dir))
    
    277 277
     
    
    278 278
             try:
    
    279 279
                 # Keep SIGINT blocked in the child process
    
    ... ... @@ -292,11 +292,10 @@ def test_push_message(cli, tmpdir, datafiles):
    292 292
             assert share.has_object(message_digest)
    
    293 293
     
    
    294 294
     
    
    295
    -def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
    
    295
    +def _test_push_message(user_config_file, project_dir, queue):
    
    296 296
         # Fake minimal context
    
    297 297
         context = Context()
    
    298 298
         context.load(config=user_config_file)
    
    299
    -    context.artifactdir = artifact_dir
    
    300 299
         context.set_message_handler(message_handler)
    
    301 300
     
    
    302 301
         # Load the project manually
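
    A recurring pattern in these test changes: the per-directory 'artifactdir'
    option is dropped in favour of a single 'rootcachedir', under which the CAS
    objects and artifact refs live. A minimal sketch of the updated test
    configuration, assuming only the keys visible in this diff ('share' and
    'tmpdir' stand in for the usual test fixtures):

        import os

        rootcache_dir = os.path.join(str(tmpdir), 'cache')
        user_config = {
            'artifacts': {
                'url': share.repo,   # remote artifact share
                'push': True,
            },
            # Replaces the old 'artifactdir'; cas/ and artifacts/ are
            # created beneath this single root.
            'rootcachedir': rootcache_dir,
        }
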
    

  • tests/elements/filter.py
    ... ... @@ -389,8 +389,9 @@ def test_filter_track_multi(datafiles, cli, tmpdir):
    389 389
         _yaml.dump(filter2_config, filter2_file)
    
    390 390
     
    
    391 391
         # Assert that a fetch is needed
    
    392
    -    assert cli.get_element_state(project, input_name) == 'no reference'
    
    393
    -    assert cli.get_element_state(project, input2_name) == 'no reference'
    
    392
    +    states = cli.get_element_states(project, [input_name, input2_name])
    
    393
    +    assert states[input_name] == 'no reference'
    
    394
    +    assert states[input2_name] == 'no reference'
    
    394 395
     
    
    395 396
         # Now try to track it
    
    396 397
         result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
    
    ... ... @@ -450,8 +451,9 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
    450 451
         _yaml.dump(filter2_config, filter2_file)
    
    451 452
     
    
    452 453
         # Assert that a fetch is needed
    
    453
    -    assert cli.get_element_state(project, input_name) == 'no reference'
    
    454
    -    assert cli.get_element_state(project, input2_name) == 'no reference'
    
    454
    +    states = cli.get_element_states(project, [input_name, input2_name])
    
    455
    +    assert states[input_name] == 'no reference'
    
    456
    +    assert states[input2_name] == 'no reference'
    
    455 457
     
    
    456 458
         # Now try to track it
    
    457 459
         result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst", "--except", input_name])
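
    The filter tests switch from one 'bst show' invocation per element to a
    single batched query. A short sketch of the new helper's usage, following
    the pattern above (get_element_states returns a {name: state} dict for the
    given targets and, with the default deps='all', their dependencies):

        # One subprocess for the whole batch instead of one per element
        states = cli.get_element_states(project, [input_name, input2_name])
        assert states[input_name] == 'no reference'
        assert states[input2_name] == 'no reference'
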
    

  • tests/frontend/pull.py
    ... ... @@ -62,20 +62,20 @@ def test_push_pull_all(cli, tmpdir, datafiles):
    62 62
             # Now we've pushed, delete the user's local artifact cache
    
    63 63
             # directory and try to redownload it from the share
    
    64 64
             #
    
    65
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    66
    -        shutil.rmtree(artifacts)
    
    65
    +        cas = os.path.join(cli.directory, 'cas')
    
    66
    +        shutil.rmtree(cas)
    
    67 67
     
    
    68 68
             # Assert that nothing is cached locally anymore
    
    69
    -        for element_name in all_elements:
    
    70
    -            assert cli.get_element_state(project, element_name) != 'cached'
    
    69
    +        states = cli.get_element_states(project, all_elements)
    
    70
    +        assert not any(states[e] == 'cached' for e in all_elements)
    
    71 71
     
    
    72 72
             # Now try bst pull
    
    73 73
             result = cli.run(project=project, args=['artifact', 'pull', '--deps', 'all', 'target.bst'])
    
    74 74
             result.assert_success()
    
    75 75
     
    
    76 76
             # And assert that it's again in the local cache, without having built
    
    77
    -        for element_name in all_elements:
    
    78
    -            assert cli.get_element_state(project, element_name) == 'cached'
    
    77
    +        states = cli.get_element_states(project, all_elements)
    
    78
    +        assert not any(states[e] != 'cached' for e in all_elements)
    
    79 79
     
    
    80 80
     
    
    81 81
     # Tests that:
    
    ... ... @@ -104,8 +104,8 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
    104 104
             assert_shared(cli, share2, project, 'target.bst')
    
    105 105
     
    
    106 106
             # Delete the user's local artifact cache.
    
    107
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    108
    -        shutil.rmtree(artifacts)
    
    107
    +        cas = os.path.join(cli.directory, 'cas')
    
    108
    +        shutil.rmtree(cas)
    
    109 109
     
    
    110 110
             # Assert that the element is not cached anymore.
    
    111 111
             assert cli.get_element_state(project, 'target.bst') != 'cached'
    
    ... ... @@ -158,8 +158,8 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
    158 158
             # Now we've pushed, delete the user's local artifact cache
    
    159 159
             # directory and try to redownload it from the good_share.
    
    160 160
             #
    
    161
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    162
    -        shutil.rmtree(artifacts)
    
    161
    +        cas = os.path.join(cli.directory, 'cas')
    
    162
    +        shutil.rmtree(cas)
    
    163 163
     
    
    164 164
             result = cli.run(project=project, args=['artifact', 'pull', 'target.bst', '--remote',
    
    165 165
                                                     good_share.repo])
    
    ... ... @@ -199,8 +199,8 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
    199 199
             # Now we've pushed, delete the user's local artifact cache
    
    200 200
             # directory and try to redownload it from the share
    
    201 201
             #
    
    202
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    203
    -        shutil.rmtree(artifacts)
    
    202
    +        cas = os.path.join(cli.directory, 'cas')
    
    203
    +        shutil.rmtree(cas)
    
    204 204
     
    
    205 205
             # Assert that nothing is cached locally anymore
    
    206 206
             for element_name in all_elements:
    
    ... ... @@ -249,8 +249,8 @@ def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
    249 249
             # Now we've pushed, delete the user's local artifact cache
    
    250 250
             # directory and try to redownload it from the share
    
    251 251
             #
    
    252
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    253
    -        shutil.rmtree(artifacts)
    
    252
    +        cas = os.path.join(cli.directory, 'cas')
    
    253
    +        shutil.rmtree(cas)
    
    254 254
     
    
    255 255
             # Assert that nothing is cached locally anymore
    
    256 256
             for element_name in all_elements:
    
    ... ... @@ -285,7 +285,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
    285 285
             result.assert_success()
    
    286 286
             assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'cached'
    
    287 287
     
    
    288
    -        cache_dir = os.path.join(project, 'cache', 'artifacts')
    
    288
    +        cache_dir = os.path.join(project, 'cache', 'cas')
    
    289 289
             shutil.rmtree(cache_dir)
    
    290 290
     
    
    291 291
             assert cli.get_element_state(project, 'junction.bst:import-etc.bst') == 'buildable'
    
    ... ... @@ -320,8 +320,8 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
    320 320
             # Now we've pushed, delete the user's local artifact cache
    
    321 321
             # directory and try to redownload it from the share
    
    322 322
             #
    
    323
    -        artifacts = os.path.join(cli.directory, 'artifacts')
    
    324
    -        shutil.rmtree(artifacts)
    
    323
    +        cas = os.path.join(cli.directory, 'cas')
    
    324
    +        shutil.rmtree(cas)
    
    325 325
     
    
    326 326
             # Assert that nothing is cached locally anymore
    
    327 327
             for element_name in all_elements:
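
    The deletion target changes throughout this file because artifact refs are
    now stored inside the CAS: wiping the local cache means removing the 'cas'
    directory rather than the old 'artifacts' one. A sketch, assuming
    'cli.directory' points at the configured cache root as in these tests:

        import os
        import shutil

        # Refs and objects both live under <cache root>/cas now
        cas = os.path.join(cli.directory, 'cas')
        shutil.rmtree(cas)
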
    

  • tests/frontend/push.py
    ... ... @@ -250,9 +250,10 @@ def test_artifact_expires(cli, datafiles, tmpdir):
    250 250
             result.assert_success()
    
    251 251
     
    
    252 252
         # check that elements 1 and 2 are cached both locally and remotely
    
    253
    -        assert cli.get_element_state(project, 'element1.bst') == 'cached'
    
    253
    +        states = cli.get_element_states(project, ['element1.bst', 'element2.bst'])
    
    254
    +        assert states['element1.bst'] == 'cached'
    
    255
    +        assert states['element2.bst'] == 'cached'
    
    254 256
             assert_shared(cli, share, project, 'element1.bst')
    
    255
    -        assert cli.get_element_state(project, 'element2.bst') == 'cached'
    
    256 257
             assert_shared(cli, share, project, 'element2.bst')
    
    257 258
     
    
    258 259
             # Create and build another element of 5 MB (This will exceed the free disk space available)
    
    ... ... @@ -298,11 +299,12 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
    298 299
             result.assert_success()
    
    299 300
     
    
    300 301
             # Ensure that the small artifact is still in the share
    
    301
    -        assert cli.get_element_state(project, 'small_element.bst') == 'cached'
    
    302
    +        states = cli.get_element_states(project, ['small_element.bst', 'large_element.bst'])
    
    303
    +        assert states['small_element.bst'] == 'cached'
    
    302 304
             assert_shared(cli, share, project, 'small_element.bst')
    
    303 305
     
    
    304 306
             # Ensure that the artifact is cached locally but NOT remotely
    
    305
    -        assert cli.get_element_state(project, 'large_element.bst') == 'cached'
    
    307
    +        assert states['large_element.bst'] == 'cached'
    
    306 308
             assert_not_shared(cli, share, project, 'large_element.bst')
    
    307 309
     
    
    308 310
     
    
    ... ... @@ -334,8 +336,9 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    334 336
             result.assert_success()
    
    335 337
     
    
    336 338
             # Ensure they are cached locally
    
    337
    -        assert cli.get_element_state(project, 'element1.bst') == 'cached'
    
    338
    -        assert cli.get_element_state(project, 'element2.bst') == 'cached'
    
    339
    +        states = cli.get_element_states(project, ['element1.bst', 'element2.bst'])
    
    340
    +        assert states['element1.bst'] == 'cached'
    
    341
    +        assert states['element2.bst'] == 'cached'
    
    339 342
     
    
    340 343
             # Ensure that they have  been pushed to the cache
    
    341 344
             assert_shared(cli, share, project, 'element1.bst')
    

  • tests/frontend/track.py
    ... ... @@ -123,7 +123,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
    123 123
             last_element_name = element_name
    
    124 124
     
    
    125 125
         # Assert that a fetch is needed
    
    126
    -    states = cli.get_element_states(project, last_element_name)
    
    126
    +    states = cli.get_element_states(project, [last_element_name])
    
    127 127
         for element_name in element_names:
    
    128 128
             assert states[element_name] == 'no reference'
    
    129 129
     
    
    ... ... @@ -143,7 +143,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount):
    143 143
         result.assert_success()
    
    144 144
     
    
    145 145
         # Assert that the base is buildable and the rest are waiting
    
    146
    -    states = cli.get_element_states(project, last_element_name)
    
    146
    +    states = cli.get_element_states(project, [last_element_name])
    
    147 147
         for element_name in element_names:
    
    148 148
             if element_name == element_names[0]:
    
    149 149
                 assert states[element_name] == 'buildable'
    
    ... ... @@ -171,8 +171,9 @@ def test_track_single(cli, tmpdir, datafiles):
    171 171
                          dep_name=element_dep_name)
    
    172 172
     
    
    173 173
         # Assert that tracking is needed for both elements
    
    174
    -    assert cli.get_element_state(project, element_dep_name) == 'no reference'
    
    175
    -    assert cli.get_element_state(project, element_target_name) == 'no reference'
    
    174
    +    states = cli.get_element_states(project, [element_target_name])
    
    175
    +    assert states[element_dep_name] == 'no reference'
    
    176
    +    assert states[element_target_name] == 'no reference'
    
    176 177
     
    
    177 178
         # Now first try to track only one element
    
    178 179
         result = cli.run(project=project, args=[
    
    ... ... @@ -187,8 +188,9 @@ def test_track_single(cli, tmpdir, datafiles):
    187 188
         result.assert_success()
    
    188 189
     
    
    189 190
         # Assert that the dependency is waiting and the target has still never been tracked
    
    190
    -    assert cli.get_element_state(project, element_dep_name) == 'no reference'
    
    191
    -    assert cli.get_element_state(project, element_target_name) == 'waiting'
    
    191
    +    states = cli.get_element_states(project, [element_target_name])
    
    192
    +    assert states[element_dep_name] == 'no reference'
    
    193
    +    assert states[element_target_name] == 'waiting'
    
    192 194
     
    
    193 195
     
    
    194 196
     @pytest.mark.datafiles(DATA_DIR)
    
    ... ... @@ -212,8 +214,9 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
    212 214
                          dep_name=element_dep_name)
    
    213 215
     
    
    214 216
         # Assert that a fetch is needed
    
    215
    -    assert cli.get_element_state(project, element_dep_name) == 'no reference'
    
    216
    -    assert cli.get_element_state(project, element_target_name) == 'no reference'
    
    217
    +    states = cli.get_element_states(project, [element_target_name])
    
    218
    +    assert states[element_dep_name] == 'no reference'
    
    219
    +    assert states[element_target_name] == 'no reference'
    
    217 220
     
    
    218 221
         # Now first try to track it
    
    219 222
         result = cli.run(project=project, args=[
    
    ... ... @@ -231,8 +234,9 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind):
    231 234
         result.assert_success()
    
    232 235
     
    
    233 236
         # Assert that the dependency is buildable and the target is waiting
    
    234
    -    assert cli.get_element_state(project, element_dep_name) == 'no reference'
    
    235
    -    assert cli.get_element_state(project, element_target_name) == 'waiting'
    
    237
    +    states = cli.get_element_states(project, [element_target_name])
    
    238
    +    assert states[element_dep_name] == 'no reference'
    
    239
    +    assert states[element_target_name] == 'waiting'
    
    236 240
     
    
    237 241
     
    
    238 242
     @pytest.mark.datafiles(os.path.join(TOP_DIR))
    
    ... ... @@ -672,21 +676,20 @@ def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
    672 676
     
    
    673 677
     
    
    674 678
     @pytest.mark.datafiles(DATA_DIR)
    
    675
    -@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
    
    676
    -def test_track_error_cannot_write_file(cli, tmpdir, datafiles, kind):
    
    679
    +def test_track_error_cannot_write_file(cli, tmpdir, datafiles):
    
    677 680
         if os.geteuid() == 0:
    
    678 681
             pytest.skip("This is not testable with root permissions")
    
    679 682
     
    
    680 683
         project = str(datafiles)
    
    681 684
         dev_files_path = os.path.join(project, 'files', 'dev-files')
    
    682 685
         element_path = os.path.join(project, 'elements')
    
    683
    -    element_name = 'track-test-{}.bst'.format(kind)
    
    686
    +    element_name = 'track-test.bst'
    
    684 687
     
    
    685 688
         configure_project(project, {
    
    686 689
             'ref-storage': 'inline'
    
    687 690
         })
    
    688 691
     
    
    689
    -    repo = create_repo(kind, str(tmpdir))
    
    692
    +    repo = create_repo('git', str(tmpdir))
    
    690 693
         ref = repo.create(dev_files_path)
    
    691 694
     
    
    692 695
         element_full_path = os.path.join(element_path, element_name)
    

  • tests/frontend/workspace.py
    ... ... @@ -107,15 +107,17 @@ class WorkspaceCreater():
    107 107
                 element_name, element_path, workspace_dir = \
    
    108 108
                     self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
    
    109 109
                                                   element_attrs)
    
    110
    -
    
    111
    -            # Assert that there is no reference, a track & fetch is needed
    
    112
    -            state = self.cli.get_element_state(self.project_path, element_name)
    
    113
    -            if track:
    
    114
    -                assert state == 'no reference'
    
    115
    -            else:
    
    116
    -                assert state == 'fetch needed'
    
    117 110
                 element_tuples.append((element_name, workspace_dir))
    
    118 111
     
    
    112
    +        # Assert that there is no reference, a track & fetch is needed
    
    113
    +        states = self.cli.get_element_states(self.project_path, [
    
    114
    +            e for e, _ in element_tuples
    
    115
    +        ])
    
    116
    +        if track:
    
    117
    +            assert not any(states[e] != 'no reference' for e, _ in element_tuples)
    
    118
    +        else:
    
    119
    +            assert not any(states[e] != 'fetch needed' for e, _ in element_tuples)
    
    120
    +
    
    119 121
             return element_tuples
    
    120 122
     
    
    121 123
         def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
    
    ... ... @@ -140,12 +142,14 @@ class WorkspaceCreater():
    140 142
     
    
    141 143
             result.assert_success()
    
    142 144
     
    
    143
    -        for element_name, workspace_dir in element_tuples:
    
    144
    -            # Assert that we are now buildable because the source is
    
    145
    -            # now cached.
    
    146
    -            assert self.cli.get_element_state(self.project_path, element_name) == 'buildable'
    
    145
    +        # Assert that we are now buildable because the source is now cached.
    
    146
    +        states = self.cli.get_element_states(self.project_path, [
    
    147
    +            e for e, _ in element_tuples
    
    148
    +        ])
    
    149
    +        assert not any(states[e] != 'buildable' for e, _ in element_tuples)
    
    147 150
     
    
    148
    -            # Check that the executable hello file is found in the workspace
    
    151
    +        # Check that the executable hello file is found in each workspace
    
    152
    +        for element_name, workspace_dir in element_tuples:
    
    149 153
                 filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
    
    150 154
                 assert os.path.exists(filename)
    
    151 155
     
    

  • tests/integration/build-tree.py
    ... ... @@ -158,10 +158,8 @@ def test_buildtree_options(cli, tmpdir, datafiles):
    158 158
             assert cli.get_element_state(project, element_name) == 'cached'
    
    159 159
     
    
    160 160
             # Discard the cache
    
    161
    -        cli.configure({
    
    162
    -            'artifacts': {'url': share.repo, 'push': True},
    
    163
    -            'artifactdir': os.path.join(cli.directory, 'artifacts2')
    
    164
    -        })
    
    161
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'artifacts'))
    
    162
    +        shutil.rmtree(os.path.join(str(tmpdir), 'cache', 'cas'))
    
    165 163
             assert cli.get_element_state(project, element_name) != 'cached'
    
    166 164
     
    
    167 165
             # Pull from cache, but do not include buildtrees.
    

  • tests/integration/cachedfail.py
    ... ... @@ -160,7 +160,6 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
    160 160
     
    
    161 161
             # This element should have failed
    
    162 162
             assert cli.get_element_state(project, 'element.bst') == 'failed'
    
    163
    -        # This element should have been pushed to the remote
    
    164 163
             assert share.has_artifact('test', 'element.bst', cli.get_element_key(project, 'element.bst'))
    
    165 164
     
    
    166 165
     
    

  • tests/integration/messages.py
    ... ... @@ -40,7 +40,7 @@ DATA_DIR = os.path.join(
    40 40
     @pytest.mark.integration
    
    41 41
     @pytest.mark.datafiles(DATA_DIR)
    
    42 42
     @pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    
    43
    -def test_disable_message_lines(cli, tmpdir, datafiles):
    
    43
    +def test_disable_message_lines(cli, tmpdir, datafiles, integration_cache):
    
    44 44
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    45 45
         element_path = os.path.join(project, 'elements')
    
    46 46
         element_name = 'message.bst'
    
    ... ... @@ -66,7 +66,7 @@ def test_disable_message_lines(cli, tmpdir, datafiles):
    66 66
         assert 'echo "Silly message"' in result.stderr
    
    67 67
     
    
    68 68
         # Let's now build it again, but with --message-lines 0
    
    69
    -    cli.remove_artifact_from_cache(project, element_name)
    
    69
    +    cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
    
    70 70
         result = cli.run(project=project, args=["--message-lines", "0",
    
    71 71
                                                 "build", element_name])
    
    72 72
         result.assert_success()
    
    ... ... @@ -76,7 +76,7 @@ def test_disable_message_lines(cli, tmpdir, datafiles):
    76 76
     @pytest.mark.integration
    
    77 77
     @pytest.mark.datafiles(DATA_DIR)
    
    78 78
     @pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
    
    79
    -def test_disable_error_lines(cli, tmpdir, datafiles):
    
    79
    +def test_disable_error_lines(cli, tmpdir, datafiles, integration_cache):
    
    80 80
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    81 81
         element_path = os.path.join(project, 'elements')
    
    82 82
         element_name = 'message.bst'
    
    ... ... @@ -103,7 +103,7 @@ def test_disable_error_lines(cli, tmpdir, datafiles):
    103 103
         assert "This is a syntax error" in result.stderr
    
    104 104
     
    
    105 105
         # Let's now build it again, but with --error-lines 0
    
    106
    -    cli.remove_artifact_from_cache(project, element_name)
    
    106
    +    cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
    
    107 107
         result = cli.run(project=project, args=["--error-lines", "0",
    
    108 108
                                                 "build", element_name])
    
    109 109
         result.assert_main_error(ErrorDomain.STREAM, None)
    

  • tests/integration/pullbuildtrees.py
    ... ... @@ -19,9 +19,10 @@ DATA_DIR = os.path.join(
    19 19
     # cleared as just forcefully removing the refpath leaves dangling objects.
    
    20 20
     def default_state(cli, tmpdir, share):
    
    21 21
         shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
    
    22
    +    shutil.rmtree(os.path.join(str(tmpdir), 'cas'))
    
    22 23
         cli.configure({
    
    23 24
             'artifacts': {'url': share.repo, 'push': False},
    
    24
    -        'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
    
    25
    +        'rootcachedir': str(tmpdir),
    
    25 26
             'cache': {'pull-buildtrees': False},
    
    26 27
         })
    
    27 28
     
    
    ... ... @@ -42,7 +43,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
    42 43
             create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
    
    43 44
             cli2.configure({
    
    44 45
                 'artifacts': {'url': share1.repo, 'push': True},
    
    45
    -            'artifactdir': os.path.join(str(tmpdir), 'artifacts')
    
    46
    +            'rootcachedir': str(tmpdir),
    
    46 47
             })
    
    47 48
     
    
    48 49
             # Build autotools element, checked pushed, delete local
    

  • tests/integration/source-determinism.py
    ... ... @@ -94,9 +94,7 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_ca
    94 94
                     return f.read()
    
    95 95
             finally:
    
    96 96
                 os.umask(old_umask)
    
    97
    -            cache_dir = integration_cache.artifacts
    
    98
    -            cli.remove_artifact_from_cache(project, element_name,
    
    99
    -                                           cache_dir=cache_dir)
    
    97
    +            cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
    
    100 98
     
    
    101 99
         assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
    
    102 100
     
    
    ... ... @@ -156,8 +154,6 @@ def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
    156 154
                 with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
    
    157 155
                     return f.read()
    
    158 156
             finally:
    
    159
    -            cache_dir = integration_cache.artifacts
    
    160
    -            cli.remove_artifact_from_cache(project, element_name,
    
    161
    -                                           cache_dir=cache_dir)
    
    157
    +            cli.remove_artifact_from_cache(project, element_name, cache_dir=integration_cache.root)
    
    162 158
     
    
    163 159
         assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)

  • tests/internals/utils.py deleted
    1
    -import os
    
    2
    -from unittest import mock
    
    3
    -
    
    4
    -from buildstream import _yaml
    
    5
    -
    
    6
    -from ..testutils.runcli import cli
    
    7
    -
    
    8
    -
    
    9
    -KiB = 1024
    
    10
    -MiB = (KiB * 1024)
    
    11
    -GiB = (MiB * 1024)
    
    12
    -TiB = (GiB * 1024)
    
    13
    -
    
    14
    -
    
    15
    -def test_parse_size_over_1024T(cli, tmpdir):
    
    16
    -    cli.configure({
    
    17
    -        'cache': {
    
    18
    -            'quota': 2048 * TiB
    
    19
    -        }
    
    20
    -    })
    
    21
    -    project = tmpdir.join("main")
    
    22
    -    os.makedirs(str(project))
    
    23
    -    _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
    
    24
    -
    
    25
    -    volume_space_patch = mock.patch(
    
    26
    -        "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
    
    27
    -        autospec=True,
    
    28
    -        return_value=(1025 * TiB, 1025 * TiB)
    
    29
    -    )
    
    30
    -
    
    31
    -    with volume_space_patch:
    
    32
    -        result = cli.run(project, args=["build", "file.bst"])
    
    33
    -        failure_msg = 'Your system does not have enough available space to support the cache quota specified.'
    
    34
    -        assert failure_msg in result.stderr

  • tests/sources/remote.py
    ... ... @@ -136,18 +136,25 @@ def test_unique_key(cli, tmpdir, datafiles):
    136 136
         '''
    
    137 137
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    138 138
         generate_project(project, tmpdir)
    
    139
    -    assert cli.get_element_state(project, 'target.bst') == "fetch needed"
    
    140
    -    assert cli.get_element_state(project, 'target-custom.bst') == "fetch needed"
    
    141
    -    assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
    
    139
    +    states = cli.get_element_states(project, [
    
    140
    +        'target.bst', 'target-custom.bst', 'target-custom-executable.bst'
    
    141
    +    ])
    
    142
    +    assert states['target.bst'] == "fetch needed"
    
    143
    +    assert states['target-custom.bst'] == "fetch needed"
    
    144
    +    assert states['target-custom-executable.bst'] == "fetch needed"
    
    145
    +
    
    142 146
         # Try to fetch it
    
    143 147
         result = cli.run(project=project, args=[
    
    144 148
             'source', 'fetch', 'target.bst'
    
    145 149
         ])
    
    146 150
     
    
    147 151
         # We should download the file only once
    
    148
    -    assert cli.get_element_state(project, 'target.bst') == 'buildable'
    
    149
    -    assert cli.get_element_state(project, 'target-custom.bst') == 'buildable'
    
    150
    -    assert cli.get_element_state(project, 'target-custom-executable.bst') == 'buildable'
    
    152
    +    states = cli.get_element_states(project, [
    
    153
    +        'target.bst', 'target-custom.bst', 'target-custom-executable.bst'
    
    154
    +    ])
    
    155
    +    assert states['target.bst'] == 'buildable'
    
    156
    +    assert states['target-custom.bst'] == 'buildable'
    
    157
    +    assert states['target-custom-executable.bst'] == 'buildable'
    
    151 158
     
    
    152 159
         # But the cache key is different because the 'filename' is different.
    
    153 160
         assert cli.get_element_key(project, 'target.bst') != \
    

  • tests/testutils/runcli.py
    ... ... @@ -247,15 +247,13 @@ class Cli():
    247 247
                                        *, cache_dir=None):
    
    248 248
             # Read configuration to figure out where artifacts are stored
    
    249 249
             if not cache_dir:
    
    250
    -            default = os.path.join(project, 'cache', 'artifacts')
    
    251
    -
    
    252
    -            if self.config is not None:
    
    253
    -                cache_dir = self.config.get('artifactdir', default)
    
    254
    -            else:
    
    255
    -                cache_dir = default
    
    250
    +            cache_dir = os.path.join(project, 'cache')
    
    256 251
     
    
    257 252
             cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
    
    258 253
     
    
    254
    +        # Replace forward slashes in the element name with dashes
    
    255
    +        element_name = element_name.replace('/', '-')
    
    256
    +
    
    259 257
             cache_dir = os.path.splitext(os.path.join(cache_dir, 'test', element_name))[0]
    
    260 258
             shutil.rmtree(cache_dir)
    
    261 259
     
    
    ... ... @@ -398,13 +396,12 @@ class Cli():
    398 396
         #
    
    399 397
         # Returns a dictionary with the element names as keys
    
    400 398
         #
    
    401
    -    def get_element_states(self, project, target, deps='all'):
    
    399
    +    def get_element_states(self, project, targets, deps='all'):
    
    402 400
             result = self.run(project=project, silent=True, args=[
    
    403 401
                 'show',
    
    404 402
                 '--deps', deps,
    
    405 403
                 '--format', '%{name}||%{state}',
    
    406
    -            target
    
    407
    -        ])
    
    404
    +        ] + targets)
    
    408 405
             result.assert_success()
    
    409 406
             lines = result.output.splitlines()
    
    410 407
             states = {}
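
    The parsing loop that fills 'states' sits outside this hunk; for reference,
    a sketch of how the '%{name}||%{state}' format maps onto the returned dict
    (the loop body is an assumption, since it is not part of the changed lines):

        def parse_states(output):
            # Each line of 'bst show' output is '<name>||<state>'
            states = {}
            for line in output.splitlines():
                name, state = line.split('||')
                states[name] = state
            return states
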
    
    ... ... @@ -553,11 +550,21 @@ def cli_integration(tmpdir, integration_cache):
    553 550
         # We want to cache sources for integration tests more permanently,
    
    554 551
         # to avoid downloading the huge base-sdk repeatedly
    
    555 552
         fixture.configure({
    
    553
    +        'rootcachedir': integration_cache.root,
    
    556 554
             'sourcedir': integration_cache.sources,
    
    557
    -        'artifactdir': integration_cache.artifacts
    
    558 555
         })
    
    559 556
     
    
    560
    -    return fixture
    
    557
    +    yield fixture
    
    558
    +
    
    559
    +    # Remove leftover build and tmp directories, if present
    
    560
    +    try:
    
    561
    +        shutil.rmtree(os.path.join(integration_cache.root, 'build'))
    
    562
    +    except FileNotFoundError:
    
    563
    +        pass
    
    564
    +    try:
    
    565
    +        shutil.rmtree(os.path.join(integration_cache.root, 'tmp'))
    
    566
    +    except FileNotFoundError:
    
    567
    +        pass
    
    561 568
     
    
    562 569
     
    
    563 570
     @contextmanager
    
    ... ... @@ -597,10 +604,8 @@ def configured(directory, config=None):
    597 604
     
    
    598 605
         if not config.get('sourcedir', False):
    
    599 606
             config['sourcedir'] = os.path.join(directory, 'sources')
    
    600
    -    if not config.get('builddir', False):
    
    601
    -        config['builddir'] = os.path.join(directory, 'build')
    
    602
    -    if not config.get('artifactdir', False):
    
    603
    -        config['artifactdir'] = os.path.join(directory, 'artifacts')
    
    607
    +    if not config.get('rootcachedir', False):
    
    608
    +        config['rootcachedir'] = directory
    
    604 609
         if not config.get('logdir', False):
    
    605 610
             config['logdir'] = os.path.join(directory, 'logs')
    
    606 611
     
    


