[Notes] [Git][BuildStream/buildstream][gokcennurlu/remote_url_override_push_error] 6 commits: _yamlcache.py: Use a project's junction name if present




Gökçen Nurlu pushed to branch gokcennurlu/remote_url_override_push_error at BuildStream / buildstream

Commits:

22 changed files:

Changes:

  • buildstream/_artifactcache/artifactcache.py
    @@ -110,36 +110,42 @@ class ArtifactCache():
             # assume project and element names are not allowed to contain slashes
             return '{0}/{1}/{2}'.format(project.name, element_name, key)

    +    # get_remotes_from_projects()
    +    #
    +    # Generates list artifact caches based on project configuration
    +    #
    +    # Returns:
    +    #    (list of (list of ArtifactCacheSpec, Project)): Configurations each are
    +    #        ready to be consumed by `self._set_remotes()`
    +    #
    +    # This requires that all of the projects which are to be processed in the session
    +    # have already been loaded and are observable in the Context.
    +    #
    +    def get_remotes_from_projects(self):
    +        return [
    +            (_configured_remote_artifact_cache_specs(self.context, prj), prj)
    +            for prj in self.context.get_projects()
    +        ]
    +
         # setup_remotes():
         #
         # Sets up which remotes to use
         #
         # Args:
    -    #    use_config (bool): Whether to use project configuration
    -    #    remote_url (str): Remote artifact cache URL
    +    #    remotes (list of (list of ArtifactCacheSpec, Project)): Configurations each are
    +    #        ready to be consumed by `self._set_remotes()`
         #
         # This requires that all of the projects which are to be processed in the session
         # have already been loaded and are observable in the Context.
         #
    -    def setup_remotes(self, *, use_config=False, remote_url=None):
    -
    +    def setup_remotes(self, *, remotes):
             # Ensure we do not double-initialise since this can be expensive
             assert not self._remotes_setup
             self._remotes_setup = True

    -        # Initialize remote artifact caches. We allow the commandline to override
    -        # the user config in some cases (for example `bst push --remote=...`).
    -        has_remote_caches = False
    -        if remote_url:
    -            self._set_remotes([ArtifactCacheSpec(remote_url, push=True)])
    -            has_remote_caches = True
    -        if use_config:
    -            for project in self.context.get_projects():
    -                artifact_caches = _configured_remote_artifact_cache_specs(self.context, project)
    -                if artifact_caches:  # artifact_caches is a list of ArtifactCacheSpec instances
    -                    self._set_remotes(artifact_caches, project=project)
    -                    has_remote_caches = True
    -        if has_remote_caches:
    +        if remotes:
    +            for caches, project in remotes:
    +                self._set_remotes(caches, project=project)
                 self._initialize_remotes()

         # specs_from_config_node()
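
    For orientation, a minimal sketch of the new two-step calling convention, assuming `context` is a loaded Context whose projects are already fully loaded (only the APIs shown in the diff are used; the variable names are illustrative):

        # Collect (list of ArtifactCacheSpec, Project) pairs from the project
        # configuration, then hand them to setup_remotes() in a single call.
        artifacts = context.artifactcache
        remotes = artifacts.get_remotes_from_projects()
        artifacts.setup_remotes(remotes=remotes)   # replaces setup_remotes(use_config=True)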
    

  • buildstream/_stream.py
    @@ -28,6 +28,7 @@ import tarfile
     from contextlib import contextmanager
     from tempfile import TemporaryDirectory

    +from ._artifactcache import ArtifactCacheSpec
     from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
     from ._message import Message, MessageType
     from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
    @@ -305,6 +306,7 @@ class Stream():
                                      selection=selection,
                                      use_artifact_config=use_config,
                                      artifact_remote_url=remote,
    +                                 artifact_remote_can_push=False,
                                      fetch_subprojects=True)

             if not self._artifacts.has_fetch_remotes():
    @@ -343,6 +345,7 @@ class Stream():
                                      selection=selection,
                                      use_artifact_config=use_config,
                                      artifact_remote_url=remote,
    +                                 artifact_remote_can_push=True,
                                      fetch_subprojects=True)

             if not self._artifacts.has_push_remotes():
    @@ -922,7 +925,8 @@ class Stream():
         #    track_except_targets (list of str): Specified targets to except from fetching
         #    track_cross_junctions (bool): Whether tracking should cross junction boundaries
         #    use_artifact_config (bool): Whether to initialize artifacts with the config
    -    #    artifact_remote_url (bool): A remote url for initializing the artifacts
    +    #    artifact_remote_url (str): A remote url for initializing the artifacts
    +    #    artifact_remote_can_push (bool): Whether `artifact_remote_url` can be used to push
         #    fetch_subprojects (bool): Whether to fetch subprojects while loading
         #
         # Returns:
    @@ -937,6 +941,7 @@ class Stream():
                   track_cross_junctions=False,
                   use_artifact_config=False,
                   artifact_remote_url=None,
    +              artifact_remote_can_push=False,
                   fetch_subprojects=False,
                   dynamic_plan=False):

    @@ -1000,12 +1005,20 @@ class Stream():
                 self._pipeline.resolve_elements(track_selected)
                 return [], track_selected

    -        # ArtifactCache.setup_remotes expects all projects to be fully loaded
    -        for project in self._context.get_projects():
    -            project.ensure_fully_loaded()
    -
    +        if use_artifact_config:
    +            # ArtifactCache.get_remotes_from_projects expects all projects to be
    +            # fully loaded
    +            for project in self._context.get_projects():
    +                project.ensure_fully_loaded()
    +            remotes = self._artifacts.get_remotes_from_projects()
    +        else:
    +            # Build the ArtifactCacheSpec instance based on `--remote`
    +            remotes = [(
    +                [ArtifactCacheSpec(artifact_remote_url, push=artifact_remote_can_push)],
    +                None
    +            )]
             # Connect to remote caches, this needs to be done before resolving element state
    -        self._artifacts.setup_remotes(use_config=use_artifact_config, remote_url=artifact_remote_url)
    +        self._artifacts.setup_remotes(remotes=remotes)

             # Now move on to loading primary selection.
             #
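
    The `remotes` value handed to `setup_remotes()` is always a list of (list of ArtifactCacheSpec, Project) pairs; for the `--remote` command-line override the project slot is simply None. A condensed sketch of the branch added above, with `artifact_remote_url` and `artifact_remote_can_push` as in the new `_load()` signature:

        if use_artifact_config:
            # Per-project specs read from the project configuration
            remotes = self._artifacts.get_remotes_from_projects()
        else:
            # Single spec built from `--remote`, not tied to any project
            remotes = [([ArtifactCacheSpec(artifact_remote_url,
                                           push=artifact_remote_can_push)], None)]
        self._artifacts.setup_remotes(remotes=remotes)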
    

  • buildstream/_yamlcache.py
    @@ -68,7 +68,7 @@ class YamlCache():
         #    (bool): Whether the file is cached.
         def is_cached(self, project, filepath):
             cache_path = self._get_filepath(project, filepath)
    -        project_name = project.name if project else ""
    +        project_name = self.get_project_name(project)
             try:
                 project_cache = self._project_caches[project_name]
                 if cache_path in project_cache.elements:
    @@ -167,7 +167,7 @@ class YamlCache():
         #    value (decorated dict): The data to put into the cache.
         def put_from_key(self, project, filepath, key, value):
             cache_path = self._get_filepath(project, filepath)
    -        project_name = project.name if project else ""
    +        project_name = self.get_project_name(project)
             try:
                 project_cache = self._project_caches[project_name]
             except KeyError:
    @@ -237,7 +237,7 @@ class YamlCache():
         #    (decorated dict): The parsed yaml from the cache, or None if the file isn't in the cache.
         def _get(self, project, filepath, key):
             cache_path = self._get_filepath(project, filepath)
    -        project_name = project.name if project else ""
    +        project_name = self.get_project_name(project)
             try:
                 project_cache = self._project_caches[project_name]
                 try:
    @@ -253,6 +253,30 @@ class YamlCache():
                 pass
             return None

    +    # get_project_name():
    +    #
    +    # Gets a name appropriate for Project. Projects must use their junction's
    +    # name if present, otherwise elements with the same contents under the
    +    # same path with identically-named projects are considered the same yaml
    +    # object, despite existing in different Projects.
    +    #
    +    # Args:
    +    #    project (Project): The project this file is in, or None.
    +    #
    +    # Returns:
    +    #    (str): The project's junction's name if present, the project's name,
    +    #           or an empty string if there is no project
    +    @staticmethod
    +    def get_project_name(project):
    +        if project:
    +            if project.junction:
    +                project_name = project.junction.name
    +            else:
    +                project_name = project.name
    +        else:
    +            project_name = ""
    +        return project_name
    +

     CachedProject = namedtuple('CachedProject', ['elements'])

    @@ -287,7 +311,7 @@ class BstPickler(pickle.Pickler):
             if isinstance(obj, _yaml.ProvenanceFile):
                 if obj.project:
                     # ProvenanceFile's project object cannot be stored as it is.
    -                project_tag = obj.project.name
    +                project_tag = YamlCache.get_project_name(obj.project)
                     # ProvenanceFile's filename must be stored relative to the
                     # project, as the project dir may move.
                     name = os.path.relpath(obj.name, obj.project.directory)
    @@ -319,14 +343,14 @@ class BstUnpickler(pickle.Unpickler):

                 if project_tag is not None:
                     for p in self._context.get_projects():
    -                    if project_tag == p.name:
    +                    if YamlCache.get_project_name(p) == project_tag:
                             project = p
                             break

                     name = os.path.join(project.directory, tagged_name)

                     if not project:
    -                    projects = [p.name for p in self._context.get_projects()]
    +                    projects = [YamlCache.get_project_name(p) for p in self._context.get_projects()]
                         raise pickle.UnpicklingError("No project with name {} found in {}"
                                                      .format(project_tag, projects))
                 else:
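
    The point of keying the YAML cache on the junction name: two sub-projects that share the same `name:` in their project.conf but are pulled in through differently named junction elements now get distinct cache namespaces. A rough illustration, assuming `project_a` and `project_b` are Project objects loaded through junctions with different names (the variables are illustrative, not part of the diff):

        # The key is the junction element's name when the project was loaded
        # through a junction, the project's own name otherwise, and "" when
        # there is no project at all.
        key_a = YamlCache.get_project_name(project_a)
        key_b = YamlCache.get_project_name(project_b)
        # Different junction names give different keys, even if
        # project_a.name == project_b.name.
        assert key_a != key_b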
    

  • tests/artifactcache/pull.py
    @@ -146,7 +146,8 @@ def _test_pull(user_config_file, project_dir, artifact_dir,
         element = project.load_elements([element_name])[0]

         # Manually setup the CAS remote
    -    cas.setup_remotes(use_config=True)
    +    remotes = cas.get_remotes_from_projects()
    +    cas.setup_remotes(remotes=remotes)

         if cas.has_push_remotes(element=element):
             # Push the element's artifact
    @@ -284,7 +285,8 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest
         cas = artifactcache.cas

         # Manually setup the CAS remote
    -    artifactcache.setup_remotes(use_config=True)
    +    remotes = artifactcache.get_remotes_from_projects()
    +    artifactcache.setup_remotes(remotes=remotes)

         if artifactcache.has_push_remotes():
             directory = remote_execution_pb2.Directory()
    @@ -319,7 +321,8 @@ def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest
         cas = context.artifactcache

         # Manually setup the CAS remote
    -    cas.setup_remotes(use_config=True)
    +    remotes = cas.get_remotes_from_projects()
    +    cas.setup_remotes(remotes=remotes)

         if cas.has_push_remotes():
             # Pull the artifact using the Tree object
    

  • tests/artifactcache/push.py
    @@ -125,8 +125,8 @@ def _test_push(user_config_file, project_dir, artifact_dir,
         element = project.load_elements([element_name])[0]

         # Manually setup the CAS remote
    -    cas.setup_remotes(use_config=True)
    -    cas.initialize_remotes()
    +    remotes = cas.get_remotes_from_projects()
    +    cas.setup_remotes(remotes=remotes)

         if cas.has_push_remotes(element=element):
             # Push the element's artifact
    @@ -185,8 +185,8 @@ def test_push_directory(cli, tmpdir, datafiles):
             assert artifactcache.contains(element, element_key)

             # Manually setup the CAS remote
    -        artifactcache.setup_remotes(use_config=True)
    -        artifactcache.initialize_remotes()
    +        remotes = artifactcache.get_remotes_from_projects()
    +        artifactcache.setup_remotes(remotes=remotes)
             assert artifactcache.has_push_remotes(element=element)

             # Recreate the CasBasedDirectory object from the cached artifact
    @@ -231,8 +231,8 @@ def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_d
         cas = context.artifactcache

         # Manually setup the CAS remote
    -    cas.setup_remotes(use_config=True)
    -    cas.initialize_remotes()
    +    remotes = cas.get_remotes_from_projects()
    +    cas.setup_remotes(remotes=remotes)

         if cas.has_push_remotes():
             # Create a CasBasedDirectory from local CAS cache content
    @@ -307,8 +307,8 @@ def _test_push_message(user_config_file, project_dir, artifact_dir, queue):
         cas = context.artifactcache

         # Manually setup the CAS remote
    -    cas.setup_remotes(use_config=True)
    -    cas.initialize_remotes()
    +    remotes = cas.get_remotes_from_projects()
    +    cas.setup_remotes(remotes=remotes)

         if cas.has_push_remotes():
             # Create an example message object

  • tests/loader/junctions.py
    @@ -47,6 +47,16 @@ def test_simple_build(cli, tmpdir, datafiles):
         assert(os.path.exists(os.path.join(checkoutdir, 'foo.txt')))


    +@pytest.mark.datafiles(DATA_DIR)
    +def test_build_of_same_junction_used_twice(cli, tmpdir, datafiles):
    +    project = os.path.join(str(datafiles), 'inconsistent-names')
    +
    +    # Check we can build a project that contains the same junction
    +    # that is used twice, but named differently
    +    result = cli.run(project=project, args=['build', 'target.bst'])
    +    assert result.exit_code == 0
    +
    +
     @pytest.mark.datafiles(DATA_DIR)
     def test_nested_simple(cli, tmpdir, datafiles):
         foo = os.path.join(str(datafiles), 'foo')
    

  • tests/loader/junctions/inconsistent-names/elements/junction-A.bst
    +kind: junction
    +sources:
    +- kind: local
    +  path: junctionA

  • tests/loader/junctions/inconsistent-names/elements/junction-B-diff-name.bst
    +kind: junction
    +sources:
    +- kind: local
    +  path: junctionA/junctionB

  • tests/loader/junctions/inconsistent-names/elements/target.bst
    +kind: import
    +sources:
    +- kind: local
    +  path: files/foo
    +depends:
    +- filename: lib2.bst
    +  junction: junction-B-diff-name.bst
    +- filename: lib.bst
    +  junction: junction-A.bst

  • tests/loader/junctions/inconsistent-names/files/foo

  • tests/loader/junctions/inconsistent-names/junctionA/elements/app.bst
    +kind: import
    +sources:
    +- kind: local
    +  path: files/app
    +depends:
    +- lib.bst

  • tests/loader/junctions/inconsistent-names/junctionA/elements/junction-B.bst
    +kind: junction
    +sources:
    +- kind: local
    +  path: junctionB

  • tests/loader/junctions/inconsistent-names/junctionA/elements/lib.bst
    +kind: import
    +sources:
    +- kind: local
    +  path: files/lib
    +depends:
    +- filename: base.bst
    +  junction: junction-B.bst

  • tests/loader/junctions/inconsistent-names/junctionA/files/app

  • tests/loader/junctions/inconsistent-names/junctionA/files/lib

  • tests/loader/junctions/inconsistent-names/junctionA/junctionB/base/baseimg

  • tests/loader/junctions/inconsistent-names/junctionA/junctionB/elements/base.bst
    +kind: import
    +sources:
    +- kind: local
    +  path: base

  • tests/loader/junctions/inconsistent-names/junctionA/junctionB/elements/lib2.bst
    +kind: import
    +sources:
    +- kind: local
    +  path: files/lib2
    +depends:
    +- base.bst

  • tests/loader/junctions/inconsistent-names/junctionA/junctionB/files/lib2

  • tests/loader/junctions/inconsistent-names/junctionA/junctionB/project.conf
    +# Unique project name
    +name: projectB
    +
    +# Subdirectory where elements are stored
    +element-path: elements

  • tests/loader/junctions/inconsistent-names/junctionA/project.conf
    +# Unique project name
    +name: projectA
    +
    +# Subdirectory where elements are stored
    +element-path: elements

  • tests/loader/junctions/inconsistent-names/project.conf
    +# Unique project name
    +name: inconsistent-names
    +
    +# Subdirectory where elements are stored
    +element-path: elements


