[Notes] [Git][BuildStream/buildstream][gokcennurlu/remote_url_override_push_error] 9 commits: Ensure `--deps=none` option works for `bst checkout`




Gökçen Nurlu pushed to branch gokcennurlu/remote_url_override_push_error at BuildStream / buildstream

Commits:

7 changed files:

Changes:

  • buildstream/_artifactcache/artifactcache.py
    @@ -110,36 +110,42 @@ class ArtifactCache():
             # assume project and element names are not allowed to contain slashes
             return '{0}/{1}/{2}'.format(project.name, element_name, key)
    
    +    # get_remotes_from_projects()
    +    #
    +    # Generates list artifact caches based on project configuration
    +    #
    +    # Returns:
    +    #    (list of (list of ArtifactCacheSpec, Project)): Configurations each are
    +    #        ready to be consumed by `self._set_remotes()`
    +    #
    +    # This requires that all of the projects which are to be processed in the session
    +    # have already been loaded and are observable in the Context.
    +    #
    +    def get_remotes_from_projects(self):
    +        return [
    +            (_configured_remote_artifact_cache_specs(self.context, prj), prj)
    +            for prj in self.context.get_projects()
    +        ]
    +
         # setup_remotes():
         #
         # Sets up which remotes to use
         #
         # Args:
    -    #    use_config (bool): Whether to use project configuration
    -    #    remote_url (str): Remote artifact cache URL
    +    #    remotes (list of (list of ArtifactCacheSpec, Project)): Configurations each are
    +    #        ready to be consumed by `self._set_remotes()`
         #
         # This requires that all of the projects which are to be processed in the session
         # have already been loaded and are observable in the Context.
         #
    -    def setup_remotes(self, *, use_config=False, remote_url=None):
    -
    +    def setup_remotes(self, *, remotes):
             # Ensure we do not double-initialise since this can be expensive
             assert not self._remotes_setup
             self._remotes_setup = True
    
    -        # Initialize remote artifact caches. We allow the commandline to override
    -        # the user config in some cases (for example `bst push --remote=...`).
    -        has_remote_caches = False
    -        if remote_url:
    -            self._set_remotes([ArtifactCacheSpec(remote_url, push=True)])
    -            has_remote_caches = True
    -        if use_config:
    -            for project in self.context.get_projects():
    -                artifact_caches = _configured_remote_artifact_cache_specs(self.context, project)
    -                if artifact_caches:  # artifact_caches is a list of ArtifactCacheSpec instances
    -                    self._set_remotes(artifact_caches, project=project)
    -                    has_remote_caches = True
    -        if has_remote_caches:
    +        if remotes:
    +            for caches, project in remotes:
    +                self._set_remotes(caches, project=project)
                 self._initialize_remotes()
    
         # specs_from_config_node()
    
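    This split separates "discover the configured remotes" from "connect to them": a caller builds the
    (specs, project) list first and then hands it to setup_remotes(). A minimal sketch of that call
    pattern, assuming `artifacts` is an already-constructed ArtifactCache whose Context has all
    projects loaded (everything else is taken from the diff above):

        # Sketch only: how a frontend caller might wire the two methods together.
        def connect_configured_remotes(artifacts):
            # Each entry pairs a list of ArtifactCacheSpec with the Project it came from.
            remotes = artifacts.get_remotes_from_projects()

            # setup_remotes() connects the caches and must only be called once
            # (it asserts against double initialisation).
            artifacts.setup_remotes(remotes=remotes)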

  • buildstream/_frontend/cli.py
    @@ -640,7 +640,7 @@ def shell(app, element, sysroot, mount, isolate, build_, command):
     @click.option('--force', '-f', default=False, is_flag=True,
                   help="Allow files to be overwritten")
     @click.option('--deps', '-d', default='run',
    -              type=click.Choice(['run', 'none']),
    +              type=click.Choice(['run', 'build', 'none']),
                   help='The dependencies to checkout (default: run)')
     @click.option('--integrate/--no-integrate', default=True, is_flag=True,
                   help="Whether to run integration commands")
    @@ -657,16 +657,24 @@ def shell(app, element, sysroot, mount, isolate, build_, command):
     def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
         """Checkout a built artifact to the specified location
         """
    +    from ..element import Scope
    
         if hardlinks and tar:
             click.echo("ERROR: options --hardlinks and --tar conflict", err=True)
             sys.exit(-1)
    
    +    if deps == "run":
    +        scope = Scope.RUN
    +    elif deps == "build":
    +        scope = Scope.BUILD
    +    elif deps == "none":
    +        scope = Scope.NONE
    +
         with app.initialized():
             app.stream.checkout(element,
                                 location=location,
                                 force=force,
    -                            deps=deps,
    +                            scope=scope,
                                 integrate=integrate,
                                 hardlinks=hardlinks,
                                 tar=tar)
    
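    The if/elif chain is just a string-to-enum translation at the CLI boundary. An equivalent sketch
    using a lookup table, shown only to make the mapping explicit (not part of the patch; the import
    mirrors the relative `from ..element import Scope` used above):

        # Equivalent sketch of the --deps to Scope translation (not from the patch).
        from buildstream.element import Scope

        _DEPS_TO_SCOPE = {
            "run": Scope.RUN,      # the element plus its runtime dependencies (default)
            "build": Scope.BUILD,  # only the element's build dependencies
            "none": Scope.NONE,    # just the element itself
        }

        def deps_to_scope(deps):
            # click.Choice guarantees `deps` is one of the three keys above.
            return _DEPS_TO_SCOPE[deps]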

  • buildstream/_stream.py
    @@ -28,6 +28,7 @@ import tarfile
     from contextlib import contextmanager
     from tempfile import TemporaryDirectory
    
    +from ._artifactcache import ArtifactCacheSpec
     from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
     from ._message import Message, MessageType
     from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
    @@ -305,6 +306,7 @@ class Stream():
                                      selection=selection,
                                      use_artifact_config=use_config,
                                      artifact_remote_url=remote,
    +                                 artifact_remote_can_push=False,
                                      fetch_subprojects=True)
    
             if not self._artifacts.has_fetch_remotes():
    @@ -343,6 +345,7 @@ class Stream():
                                      selection=selection,
                                      use_artifact_config=use_config,
                                      artifact_remote_url=remote,
    +                                 artifact_remote_can_push=True,
                                      fetch_subprojects=True)
    
             if not self._artifacts.has_push_remotes():
    @@ -370,7 +373,7 @@ class Stream():
         #    target (str): Target to checkout
         #    location (str): Location to checkout the artifact to
         #    force (bool): Whether files can be overwritten if necessary
    -    #    deps (str): The dependencies to checkout
    +    #    scope (str): The scope of dependencies to checkout
         #    integrate (bool): Whether to run integration commands
         #    hardlinks (bool): Whether checking out files hardlinked to
         #                      their artifacts is acceptable
    @@ -383,7 +386,7 @@ class Stream():
         def checkout(self, target, *,
                      location=None,
                      force=False,
    -                 deps='run',
    +                 scope=Scope.RUN,
                      integrate=True,
                      hardlinks=False,
                      tar=False):
    @@ -396,7 +399,7 @@ class Stream():
    
             # Stage deps into a temporary sandbox first
             try:
    -            with target._prepare_sandbox(Scope.RUN, None, deps=deps,
    +            with target._prepare_sandbox(scope=scope, directory=None,
                                              integrate=integrate) as sandbox:
    
                     # Copy or move the sandbox to the target directory
    @@ -922,7 +925,8 @@ class Stream():
         #    track_except_targets (list of str): Specified targets to except from fetching
         #    track_cross_junctions (bool): Whether tracking should cross junction boundaries
         #    use_artifact_config (bool): Whether to initialize artifacts with the config
    -    #    artifact_remote_url (bool): A remote url for initializing the artifacts
    +    #    artifact_remote_url (str): A remote url for initializing the artifacts
    +    #    artifact_remote_can_push (bool): Whether `artifact_remote_url` can be used to push
         #    fetch_subprojects (bool): Whether to fetch subprojects while loading
         #
         # Returns:
    @@ -937,6 +941,7 @@ class Stream():
                   track_cross_junctions=False,
                   use_artifact_config=False,
                   artifact_remote_url=None,
    +              artifact_remote_can_push=False,
                   fetch_subprojects=False,
                   dynamic_plan=False):
    
    @@ -1000,12 +1005,20 @@ class Stream():
                 self._pipeline.resolve_elements(track_selected)
                 return [], track_selected
    
    -        # ArtifactCache.setup_remotes expects all projects to be fully loaded
    -        for project in self._context.get_projects():
    -            project.ensure_fully_loaded()
    -
    +        if use_artifact_config:
    +            # ArtifactCache.get_remotes_from_projects expects all projects to be
    +            # fully loaded
    +            for project in self._context.get_projects():
    +                project.ensure_fully_loaded()
    +            remotes = self._artifacts.get_remotes_from_projects()
    +        else:
    +            # Build the ArtifactCacheSpec instance based on `--remote`
    +            remotes = [(
    +                [ArtifactCacheSpec(artifact_remote_url, push=artifact_remote_can_push)],
    +                None
    +            )]
             # Connect to remote caches, this needs to be done before resolving element state
    -        self._artifacts.setup_remotes(use_config=use_artifact_config, remote_url=artifact_remote_url)
    +        self._artifacts.setup_remotes(remotes=remotes)
    
             # Now move on to loading primary selection.
             #
    
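    In other words, `_load()` now always hands `setup_remotes()` a list of (specs, project) pairs; the
    only difference is where that list comes from. A small sketch of the two shapes, assuming the URL
    below is invented for illustration while the constructor call matches the one in the diff:

        # Sketch of the two shapes the `remotes` argument can take (URL is made up).
        from buildstream._artifactcache import ArtifactCacheSpec

        # From an explicit `--remote URL`: a single anonymous entry with no Project;
        # the push flag follows the new artifact_remote_can_push argument.
        cli_remotes = [(
            [ArtifactCacheSpec('https://cache.example.com/artifacts', push=True)],
            None,
        )]

        # From project configuration (use_artifact_config=True), the equivalent list is
        # produced by ArtifactCache.get_remotes_from_projects(): one entry per loaded
        # project, each carrying that project's configured ArtifactCacheSpec list.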

  • buildstream/element.py
    @@ -438,7 +438,7 @@ class Element(Plugin):
                                                     visited=visited, recursed=True)
    
             # Yeild self only at the end, after anything needed has been traversed
    -        if should_yield and (recurse or recursed) and (scope in (Scope.ALL, Scope.RUN)):
    +        if should_yield and (recurse or recursed) and scope != Scope.BUILD:
                 yield self
    
         def search(self, scope, name):
    @@ -1339,7 +1339,7 @@ class Element(Plugin):
         # is used to stage things by the `bst checkout` codepath
         #
         @contextmanager
    -    def _prepare_sandbox(self, scope, directory, deps='run', integrate=True):
    +    def _prepare_sandbox(self, scope, directory, shell=False, integrate=True):
             # bst shell and bst checkout require a local sandbox.
             bare_directory = True if directory else False
             with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
    @@ -1350,20 +1350,19 @@ class Element(Plugin):
    
                 # Stage something if we need it
                 if not directory:
    -                if scope == Scope.BUILD:
    +                if shell and scope == Scope.BUILD:
                         self.stage(sandbox)
    -                elif scope == Scope.RUN:
    +                else:
                         # Stage deps in the sandbox root
    -                    if deps == 'run':
    -                        with self.timed_activity("Staging dependencies", silent_nested=True):
    -                            self.stage_dependency_artifacts(sandbox, scope)
    +                    with self.timed_activity("Staging dependencies", silent_nested=True):
    +                        self.stage_dependency_artifacts(sandbox, scope)
    
    -                        # Run any integration commands provided by the dependencies
    -                        # once they are all staged and ready
    -                        if integrate:
    -                            with self.timed_activity("Integrating sandbox"):
    -                                for dep in self.dependencies(scope):
    -                                    dep.integrate(sandbox)
    +                    # Run any integration commands provided by the dependencies
    +                    # once they are all staged and ready
    +                    if integrate:
    +                        with self.timed_activity("Integrating sandbox"):
    +                            for dep in self.dependencies(scope):
    +                                dep.integrate(sandbox)
    
                 yield sandbox
    
    @@ -1858,7 +1857,7 @@ class Element(Plugin):
         # If directory is not specified, one will be staged using scope
         def _shell(self, scope=None, directory=None, *, mounts=None, isolate=False, prompt=None, command=None):
    
    -        with self._prepare_sandbox(scope, directory) as sandbox:
    +        with self._prepare_sandbox(scope, directory, shell=True) as sandbox:
                 environment = self.get_environment()
                 environment = copy.copy(environment)
                 flags = SandboxFlags.INTERACTIVE | SandboxFlags.ROOT_READ_ONLY
    
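    The upshot of the `dependencies()` change is that the element itself is now yielded for every
    scope except Scope.BUILD, which is what lets the new Scope.NONE (see types.py below) stage only
    the target artifact. A rough illustration of the expected traversal results; this is not code from
    the patch, and the listed expectations are inferred, not quoted:

        # Rough illustration (not from the patch) of what the traversal change implies.
        from buildstream import Scope   # also importable from buildstream.element, as cli.py does above

        def dependency_names(element, scope):
            # `element` is assumed to be a resolved Element instance.
            return [dep.name for dep in element.dependencies(scope)]

        # Hypothetical expectations for an element with build and runtime dependencies:
        #   dependency_names(element, Scope.RUN)   -> runtime deps, then the element itself
        #   dependency_names(element, Scope.BUILD) -> build deps (with their runtime deps), element excluded
        #   dependency_names(element, Scope.NONE)  -> [element.name]  (just the element)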

  • buildstream/plugins/sources/git.py
    @@ -25,6 +25,11 @@ git - stage files from a git repository
    
       * git
    
    +.. attention::
    +
    +    Note that this plugin **will checkout git submodules by default**; even if
    +    they are not specified in the `.bst` file.
    +
     **Usage:**
    
     .. code:: yaml
    

  • buildstream/types.py
    @@ -48,6 +48,12 @@ class Scope(Enum):
         itself.
         """
    
    +    NONE = 4
    +    """Just the element itself, no dependencies.
    +
    +    *Since: 1.4*
    +    """
    +
    
     class Consistency():
         """Defines the various consistency states of a :class:`.Source`.
    

  • tests/frontend/buildcheckout.py
    @@ -86,13 +86,14 @@ def test_build_invalid_suffix_dep(datafiles, cli, strict, hardlinks):
    
    
     @pytest.mark.datafiles(DATA_DIR)
    -@pytest.mark.parametrize("deps", [("run"), ("none")])
    +@pytest.mark.parametrize("deps", [("run"), ("none"), ("build")])
     def test_build_checkout_deps(datafiles, cli, deps):
         project = os.path.join(datafiles.dirname, datafiles.basename)
         checkout = os.path.join(cli.directory, 'checkout')
    +    element_name = "checkout-deps.bst"
    
         # First build it
    -    result = cli.run(project=project, args=['build', 'target.bst'])
    +    result = cli.run(project=project, args=['build', element_name])
         result.assert_success()
    
         # Assert that after a successful build, the builddir is empty
    @@ -101,20 +102,25 @@ def test_build_checkout_deps(datafiles, cli, deps):
         assert not os.listdir(builddir)
    
         # Now check it out
    -    result = cli.run(project=project, args=['checkout', 'target.bst', '--deps', deps, checkout])
    +    result = cli.run(project=project, args=['checkout', element_name, '--deps', deps, checkout])
         result.assert_success()
    
    -    # Check that the executable hello file is found in the checkout
    -    filename = os.path.join(checkout, 'usr', 'bin', 'hello')
    +    # Verify output of this element
    +    filename = os.path.join(checkout, 'etc', 'buildstream', 'config')
    +    if deps == "build":
    +        assert not os.path.exists(filename)
    +    else:
    +        assert os.path.exists(filename)
    
    -    if deps == "run":
    +    # Verify output of this element's build dependencies
    +    filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
    +    if deps == "build":
             assert os.path.exists(filename)
         else:
             assert not os.path.exists(filename)
    
    -    # Check that the executable hello file is found in the checkout
    -    filename = os.path.join(checkout, 'usr', 'include', 'pony.h')
    -
    +    # Verify output of this element's runtime dependencies
    +    filename = os.path.join(checkout, 'usr', 'bin', 'hello')
         if deps == "run":
             assert os.path.exists(filename)
         else:
    


