[Notes] [Git][BuildStream/buildstream][bschubert/pipeline] 30 commits: _frontend/app.py: Use buildtree for interactive shell on build failure

Benjamin Schubert pushed to branch bschubert/pipeline at BuildStream / buildstream

Commits:

23 changed files:

Changes:

  • buildstream/_artifactcache/cascache.py
    @@ -53,7 +53,7 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
         #
         @staticmethod
         def _new_from_config_node(spec_node, basedir=None):
    -        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
    +        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
             url = _yaml.node_get(spec_node, str, 'url')
             push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
             if not url:
    @@ -61,7 +61,7 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
                 raise LoadError(LoadErrorReason.INVALID_DATA,
                                 "{}: empty artifact cache URL".format(provenance))

    -        instance_name = _yaml.node_get(spec_node, str, 'instance_name', default_value=None)
    +        instance_name = _yaml.node_get(spec_node, str, 'instance-name', default_value=None)

             server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
             if server_cert and basedir:
    
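    Note: with this rename, an artifact cache spec in YAML uses the dashed
    key. A minimal sketch (values illustrative), matching the keys validated
    above:

        artifacts:
          url: https://cache.example.com:11002
          push: true
          server-cert: /keys/server.crt
          client-key: /keys/client.key
          client-cert: /keys/client.crt
          instance-name: main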

  • buildstream/_context.py
    @@ -34,6 +34,7 @@ from ._artifactcache import ArtifactCache
     from ._artifactcache.cascache import CASCache
     from ._workspaces import Workspaces, WorkspaceProjectCache, WORKSPACE_PROJECT_FILE
     from .plugin import _plugin_lookup
    +from .sandbox import SandboxRemote


     # Context()
    @@ -72,6 +73,9 @@ class Context():
             # The locations from which to push and pull prebuilt artifacts
             self.artifact_cache_specs = None

    +        # The global remote execution configuration
    +        self.remote_execution_specs = None
    +
             # The directory to store build logs
             self.logdir = None
    @@ -187,7 +191,7 @@
             _yaml.node_validate(defaults, [
                 'sourcedir', 'builddir', 'artifactdir', 'logdir',
                 'scheduler', 'artifacts', 'logging', 'projects',
    -            'cache', 'prompt', 'workspacedir',
    +            'cache', 'prompt', 'workspacedir', 'remote-execution'
             ])

             for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
    @@ -212,6 +216,8 @@
             # Load artifact share configuration
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)

    +        self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
    +
             # Load pull build trees configuration
             self.pull_buildtrees = _yaml.node_get(cache, bool, 'pull-buildtrees')
    @@ -271,7 +277,8 @@
             # Shallow validation of overrides, parts of buildstream which rely
             # on the overrides are expected to validate elsewhere.
             for _, overrides in _yaml.node_items(self._project_overrides):
    -            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror'])
    +            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror',
    +                                            'remote-execution'])

             profile_end(Topics.LOAD_CONTEXT, 'load')
     
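    Note: the added 'remote-execution' key means a top-level section like the
    following is now accepted and validated in the user configuration (a
    sketch; the full format is shown in the using_config.rst change below):

        remote-execution:
          execution-service:
            url: http://execution.example.com:50051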
    

  • buildstream/_frontend/app.py
    @@ -38,7 +38,7 @@ from .._message import Message, MessageType, unconditional_messages
     from .._stream import Stream
     from .._versions import BST_FORMAT_VERSION
     from .. import _yaml
    -from .._scheduler import ElementJob
    +from .._scheduler import ElementJob, JobStatus

     # Import frontend assets
     from . import Profile, LogLine, Status
    @@ -515,13 +515,13 @@ class App():
             self._status.add_job(job)
             self._maybe_render_status()

    -    def _job_completed(self, job, success):
    +    def _job_completed(self, job, status):
             self._status.remove_job(job)
             self._maybe_render_status()

             # Dont attempt to handle a failure if the user has already opted to
             # terminate
    -        if not success and not self.stream.terminated:
    +        if status == JobStatus.FAIL and not self.stream.terminated:

                 if isinstance(job, ElementJob):
                     element = job.element
    @@ -599,7 +599,7 @@
                         click.echo("\nDropping into an interactive shell in the failed build sandbox\n", err=True)
                         try:
                             prompt = self.shell_prompt(element)
    -                        self.stream.shell(element, Scope.BUILD, prompt, isolate=True)
    +                        self.stream.shell(element, Scope.BUILD, prompt, isolate=True, usebuildtree=True)
                         except BstError as e:
                             click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
                     elif choice == 'log':
    

  • buildstream/_project.py
    @@ -507,7 +507,16 @@ class Project():
             self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)

             # Load remote-execution configuration for this project
    -        self.remote_execution_specs = SandboxRemote.specs_from_config_node(config, self.directory)
    +        project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
    +        override_specs = SandboxRemote.specs_from_config_node(
    +            self._context.get_overrides(self.name), self.directory)
    +
    +        if override_specs is not None:
    +            self.remote_execution_specs = override_specs
    +        elif project_specs is not None:
    +            self.remote_execution_specs = project_specs
    +        else:
    +            self.remote_execution_specs = self._context.remote_execution_specs

             # Load sandbox environment variables
             self.base_environment = _yaml.node_get(config, Mapping, 'environment')
    
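    Note: the fallback chain above is equivalent to this condensed sketch
    (illustrative only, not part of the commit): a user-configuration project
    override wins over project.conf, which wins over the global user config.

        specs = override_specs if override_specs is not None \
            else project_specs if project_specs is not None \
            else self._context.remote_execution_specs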

  • buildstream/_scheduler/__init__.py
    @@ -26,4 +26,4 @@ from .queues.pushqueue import PushQueue
     from .queues.pullqueue import PullQueue

     from .scheduler import Scheduler, SchedStatus
    -from .jobs import ElementJob
    +from .jobs import ElementJob, JobStatus

  • buildstream/_scheduler/jobs/__init__.py
    @@ -20,3 +20,4 @@
     from .elementjob import ElementJob
     from .cachesizejob import CacheSizeJob
     from .cleanupjob import CleanupJob
    +from .job import JobStatus

  • buildstream/_scheduler/jobs/cachesizejob.py
    @@ -16,7 +16,7 @@
     #  Author:
     #        Tristan Daniël Maat <tristan maat codethink co uk>
     #
    -from .job import Job
    +from .job import Job, JobStatus


     class CacheSizeJob(Job):
    @@ -30,8 +30,8 @@ class CacheSizeJob(Job):
         def child_process(self):
             return self._artifacts.compute_cache_size()

    -    def parent_complete(self, success, result):
    -        if success:
    +    def parent_complete(self, status, result):
    +        if status == JobStatus.OK:
                 self._artifacts.set_cache_size(result)

                 if self._complete_cb:
    

  • buildstream/_scheduler/jobs/cleanupjob.py
    @@ -16,7 +16,7 @@
     #  Author:
     #        Tristan Daniël Maat <tristan maat codethink co uk>
     #
    -from .job import Job
    +from .job import Job, JobStatus


     class CleanupJob(Job):
    @@ -29,6 +29,6 @@ class CleanupJob(Job):
         def child_process(self):
             return self._artifacts.clean()

    -    def parent_complete(self, success, result):
    -        if success:
    +    def parent_complete(self, status, result):
    +        if status == JobStatus.OK:
                 self._artifacts.set_cache_size(result)

  • buildstream/_scheduler/jobs/elementjob.py
    @@ -60,7 +60,7 @@ from .job import Job
     #     Args:
     #        job (Job): The job object which completed
     #        element (Element): The element passed to the Job() constructor
    -#        success (bool): True if the action_cb did not raise an exception
    +#        status (JobStatus): The status of whether the workload raised an exception
     #        result (object): The deserialized object returned by the `action_cb`, or None
     #                         if `success` is False
     #
    @@ -93,8 +93,8 @@ class ElementJob(Job):
             # Run the action
             return self._action_cb(self._element)

    -    def parent_complete(self, success, result):
    -        self._complete_cb(self, self._element, success, self._result)
    +    def parent_complete(self, status, result):
    +        self._complete_cb(self, self._element, status, self._result)

         def message(self, message_type, message, **kwargs):
             args = dict(kwargs)
    

  • buildstream/_scheduler/jobs/job.py
    @@ -28,8 +28,6 @@ import traceback
     import asyncio
     import multiprocessing

    -import psutil
    -
     # BuildStream toplevel imports
     from ..._exceptions import ImplError, BstError, set_last_task_error, SkipJob
     from ..._message import Message, MessageType, unconditional_messages
    @@ -43,6 +41,22 @@ RC_PERM_FAIL = 2
     RC_SKIPPED = 3


    +# JobStatus:
    +#
    +# The job completion status, passed back through the
    +# complete callbacks.
    +#
    +class JobStatus():
    +    # Job succeeded
    +    OK = 0
    +
    +    # A temporary BstError was raised
    +    FAIL = 1
    +
    +    # A SkipJob was raised
    +    SKIPPED = 3
    +
    +
     # Used to distinguish between status messages and return values
     class Envelope():
         def __init__(self, message_type, message):
    @@ -118,7 +132,6 @@ class Job():
             self._max_retries = max_retries        # Maximum number of automatic retries
             self._result = None                    # Return value of child action in the parent
             self._tries = 0                        # Try count, for retryable jobs
    -        self._skipped_flag = False             # Indicate whether the job was skipped.
             self._terminated = False               # Whether this job has been explicitly terminated

             # If False, a retry will not be attempted regardless of whether _tries is less than _max_retries.
    @@ -215,17 +228,10 @@
         # Forcefully kill the process, and any children it might have.
         #
         def kill(self):
    -
             # Force kill
             self.message(MessageType.WARN,
                          "{} did not terminate gracefully, killing".format(self.action_name))
    -
    -        try:
    -            utils._kill_process_tree(self._process.pid)
    -        # This can happen if the process died of its own accord before
    -        # we try to kill it
    -        except psutil.NoSuchProcess:
    -            return
    +        utils._kill_process_tree(self._process.pid)

         # suspend()
         #
    @@ -282,18 +288,6 @@
         def set_task_id(self, task_id):
             self._task_id = task_id

    -    # skipped
    -    #
    -    # This will evaluate to True if the job was skipped
    -    # during processing, or if it was forcefully terminated.
    -    #
    -    # Returns:
    -    #    (bool): Whether the job should appear as skipped
    -    #
    -    @property
    -    def skipped(self):
    -        return self._skipped_flag or self._terminated
    -
         #######################################################
         #                  Abstract Methods                   #
         #######################################################
    @@ -304,10 +298,10 @@
         # pass the result to the main thread.
         #
         # Args:
    -    #    success (bool): Whether the job was successful.
    +    #    status (JobStatus): The job exit status
         #    result (any): The result returned by child_process().
         #
    -    def parent_complete(self, success, result):
    +    def parent_complete(self, status, result):
             raise ImplError("Job '{kind}' does not implement parent_complete()"
                             .format(kind=type(self).__name__))
    @@ -571,16 +565,23 @@
             #
             self._retry_flag = returncode == RC_FAIL

    -        # Set the flag to alert Queue that this job skipped.
    -        self._skipped_flag = returncode == RC_SKIPPED
    -
             if self._retry_flag and (self._tries <= self._max_retries) and not self._scheduler.terminated:
                 self.spawn()
                 return

    -        success = returncode in (RC_OK, RC_SKIPPED)
    -        self.parent_complete(success, self._result)
    -        self._scheduler.job_completed(self, success)
    +        # Resolve the outward facing overall job completion status
    +        #
    +        if returncode == RC_OK:
    +            status = JobStatus.OK
    +        elif returncode == RC_SKIPPED:
    +            status = JobStatus.SKIPPED
    +        elif returncode in (RC_FAIL, RC_PERM_FAIL):
    +            status = JobStatus.FAIL
    +        else:
    +            status = JobStatus.FAIL
    +
    +        self.parent_complete(status, self._result)
    +        self._scheduler.job_completed(self, status)

             # Force the deletion of the queue and process objects to try and clean up FDs
             self._queue = self._process = None
    
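    Note: subclasses now receive a JobStatus instead of a bool. A minimal
    sketch of the new callback contract (class name hypothetical):

        from .job import Job, JobStatus

        class ExampleJob(Job):
            def parent_complete(self, status, result):
                # Branch on the richer tri-state status instead of a bool
                if status == JobStatus.OK:
                    pass        # use the result
                elif status == JobStatus.SKIPPED:
                    pass        # nothing was processed
                else:           # JobStatus.FAIL
                    pass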

  • buildstream/_scheduler/queues/buildqueue.py
    @@ -21,7 +21,7 @@
     from datetime import timedelta

     from . import Queue, QueueStatus
    -from ..jobs import ElementJob
    +from ..jobs import ElementJob, JobStatus
     from ..resources import ResourceType
     from ..._message import MessageType
    @@ -71,9 +71,6 @@ class BuildQueue(Queue):
             return element._assemble()

         def status(self, element):
    -        # state of dependencies may have changed, recalculate element state
    -        element._update_state()
    -
             if not element._is_required():
                 # Artifact is not currently required but it may be requested later.
                 # Keep it in the queue.
    @@ -104,11 +101,16 @@
             if artifacts.has_quota_exceeded():
                 self._scheduler.check_cache_size()

    -    def done(self, job, element, result, success):
    +    def done(self, job, element, result, status):

             # Inform element in main process that assembly is done
             element._assemble_done()

    +        if element.is_workspaced:
    +            # Update the state of all reverse dependencies
    +            for reverse_dependency in element.reverse_build_dependencies(recurse=True):
    +                reverse_dependency._update_state()
    +
             # This has to be done after _assemble_done, such that the
             # element may register its cache key as required
             #
    @@ -117,5 +119,5 @@
             #        artifact cache size for a successful build even though we know a
             #        failed build also grows the artifact cache size.
             #
    -        if success:
    +        if status == JobStatus.OK:
                 self._check_cache_size(job, element, result)

  • buildstream/_scheduler/queues/fetchqueue.py
    @@ -24,6 +24,7 @@ from ... import Consistency
     # Local imports
     from . import Queue, QueueStatus
     from ..resources import ResourceType
    +from ..jobs import JobStatus


     # A queue which fetches element sources
    @@ -43,9 +44,6 @@ class FetchQueue(Queue):
             element._fetch()

         def status(self, element):
    -        # state of dependencies may have changed, recalculate element state
    -        element._update_state()
    -
             if not element._is_required():
                 # Artifact is not currently required but it may be requested later.
                 # Keep it in the queue.
    @@ -66,9 +64,9 @@

             return QueueStatus.READY

    -    def done(self, _, element, result, success):
    +    def done(self, _, element, result, status):

    -        if not success:
    +        if status == JobStatus.FAIL:
                 return

             element._update_state()
    

  • buildstream/_scheduler/queues/pullqueue.py
    @@ -21,6 +21,7 @@
     # Local imports
     from . import Queue, QueueStatus
     from ..resources import ResourceType
    +from ..jobs import JobStatus
     from ..._exceptions import SkipJob
    @@ -38,9 +39,6 @@ class PullQueue(Queue):
                 raise SkipJob(self.action_name)

         def status(self, element):
    -        # state of dependencies may have changed, recalculate element state
    -        element._update_state()
    -
             if not element._is_required():
                 # Artifact is not currently required but it may be requested later.
                 # Keep it in the queue.
    @@ -54,9 +52,9 @@
             else:
                 return QueueStatus.SKIP

    -    def done(self, _, element, result, success):
    +    def done(self, _, element, result, status):

    -        if not success:
    +        if status == JobStatus.FAIL:
                 return

             element._pull_done()
    @@ -64,4 +62,5 @@
             # Build jobs will check the "approximate" size first. Since we
             # do not get an artifact size from pull jobs, we have to
             # actually check the cache size.
    -        self._scheduler.check_cache_size()
    +        if status == JobStatus.OK:
    +            self._scheduler.check_cache_size()

  • buildstream/_scheduler/queues/queue.py
    @@ -25,7 +25,7 @@ from enum import Enum
     import traceback

     # Local imports
    -from ..jobs import ElementJob
    +from ..jobs import ElementJob, JobStatus
     from ..resources import ResourceType

     # BuildStream toplevel imports
    @@ -133,10 +133,9 @@ class Queue():
         #    job (Job): The job which completed processing
         #    element (Element): The element which completed processing
         #    result (any): The return value of the process() implementation
    -    #    success (bool): True if the process() implementation did not
    -    #                    raise any exception
    +    #    status (JobStatus): The return status of the Job
         #
    -    def done(self, job, element, result, success):
    +    def done(self, job, element, result, status):
             pass

         #####################################################
    @@ -291,7 +290,7 @@
         #
         # See the Job object for an explanation of the call signature
         #
    -    def _job_done(self, job, element, success, result):
    +    def _job_done(self, job, element, status, result):

             # Update values that need to be synchronized in the main task
             # before calling any queue implementation
    @@ -301,7 +300,7 @@
             # and determine if it should be considered as processed
             # or skipped.
             try:
    -            self.done(job, element, result, success)
    +            self.done(job, element, result, status)
             except BstError as e:

                 # Report error and mark as failed
    @@ -332,12 +331,10 @@
                 # All jobs get placed on the done queue for later processing.
                 self._done_queue.append(job)

    -            # A Job can be skipped whether or not it has failed,
    -            # we want to only bookkeep them as processed or failed
    -            # if they are not skipped.
    -            if job.skipped:
    +            # These lists are for bookkeeping purposes for the UI and logging.
    +            if status == JobStatus.SKIPPED:
                     self.skipped_elements.append(element)
    -            elif success:
    +            elif status == JobStatus.OK:
                     self.processed_elements.append(element)
                 else:
                     self.failed_elements.append(element)
    

  • buildstream/_scheduler/queues/trackqueue.py
    @@ -24,6 +24,7 @@ from ...plugin import _plugin_lookup
     # Local imports
     from . import Queue, QueueStatus
     from ..resources import ResourceType
    +from ..jobs import JobStatus


     # A queue which tracks sources
    @@ -47,9 +48,9 @@ class TrackQueue(Queue):

             return QueueStatus.READY

    -    def done(self, _, element, result, success):
    +    def done(self, _, element, result, status):

    -        if not success:
    +        if status == JobStatus.FAIL:
                 return

             # Set the new refs in the main process one by one as they complete
    @@ -58,3 +59,6 @@
                 source._save_ref(new_ref)

             element._tracking_done()
    +
    +        for rdep in element.reverse_build_dependencies(recurse=True):
    +            rdep._update_state()

  • buildstream/_scheduler/scheduler.py
    @@ -38,6 +38,16 @@ class SchedStatus():
         TERMINATED = 1


    +# Our _REDUNDANT_EXCLUSIVE_ACTIONS jobs are special ones
    +# which we launch dynamically, they have the property of being
    +# meaningless to queue if one is already queued, and it also
    +# doesnt make sense to run them in parallel
    +#
    +_ACTION_NAME_CLEANUP = 'cleanup'
    +_ACTION_NAME_CACHE_SIZE = 'cache_size'
    +_REDUNDANT_EXCLUSIVE_ACTIONS = [_ACTION_NAME_CLEANUP, _ACTION_NAME_CACHE_SIZE]
    +
    +
     # Scheduler()
     #
     # The scheduler operates on a list queues, each of which is meant to accomplish
    @@ -94,6 +104,15 @@ class Scheduler():
             self._suspendtime = None
             self._queue_jobs = True      # Whether we should continue to queue jobs

    +        # Whether our exclusive jobs, like 'cleanup' are currently already
    +        # waiting or active.
    +        #
    +        # This is just a bit quicker than scanning the wait queue and active
    +        # queue and comparing job action names.
    +        #
    +        self._exclusive_waiting = set()
    +        self._exclusive_active = set()
    +
             self._resources = Resources(context.sched_builders,
                                         context.sched_fetchers,
                                         context.sched_pushers)
    @@ -211,19 +230,6 @@
                 starttime = timenow
             return timenow - starttime

    -    # schedule_jobs()
    -    #
    -    # Args:
    -    #     jobs ([Job]): A list of jobs to schedule
    -    #
    -    # Schedule 'Job's for the scheduler to run. Jobs scheduled will be
    -    # run as soon any other queueing jobs finish, provided sufficient
    -    # resources are available for them to run
    -    #
    -    def schedule_jobs(self, jobs):
    -        for job in jobs:
    -            self.waiting_jobs.append(job)
    -
         # job_completed():
         #
         # Called when a Job completes
    @@ -231,12 +237,14 @@
         # Args:
         #    queue (Queue): The Queue holding a complete job
         #    job (Job): The completed Job
    -    #    success (bool): Whether the Job completed with a success status
    +    #    status (JobStatus): The status of the completed job
         #
    -    def job_completed(self, job, success):
    +    def job_completed(self, job, status):
             self._resources.clear_job_resources(job)
             self.active_jobs.remove(job)
    -        self._job_complete_callback(job, success)
    +        if job.action_name in _REDUNDANT_EXCLUSIVE_ACTIONS:
    +            self._exclusive_active.remove(job.action_name)
    +        self._job_complete_callback(job, status)
             self._schedule_queue_jobs()
             self._sched()
    @@ -246,18 +254,13 @@
         # size is calculated, a cleanup job will be run automatically
         # if needed.
         #
    -    # FIXME: This should ensure that only one cache size job
    -    #        is ever pending at a given time. If a cache size
    -    #        job is already running, it is correct to queue
    -    #        a new one, it is incorrect to have more than one
    -    #        of these jobs pending at a given time, though.
    -    #
         def check_cache_size(self):
    -        job = CacheSizeJob(self, 'cache_size', 'cache_size/cache_size',
    +        job = CacheSizeJob(self, _ACTION_NAME_CACHE_SIZE,
    +                           'cache_size/cache_size',
                                resources=[ResourceType.CACHE,
                                           ResourceType.PROCESS],
                                complete_cb=self._run_cleanup)
    -        self.schedule_jobs([job])
    +        self._schedule_jobs([job])

         #######################################################
         #                  Local Private Methods              #
    @@ -276,10 +279,19 @@
                 if not self._resources.reserve_job_resources(job):
                     continue

    +            # Postpone these jobs if one is already running
    +            if job.action_name in _REDUNDANT_EXCLUSIVE_ACTIONS and \
    +               job.action_name in self._exclusive_active:
    +                continue
    +
                 job.spawn()
                 self.waiting_jobs.remove(job)
                 self.active_jobs.append(job)

    +            if job.action_name in _REDUNDANT_EXCLUSIVE_ACTIONS:
    +                self._exclusive_waiting.remove(job.action_name)
    +                self._exclusive_active.add(job.action_name)
    +
                 if self._job_start_callback:
                     self._job_start_callback(job)
    @@ -287,6 +299,33 @@
             if not self.active_jobs and not self.waiting_jobs:
                 self.loop.stop()

    +    # _schedule_jobs()
    +    #
    +    # The main entry point for jobs to be scheduled.
    +    #
    +    # This is called either as a result of scanning the queues
    +    # in _schedule_queue_jobs(), or directly by the Scheduler
    +    # to insert special jobs like cleanups.
    +    #
    +    # Args:
    +    #     jobs ([Job]): A list of jobs to schedule
    +    #
    +    def _schedule_jobs(self, jobs):
    +        for job in jobs:
    +
    +            # Special treatment of our redundant exclusive jobs
    +            #
    +            if job.action_name in _REDUNDANT_EXCLUSIVE_ACTIONS:
    +
    +                # Drop the job if one is already queued
    +                if job.action_name in self._exclusive_waiting:
    +                    continue
    +
    +                # Mark this action type as queued
    +                self._exclusive_waiting.add(job.action_name)
    +
    +            self.waiting_jobs.append(job)
    +
         # _schedule_queue_jobs()
         #
         # Ask the queues what jobs they want to schedule and schedule
    @@ -331,7 +370,7 @@
                 # the next queue and process them.
                 process_queues = any(q.dequeue_ready() for q in self.queues)

    -        self.schedule_jobs(ready)
    +        self._schedule_jobs(ready)
             self._sched()

         # _run_cleanup()
    @@ -353,11 +392,11 @@
             if not artifacts.has_quota_exceeded():
                 return

    -        job = CleanupJob(self, 'cleanup', 'cleanup/cleanup',
    +        job = CleanupJob(self, _ACTION_NAME_CLEANUP, 'cleanup/cleanup',
                              resources=[ResourceType.CACHE,
                                         ResourceType.PROCESS],
                              exclusive_resources=[ResourceType.CACHE])
    -        self.schedule_jobs([job])
    +        self._schedule_jobs([job])

         # _suspend_jobs()
         #
    
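    Note: the effect of the exclusive-action bookkeeping is that repeated
    triggers collapse into a single queued job. An illustrative sketch:

        scheduler.check_cache_size()
        scheduler.check_cache_size()  # dropped: 'cache_size' already waiting
        # Only one 'cache_size' job ends up in scheduler.waiting_jobs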

  • buildstream/element.py
    @@ -65,7 +65,7 @@ Miscellaneous abstract methods also exist:

     * :func:`Element.generate_script() <buildstream.element.Element.generate_script>`

    -  For the purpose of ``bst source bundle``, an Element may optionally implement this.
    +  For the purpose of ``bst source checkout --include-build-scripts``, an Element may optionally implement this.


     Class Reference
    @@ -197,6 +197,7 @@ class Element(Plugin):

             self.__runtime_dependencies = []        # Direct runtime dependency Elements
             self.__build_dependencies = []          # Direct build dependency Elements
    +        self.__reverse_build_dependencies = []    # Direct reverse build dependency Elements
             self.__sources = []                     # List of Sources
             self.__weak_cache_key = None            # Our cached weak cache key
             self.__strict_cache_key = None          # Our cached cache key for strict builds
    @@ -227,6 +228,8 @@
             self.__metadata_workspaced = {}               # Boolean of whether it's workspaced
             self.__metadata_workspaced_dependencies = {}  # List of which dependencies are workspaced

    +        self.__is_workspaced = None
    +
             # Ensure we have loaded this class's defaults
             self.__init_defaults(plugin_conf)
    @@ -370,6 +373,13 @@
         #############################################################
         #                       Public Methods                      #
         #############################################################
    +    @property
    +    def is_workspaced(self):
    +        if self.__is_workspaced is None:
    +            self.__is_workspaced = self._get_workspace() is not None
    +
    +        return self.__is_workspaced
    +
         def sources(self):
             """A generator function to enumerate the element sources
    @@ -439,6 +449,27 @@
             if should_yield and (recurse or recursed) and scope != Scope.BUILD:
                 yield self

    +    def reverse_build_dependencies(self, recurse=False):
    +        if not recurse:
    +            yield from self.__reverse_build_dependencies
    +            return
    +
    +        visited = set()
    +
    +        def recurse_rdeps(element):
    +            if element in visited:
    +                return
    +
    +            visited.add(element)
    +
    +            yield element
    +
    +            for rdep in element.__reverse_build_dependencies:
    +                yield from recurse_rdeps(rdep)
    +
    +        for rdep in self.__reverse_build_dependencies:
    +            yield from recurse_rdeps(rdep)
    +
         def search(self, scope, name):
             """Search for a dependency by name
    @@ -930,6 +961,7 @@
             for meta_dep in meta.build_dependencies:
                 dependency = Element._new_from_meta(meta_dep)
                 element.__build_dependencies.append(dependency)
    +            dependency.__reverse_build_dependencies.append(element)

             return element
    @@ -1446,6 +1478,7 @@
         # This unblocks pull/fetch/build.
         #
         def _set_required(self):
    +        # FIXME: this should enqueue stuff in the queue, it should not be here by default
             if self.__required:
                 # Already done
                 return
    @@ -1456,6 +1489,7 @@
             for dep in self.dependencies(Scope.RUN, recurse=False):
                 dep._set_required()

    +        # FIXME: this should not be done at all here
             self._update_state()

         # _is_required():
    
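    Note: the visited set ensures each reverse dependency is yielded exactly
    once, even in diamond-shaped graphs. A usage sketch (element names
    hypothetical):

        # app and tool both build-depend on libfoo
        rdeps = list(libfoo.reverse_build_dependencies(recurse=True))
        assert app in rdeps and tool in rdeps
        assert len(rdeps) == len(set(rdeps))  # no duplicates from shared paths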

  • buildstream/plugins/elements/script.py
    @@ -42,6 +42,9 @@ import buildstream
     class ScriptElement(buildstream.ScriptElement):
         # pylint: disable=attribute-defined-outside-init

    +    # This plugin has been modified to avoid the use of Sandbox.get_directory
    +    BST_VIRTUAL_DIRECTORY = True
    +
         def configure(self, node):
             for n in self.node_get_member(node, list, 'layout', []):
                 dst = self.node_subst_member(n, 'destination')
    

  • buildstream/sandbox/_sandboxremote.py
    @@ -62,10 +62,32 @@ class SandboxRemote(Sandbox):
             self.storage_url = config.storage_service['url']
             self.exec_url = config.exec_service['url']

    +        exec_certs = {}
    +        for key in ['client-cert', 'client-key', 'server-cert']:
    +            if key in config.exec_service:
    +                with open(config.exec_service[key], 'rb') as f:
    +                    exec_certs[key] = f.read()
    +
    +        self.exec_credentials = grpc.ssl_channel_credentials(
    +            root_certificates=exec_certs.get('server-cert'),
    +            private_key=exec_certs.get('client-key'),
    +            certificate_chain=exec_certs.get('client-cert'))
    +
    +        action_certs = {}
    +        for key in ['client-cert', 'client-key', 'server-cert']:
    +            if key in config.action_service:
    +                with open(config.action_service[key], 'rb') as f:
    +                    action_certs[key] = f.read()
    +
             if config.action_service:
                 self.action_url = config.action_service['url']
    +            self.action_credentials = grpc.ssl_channel_credentials(
    +                root_certificates=action_certs.get('server-cert'),
    +                private_key=action_certs.get('client-key'),
    +                certificate_chain=action_certs.get('client-cert'))
             else:
                 self.action_url = None
    +            self.action_credentials = None

             self.server_instance = config.exec_service.get('instance', None)
             self.storage_instance = config.storage_service.get('instance', None)
    @@ -81,7 +103,7 @@
             self._get_context().message(Message(None, MessageType.INFO, msg))

         @staticmethod
    -    def specs_from_config_node(config_node, basedir):
    +    def specs_from_config_node(config_node, basedir=None):

             def require_node(config, keyname):
                 val = config.get(keyname)
    @@ -109,10 +131,10 @@
             remote_exec_storage_config = require_node(remote_config, 'storage-service')
             remote_exec_action_config = remote_config.get('action-cache-service', {})

    -        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'])
    +        _yaml.node_validate(remote_exec_service_config, ['url', 'instance'] + tls_keys)
             _yaml.node_validate(remote_exec_storage_config, ['url', 'instance'] + tls_keys)
             if remote_exec_action_config:
    -            _yaml.node_validate(remote_exec_action_config, ['url'])
    +            _yaml.node_validate(remote_exec_action_config, ['url'] + tls_keys)
             else:
                 remote_config['action-service'] = None
    @@ -135,6 +157,19 @@
                                   "remote-execution configuration. Your config is missing '{}'."
                                   .format(str(provenance), tls_keys, key))

    +        def resolve_path(path):
    +            if basedir and path:
    +                return os.path.join(basedir, path)
    +            else:
    +                return path
    +
    +        for key in tls_keys:
    +            for d in (remote_config['execution-service'],
    +                      remote_config['storage-service'],
    +                      remote_exec_action_config):
    +                if key in d:
    +                    d[key] = resolve_path(d[key])
    +
             spec = RemoteExecutionSpec(remote_config['execution-service'],
                                        remote_config['storage-service'],
                                        remote_exec_action_config)
    @@ -295,6 +330,8 @@
                                "for example: http://buildservice:50051.")
             if url.scheme == 'http':
                 channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
    +        elif url.scheme == 'https':
    +            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.exec_credentials)
             else:
                 raise SandboxError("Remote execution currently only supports the 'http' protocol "
                                    "and '{}' was supplied.".format(url.scheme))
    @@ -352,11 +389,11 @@
             if not url.port:
                 raise SandboxError("You must supply a protocol and port number in the action-cache-service url, "
                                    "for example: http://buildservice:50051.")
    -        if not url.scheme == "http":
    -            raise SandboxError("Currently only support http for the action cache"
    -                               "and {} was supplied".format(url.scheme))
    +        if url.scheme == 'http':
    +            channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
    +        elif url.scheme == 'https':
    +            channel = grpc.secure_channel('{}:{}'.format(url.hostname, url.port), self.action_credentials)

    -        channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
             request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
             stub = remote_execution_pb2_grpc.ActionCacheStub(channel)
             try:
    
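    Note: resolve_path() joins relative TLS key paths to the project
    directory, while the user configuration calls specs_from_config_node()
    with basedir=None and leaves paths untouched. A standalone sketch of the
    same behaviour:

        import os

        def resolve_path(path, basedir=None):
            if basedir and path:
                return os.path.join(basedir, path)
            return path

        assert resolve_path('keys/ca.crt', '/proj') == '/proj/keys/ca.crt'
        assert resolve_path('/keys/ca.crt', '/proj') == '/keys/ca.crt'  # absolute paths win
        assert resolve_path('keys/ca.crt') == 'keys/ca.crt'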

  • buildstream/sandbox/sandbox.py
    @@ -288,8 +288,8 @@ class Sandbox():
                 command = [command]

             if self.__batch:
    -            if flags != self.__batch.flags:
    -                raise SandboxError("Inconsistent sandbox flags in single command batch")
    +            assert flags == self.__batch.flags, \
    +                "Inconsistent sandbox flags in single command batch"

                 batch_command = _SandboxBatchCommand(command, cwd=cwd, env=env, label=label)
    @@ -326,8 +326,8 @@

             if self.__batch:
                 # Nested batch
    -            if flags != self.__batch.flags:
    -                raise SandboxError("Inconsistent sandbox flags in single command batch")
    +            assert flags == self.__batch.flags, \
    +                "Inconsistent sandbox flags in single command batch"

                 parent_group = self.__batch.current_group
                 parent_group.append(group)
    @@ -592,7 +592,7 @@ class _SandboxBatch():
             if command.label:
                 context = self.sandbox._get_context()
                 message = Message(self.sandbox._get_plugin_id(), MessageType.STATUS,
    -                              'Running {}'.format(command.label))
    +                              'Running command', detail=command.label)
                 context.message(message)

             exitcode = self.sandbox._run(command.command, self.flags, cwd=command.cwd, env=command.env)
    
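    Note: mixing sandbox flags within one batch is treated as a programming
    error rather than a user-recoverable failure, hence assert instead of
    SandboxError. A sketch of the contract, assuming the batch() context
    manager API:

        with sandbox.batch(SandboxFlags.ROOT_READ_ONLY):
            sandbox.run(['make'], SandboxFlags.ROOT_READ_ONLY)  # same flags: OK
            sandbox.run(['make', 'check'], SandboxFlags.NONE)   # would trip the assert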

  • buildstream/utils.py
    @@ -1050,6 +1050,11 @@ def _kill_process_tree(pid):
                 # Ignore this error, it can happen with
                 # some setuid bwrap processes.
                 pass
    +        except psutil.NoSuchProcess:
    +            # It is certain that this has already been sent
    +            # SIGTERM, so there is a window where the process
    +            # could have exited already.
    +            pass

         # Bloody Murder
         for child in children:
    
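    Note: the new handler covers the window between sending SIGTERM and the
    forced kill, during which the process may already have exited. A minimal
    standalone sketch of the same pattern:

        import psutil

        def hard_kill(proc):
            try:
                proc.kill()
            except psutil.NoSuchProcess:
                pass  # already exited between TERM and KILL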

  • doc/source/format_project.rst
    @@ -218,6 +218,7 @@ The use of ports are required to distinguish between pull only access and
     push/pull access. For information regarding the server/client certificates
     and keys, please see: :ref:`Key pair for the server <server_authentication>`.

    +.. _project_remote_execution:

     Remote execution
     ~~~~~~~~~~~~~~~~
    @@ -243,9 +244,6 @@ using the `remote-execution` option:
         action-cache-service:
           url: http://bar.action.com:50052

    -The execution-service part of remote execution does not support encrypted
    -connections yet, so the protocol must always be http.
    -
     storage-service specifies a remote CAS store and the parameters are the
     same as those used to specify an :ref:`artifact server <artifacts>`.
    @@ -268,6 +266,9 @@ instance names.

     The Remote Execution API can be found via https://github.com/bazelbuild/remote-apis.

    +Remote execution configuration can also be provided in the
    +:ref:`user configuration <user_config_remote_execution>`.
    +
     .. _project_essentials_mirrors:

     Mirrors
    

  • doc/source/using_config.rst
    @@ -100,6 +100,54 @@ pull only access and push/pull access. For information regarding this and the
     server/client certificates and keys, please see:
     :ref:`Key pair for the server <server_authentication>`.

    +.. _user_config_remote_execution:
    +
    +Remote execution
    +~~~~~~~~~~~~~~~~
    +
    +The same configuration for :ref:`remote execution <project_remote_execution>`
    +in ``project.conf`` can be provided in the user configuration.
    +
    +There is only one remote execution configuration in use per project.
    +
    +A project override takes priority; the global configuration is used
    +as a fallback.
    +
    +1. Global remote execution fallback:
    +
    +.. code:: yaml
    +
    +  remote-execution:
    +    execution-service:
    +      url: http://execution.fallback.example.com:50051
    +      instance-name: main
    +    storage-service:
    +      url: https://storage.fallback.example.com:11002/
    +      server-cert: /keys/server.crt
    +      client-cert: /keys/client.crt
    +      client-key: /keys/client.key
    +      instance-name: main
    +    action-cache-service:
    +      url: http://action.fallback.example.com:50052
    +
    +2. Project override:
    +
    +.. code:: yaml
    +
    +  projects:
    +    some_project:
    +      remote-execution:
    +        execution-service:
    +          url: http://execution.some_project.example.com:50051
    +          instance-name: main
    +        storage-service:
    +          url: https://storage.some_project.example.com:11002/
    +          server-cert: /some_project_keys/server.crt
    +          client-cert: /some_project_keys/client.crt
    +          client-key: /some_project_keys/client.key
    +          instance-name: main
    +        action-cache-service:
    +          url: http://action.some_project.example.com:50052


     Strict build plan


