finn pushed to branch finn/87-multi-channel at BuildGrid / buildgrid
Commits:
-
f7052140
by finnball at 2018-09-17T13:40:08Z
-
53257bac
by Laurence Urhegyi at 2018-09-17T17:26:05Z
-
f8e2b7c0
by Laurence Urhegyi at 2018-09-17T17:34:59Z
-
37e735e5
by Laurence Urhegyi at 2018-09-17T17:39:20Z
-
862fa607
by finnball at 2018-09-18T14:49:35Z
-
73ecd7af
by finnball at 2018-09-18T14:49:35Z
-
49f63015
by finnball at 2018-09-18T14:49:35Z
-
13a67a9e
by finnball at 2018-09-18T14:49:35Z
-
b56d66e5
by finnball at 2018-09-18T14:49:35Z
-
f641049c
by finnball at 2018-09-18T14:49:35Z
-
e657382a
by finnball at 2018-09-18T14:49:35Z
-
04a1e9c4
by finnball at 2018-09-18T15:17:51Z
28 changed files:
- README.rst
- buildgrid/_app/commands/cmd_server.py
- − buildgrid/_app/server.py
- buildgrid/_app/settings/default.yml
- buildgrid/_app/settings/parser.py
- buildgrid/bot/bot_session.py
- buildgrid/server/actioncache/service.py
- buildgrid/server/actioncache/storage.py
- buildgrid/server/bots/instance.py
- buildgrid/server/bots/service.py
- buildgrid/server/cas/instance.py
- buildgrid/server/cas/service.py
- buildgrid/server/controller.py
- buildgrid/server/execution/instance.py
- buildgrid/server/execution/service.py
- + buildgrid/server/instance.py
- buildgrid/server/operations/instance.py
- buildgrid/server/operations/service.py
- buildgrid/server/referencestorage/service.py
- buildgrid/server/referencestorage/storage.py
- docs/source/index.rst
- tests/cas/test_services.py
- tests/cas/test_storage.py
- tests/integration/action_cache_service.py
- tests/integration/bots_service.py
- tests/integration/execution_service.py
- tests/integration/operations_service.py
- tests/integration/reference_storage_service.py
Changes:
| 1 |
- |
|
| 2 | 1 |
.. _about:
|
| 3 | 2 |
|
| 4 |
-About
|
|
| 5 |
-=====
|
|
| 3 |
+ |
|
| 6 | 4 |
|
| 7 | 5 |
.. image:: https://gitlab.com/Buildgrid/buildgrid/badges/master/pipeline.svg
|
| 8 | 6 |
:target: https://gitlab.com/BuildStream/buildstream/commits/master
|
| 9 | 7 |
|
| 10 | 8 |
.. image:: https://gitlab.com/BuildGrid/buildgrid/badges/master/coverage.svg?job=coverage
|
| 11 | 9 |
:target: https://buildgrid.gitlab.io/buildgrid/coverage
|
| 10 |
+
|
|
| 11 |
+About BuildGrid
|
|
| 12 |
+===============
|
|
| 13 |
+ |
|
| 14 |
+What is BuildGrid?
|
|
| 15 |
+------------------
|
|
| 12 | 16 |
|
| 13 | 17 |
BuildGrid is a Python remote execution service which implements Google's
|
| 14 | 18 |
`Remote Execution API`_ and the `Remote Workers API`_. The project's goal is to
|
| 15 | 19 |
be able to execute build jobs remotely on a grid of computers in order to
|
| 16 | 20 |
massively speed up build times. Workers on the grid should be able to run with
|
| 17 |
-different environments. It is designed to work with but not exclusively
|
|
| 21 |
+different environments. It is designed to work with clients such as `Bazel`_ and
|
|
| 18 | 22 |
`BuildStream`_.
|
| 19 | 23 |
|
| 20 | 24 |
.. _Remote Execution API: https://github.com/bazelbuild/remote-apis
|
| 21 | 25 |
.. _Remote Workers API: https://docs.google.com/document/d/1s_AzRRD2mdyktKUj2HWBn99rMg_3tcPvdjx3MPbFidU/edit#heading=h.1u2taqr2h940
|
| 22 | 26 |
.. _BuildStream: https://wiki.gnome.org/Projects/BuildStream
|
| 27 |
+.. _Bazel: https://bazel.build
|
|
| 23 | 28 |
|
| 24 | 29 |
|
| 25 | 30 |
.. _getting-started:
|
| ... | ... | @@ -40,10 +45,15 @@ instructions. |
| 40 | 45 |
Resources
|
| 41 | 46 |
---------
|
| 42 | 47 |
|
| 43 |
-- Homepage: https://buildgrid.build
|
|
| 44 |
-- GitLab repository: https://gitlab.com/BuildGrid/buildgrid
|
|
| 45 |
-- Bug tracking: https://gitlab.com/BuildGrid/buildgrid/issues
|
|
| 46 |
-- Mailing list: https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid
|
|
| 47 |
-- Slack channel: https://buildteamworld.slack.com/messages/CC9MKC203 [`invite link`_]
|
|
| 48 |
- |
|
| 48 |
+- `Homepage`_
|
|
| 49 |
+- `GitLab repository`_
|
|
| 50 |
+- `Bug tracking`_
|
|
| 51 |
+- `Mailing list`_
|
|
| 52 |
+- `Slack channel`_ [`invite link`_]
|
|
| 53 |
+ |
|
| 54 |
+.. _Homepage: https://buildgrid.build
|
|
| 55 |
+.. _GitLab repository: https://gitlab.com/BuildGrid/buildgrid
|
|
| 56 |
+.. _Bug tracking: https://gitlab.com/BuildGrid/buildgrid/issues
|
|
| 57 |
+.. _Mailing list: https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid
|
|
| 58 |
+.. _Slack channel: https://buildteamworld.slack.com/messages/CC9MKC203
|
|
| 49 | 59 |
.. _invite link: https://join.slack.com/t/buildteamworld/shared_invite/enQtMzkxNzE0MDMyMDY1LTRmZmM1OWE0OTFkMGE1YjU5Njc4ODEzYjc0MGMyOTM5ZTQ5MmE2YTQ1MzQwZDc5MWNhODY1ZmRkZTE4YjFhNjU
|
| ... | ... | @@ -26,14 +26,10 @@ import sys |
| 26 | 26 |
|
| 27 | 27 |
import click
|
| 28 | 28 |
|
| 29 |
-from buildgrid.server.controller import ExecutionController
|
|
| 30 |
-from buildgrid.server.actioncache.storage import ActionCache
|
|
| 31 |
-from buildgrid.server.cas.instance import ByteStreamInstance, ContentAddressableStorageInstance
|
|
| 32 |
-from buildgrid.server.referencestorage.storage import ReferenceCache
|
|
| 29 |
+from buildgrid.server.instance import BuildGridServer
|
|
| 33 | 30 |
|
| 34 | 31 |
from ..cli import pass_context
|
| 35 | 32 |
from ..settings import parser
|
| 36 |
-from ..server import BuildGridServer
|
|
| 37 | 33 |
|
| 38 | 34 |
|
| 39 | 35 |
@click.group(name='server', short_help="Start a local server instance.")
|
| ... | ... | @@ -51,57 +47,23 @@ def start(context, config): |
| 51 | 47 |
|
| 52 | 48 |
try:
|
| 53 | 49 |
server_settings = settings['server']
|
| 54 |
- insecure_mode = server_settings['insecure-mode']
|
|
| 55 |
- |
|
| 56 |
- credentials = None
|
|
| 57 |
- if not insecure_mode:
|
|
| 58 |
- credential_settings = server_settings['credentials']
|
|
| 59 |
- server_key = credential_settings['tls-server-key']
|
|
| 60 |
- server_cert = credential_settings['tls-server-cert']
|
|
| 61 |
- client_certs = credential_settings['tls-client-certs']
|
|
| 62 |
- credentials = context.load_server_credentials(server_key, server_cert, client_certs)
|
|
| 63 |
- |
|
| 64 |
- if not credentials:
|
|
| 65 |
- click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
|
| 66 |
- "Set `insecure-mode: false` in order to deactivate TLS encryption.\n", err=True)
|
|
| 67 |
- sys.exit(-1)
|
|
| 68 |
- |
|
| 69 |
- port = server_settings['port']
|
|
| 70 |
- instances = settings['instances']
|
|
| 71 |
- |
|
| 72 |
- execution_controllers = _instance_maker(instances, ExecutionController)
|
|
| 73 | 50 |
|
| 74 |
- execution_instances = {}
|
|
| 75 |
- bots_interfaces = {}
|
|
| 76 |
- operations_instances = {}
|
|
| 51 |
+ server = BuildGridServer()
|
|
| 52 |
+ for channel in server_settings:
|
|
| 53 |
+ server.add_port(channel.address, channel.credentials)
|
|
| 77 | 54 |
|
| 78 |
- # TODO: map properly in parser
|
|
| 79 |
- # Issue 82
|
|
| 80 |
- for k, v in execution_controllers.items():
|
|
| 81 |
- execution_instances[k] = v.execution_instance
|
|
| 82 |
- bots_interfaces[k] = v.bots_interface
|
|
| 83 |
- operations_instances[k] = v.operations_instance
|
|
| 55 |
+ instances = settings['instances']
|
|
| 56 |
+ for instance in instances:
|
|
| 57 |
+ instance_name = instance['name']
|
|
| 58 |
+ services = instance['services']
|
|
| 84 | 59 |
|
| 85 |
- reference_caches = _instance_maker(instances, ReferenceCache)
|
|
| 86 |
- action_caches = _instance_maker(instances, ActionCache)
|
|
| 87 |
- cas = _instance_maker(instances, ContentAddressableStorageInstance)
|
|
| 88 |
- bytestreams = _instance_maker(instances, ByteStreamInstance)
|
|
| 60 |
+ for service in services:
|
|
| 61 |
+ service.add_self_to_service(instance_name, server)
|
|
| 89 | 62 |
|
| 90 | 63 |
except KeyError as e:
|
| 91 | 64 |
click.echo("ERROR: Could not parse config: {}.\n".format(str(e)), err=True)
|
| 92 | 65 |
sys.exit(-1)
|
| 93 | 66 |
|
| 94 |
- server = BuildGridServer(port=port,
|
|
| 95 |
- credentials=credentials,
|
|
| 96 |
- execution_instances=execution_instances,
|
|
| 97 |
- bots_interfaces=bots_interfaces,
|
|
| 98 |
- operations_instances=operations_instances,
|
|
| 99 |
- reference_storage_instances=reference_caches,
|
|
| 100 |
- action_cache_instances=action_caches,
|
|
| 101 |
- cas_instances=cas,
|
|
| 102 |
- bytestream_instances=bytestreams)
|
|
| 103 |
- |
|
| 104 |
- context.logger.info("Starting server on port {}".format(port))
|
|
| 105 | 67 |
loop = asyncio.get_event_loop()
|
| 106 | 68 |
try:
|
| 107 | 69 |
server.start()
|
| ... | ... | @@ -114,17 +76,3 @@ def start(context, config): |
| 114 | 76 |
context.logger.info("Stopping server")
|
| 115 | 77 |
server.stop()
|
| 116 | 78 |
loop.close()
|
| 117 |
- |
|
| 118 |
- |
|
| 119 |
-# Turn away now if you want to keep your eyes
|
|
| 120 |
-def _instance_maker(instances, service_type):
|
|
| 121 |
- # TODO get this mapped in parser
|
|
| 122 |
- made = {}
|
|
| 123 |
- |
|
| 124 |
- for instance in instances:
|
|
| 125 |
- services = instance['services']
|
|
| 126 |
- instance_name = instance['name']
|
|
| 127 |
- for service in services:
|
|
| 128 |
- if isinstance(service, service_type):
|
|
| 129 |
- made[instance_name] = service
|
|
| 130 |
- return made
|
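With this change the `start` command no longer builds service maps by hand; the parsed settings objects wire themselves into the server. A minimal sketch of the new flow, assuming a `settings` dict produced by the YAML parser (all names as in the diff above):

    from buildgrid.server.instance import BuildGridServer

    def launch(settings):
        server = BuildGridServer()

        # Each !channel entry already carries a bind address and optional TLS credentials.
        for channel in settings['server']:
            server.add_port(channel.address, channel.credentials)

        # Every configured service registers itself against its instance name.
        for instance in settings['instances']:
            for service in instance['services']:
                service.add_self_to_service(instance['name'], server)

        server.start()
        return server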
| 1 |
-# Copyright (C) 2018 Bloomberg LP
|
|
| 2 |
-#
|
|
| 3 |
-# Licensed under the Apache License, Version 2.0 (the "License");
|
|
| 4 |
-# you may not use this file except in compliance with the License.
|
|
| 5 |
-# You may obtain a copy of the License at
|
|
| 6 |
-#
|
|
| 7 |
-# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
| 8 |
-#
|
|
| 9 |
-# Unless required by applicable law or agreed to in writing, software
|
|
| 10 |
-# distributed under the License is distributed on an "AS IS" BASIS,
|
|
| 11 |
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
| 12 |
-# See the License for the specific language governing permissions and
|
|
| 13 |
-# limitations under the License.
|
|
| 14 |
- |
|
| 15 |
- |
|
| 16 |
-"""
|
|
| 17 |
-BuildGridServer
|
|
| 18 |
-==============
|
|
| 19 |
- |
|
| 20 |
-Creates a BuildGrid server, binding all the requisite service instances together.
|
|
| 21 |
-"""
|
|
| 22 |
- |
|
| 23 |
-import logging
|
|
| 24 |
-from concurrent import futures
|
|
| 25 |
- |
|
| 26 |
-import grpc
|
|
| 27 |
- |
|
| 28 |
-from buildgrid.server.cas.service import ByteStreamService, ContentAddressableStorageService
|
|
| 29 |
-from buildgrid.server.actioncache.service import ActionCacheService
|
|
| 30 |
-from buildgrid.server.execution.service import ExecutionService
|
|
| 31 |
-from buildgrid.server.operations.service import OperationsService
|
|
| 32 |
-from buildgrid.server.bots.service import BotsService
|
|
| 33 |
-from buildgrid.server.referencestorage.service import ReferenceStorageService
|
|
| 34 |
- |
|
| 35 |
- |
|
| 36 |
-class BuildGridServer:
|
|
| 37 |
- |
|
| 38 |
- def __init__(self, port=50051, max_workers=10, credentials=None,
|
|
| 39 |
- execution_instances=None, bots_interfaces=None, operations_instances=None,
|
|
| 40 |
- operations_service_instances=None, reference_storage_instances=None,
|
|
| 41 |
- action_cache_instances=None, cas_instances=None, bytestream_instances=None):
|
|
| 42 |
- |
|
| 43 |
- self.logger = logging.getLogger(__name__)
|
|
| 44 |
- address = '[::]:{0}'.format(port)
|
|
| 45 |
- |
|
| 46 |
- server = grpc.server(futures.ThreadPoolExecutor(max_workers))
|
|
| 47 |
- |
|
| 48 |
- if credentials is not None:
|
|
| 49 |
- self.logger.info("Secure connection")
|
|
| 50 |
- server.add_secure_port(address, credentials)
|
|
| 51 |
- |
|
| 52 |
- else:
|
|
| 53 |
- self.logger.info("Insecure connection")
|
|
| 54 |
- server.add_insecure_port(address)
|
|
| 55 |
- |
|
| 56 |
- if execution_instances:
|
|
| 57 |
- self.logger.debug("Adding execution instances {}".format(
|
|
| 58 |
- execution_instances.keys()))
|
|
| 59 |
- ExecutionService(server, execution_instances)
|
|
| 60 |
- |
|
| 61 |
- if bots_interfaces:
|
|
| 62 |
- self.logger.debug("Adding bots interfaces {}".format(
|
|
| 63 |
- bots_interfaces.keys()))
|
|
| 64 |
- BotsService(server, bots_interfaces)
|
|
| 65 |
- |
|
| 66 |
- if operations_instances:
|
|
| 67 |
- self.logger.debug("Adding operations instances {}".format(
|
|
| 68 |
- operations_instances.keys()))
|
|
| 69 |
- OperationsService(server, operations_instances)
|
|
| 70 |
- |
|
| 71 |
- if reference_storage_instances:
|
|
| 72 |
- self.logger.debug("Adding reference storages {}".format(
|
|
| 73 |
- reference_storage_instances.keys()))
|
|
| 74 |
- ReferenceStorageService(server, reference_storage_instances)
|
|
| 75 |
- |
|
| 76 |
- if action_cache_instances:
|
|
| 77 |
- self.logger.debug("Adding action cache instances {}".format(
|
|
| 78 |
- action_cache_instances.keys()))
|
|
| 79 |
- ActionCacheService(server, action_cache_instances)
|
|
| 80 |
- |
|
| 81 |
- if cas_instances:
|
|
| 82 |
- self.logger.debug("Adding cas instances {}".format(
|
|
| 83 |
- cas_instances.keys()))
|
|
| 84 |
- ContentAddressableStorageService(server, cas_instances)
|
|
| 85 |
- |
|
| 86 |
- if bytestream_instances:
|
|
| 87 |
- self.logger.debug("Adding bytestream instances {}".format(
|
|
| 88 |
- bytestream_instances.keys()))
|
|
| 89 |
- ByteStreamService(server, bytestream_instances)
|
|
| 90 |
- |
|
| 91 |
- self._server = server
|
|
| 92 |
- |
|
| 93 |
- def start(self):
|
|
| 94 |
- self._server.start()
|
|
| 95 |
- |
|
| 96 |
- def stop(self):
|
|
| 97 |
- self._server.stop(grace=0)
|
| 1 | 1 |
server:
|
| 2 |
- port: 50051
|
|
| 3 |
- insecure-mode: true
|
|
| 4 |
- credentials:
|
|
| 5 |
- tls-server-key: null
|
|
| 6 |
- tls-server-cert: null
|
|
| 7 |
- tls-client-certs: null
|
|
| 2 |
+ - !channel
|
|
| 3 |
+ port: 50051
|
|
| 4 |
+ insecure_mode: true
|
|
| 5 |
+ credentials:
|
|
| 6 |
+ tls-server-key: null
|
|
| 7 |
+ tls-server-cert: null
|
|
| 8 |
+ tls-client-certs: null
|
|
| 9 |
+ |
|
| 10 |
+ - !channel
|
|
| 11 |
+ port: 50052
|
|
| 12 |
+ insecure_mode: true
|
|
| 13 |
+ credentials:
|
|
| 14 |
+ tls-server-key: null
|
|
| 15 |
+ tls-server-cert: null
|
|
| 16 |
+ tls-client-certs: null
|
|
| 8 | 17 |
|
| 9 | 18 |
description: |
|
| 10 | 19 |
A single default instance
|
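Since each `!channel` entry is independent, the same schema should also cover a mixed setup with one open port and one TLS-protected port. A sketch only; the certificate paths below are hypothetical and not part of this commit:

    server:
      - !channel
        port: 50051
        insecure_mode: true

      - !channel
        port: 50052
        insecure_mode: false
        credentials:
          tls-server-key: /etc/buildgrid/server.key
          tls-server-cert: /etc/buildgrid/server.crt
          tls-client-certs: /etc/buildgrid/clients.crt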
| ... | ... | @@ -37,8 +37,35 @@ from ..cli import Context |
| 37 | 37 |
class YamlFactory(yaml.YAMLObject):
|
| 38 | 38 |
@classmethod
|
| 39 | 39 |
def from_yaml(cls, loader, node):
|
| 40 |
- values = loader.construct_mapping(node, deep=True)
|
|
| 41 |
- return cls(**values)
|
|
| 40 |
+ if isinstance(node, yaml.ScalarNode):
|
|
| 41 |
+ value = loader.construct_scalar(node)
|
|
| 42 |
+ return cls(value)
|
|
| 43 |
+ |
|
| 44 |
+ else:
|
|
| 45 |
+ values = loader.construct_mapping(node, deep=True)
|
|
| 46 |
+ return cls(**values)
|
|
| 47 |
+ |
|
| 48 |
+ |
|
| 49 |
+class Channel(YamlFactory):
|
|
| 50 |
+ |
|
| 51 |
+ yaml_tag = u'!channel'
|
|
| 52 |
+ |
|
| 53 |
+ def __init__(self, port, insecure_mode, credentials=None):
|
|
| 54 |
+ self.address = '[::]:{0}'.format(port)
|
|
| 55 |
+ self.credentials = None
|
|
| 56 |
+ |
|
| 57 |
+ context = Context()
|
|
| 58 |
+ |
|
| 59 |
+ if not insecure_mode:
|
|
| 60 |
+ server_key = credentials['tls-server-key']
|
|
| 61 |
+ server_cert = credentials['tls-server-cert']
|
|
| 62 |
+ client_certs = credentials['tls-client-certs']
|
|
| 63 |
+ self.credentials = context.load_server_credentials(server_key, server_cert, client_certs)
|
|
| 64 |
+ |
|
| 65 |
+ if not self.credentials:
|
|
| 66 |
+ click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
|
| 67 |
+ "Set `insecure-mode: false` in order to deactivate TLS encryption.\n", err=True)
|
|
| 68 |
+ sys.exit(-1)
|
|
| 42 | 69 |
|
| 43 | 70 |
|
| 44 | 71 |
class Disk(YamlFactory):
|
| ... | ... | @@ -169,6 +196,7 @@ def _parse_size(size): |
| 169 | 196 |
|
| 170 | 197 |
def get_parser():
|
| 171 | 198 |
|
| 199 |
+ yaml.SafeLoader.add_constructor(Channel.yaml_tag, Channel.from_yaml)
|
|
| 172 | 200 |
yaml.SafeLoader.add_constructor(Execution.yaml_tag, Execution.from_yaml)
|
| 173 | 201 |
yaml.SafeLoader.add_constructor(Action.yaml_tag, Action.from_yaml)
|
| 174 | 202 |
yaml.SafeLoader.add_constructor(Reference.yaml_tag, Reference.from_yaml)
|
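The `!channel` tag is resolved through PyYAML's constructor registry, the same mechanism `YamlFactory` already relies on. A stripped-down, self-contained illustration of that mechanism (not BuildGrid's actual classes; the names are for demonstration only):

    import yaml

    class Channel:
        def __init__(self, port, insecure_mode, credentials=None):
            # Mirror the parser: precompute the gRPC bind address up front.
            self.address = '[::]:{0}'.format(port)
            self.credentials = None if insecure_mode else credentials

    def channel_constructor(loader, node):
        # Mapping nodes become keyword arguments, as in YamlFactory.from_yaml.
        return Channel(**loader.construct_mapping(node, deep=True))

    yaml.SafeLoader.add_constructor('!channel', channel_constructor)

    config = yaml.safe_load("server:\n  - !channel\n    port: 50051\n    insecure_mode: true\n")
    print(config['server'][0].address)  # prints [::]:50051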
| ... | ... | @@ -99,13 +99,13 @@ class BotSession: |
| 99 | 99 |
session = self._interface.create_bot_session(self._parent, self.get_pb2())
|
| 100 | 100 |
self._name = session.name
|
| 101 | 101 |
|
| 102 |
- self.logger.info("Created bot session with name: {}".format(self._name))
|
|
| 102 |
+ self.logger.info("Created bot session with name: [{}]".format(self._name))
|
|
| 103 | 103 |
|
| 104 | 104 |
for lease in session.leases:
|
| 105 | 105 |
self._update_lease_from_server(lease)
|
| 106 | 106 |
|
| 107 | 107 |
def update_bot_session(self):
|
| 108 |
- self.logger.debug("Updating bot session: {}".format(self._bot_id))
|
|
| 108 |
+ self.logger.debug("Updating bot session: [{}]".format(self._bot_id))
|
|
| 109 | 109 |
session = self._interface.update_bot_session(self.get_pb2())
|
| 110 | 110 |
for k, v in list(self._leases.items()):
|
| 111 | 111 |
if v.state == LeaseState.COMPLETED.value:
|
| ... | ... | @@ -141,12 +141,12 @@ class BotSession: |
| 141 | 141 |
asyncio.ensure_future(self.create_work(lease))
|
| 142 | 142 |
|
| 143 | 143 |
async def create_work(self, lease):
|
| 144 |
- self.logger.debug("Work created: {}".format(lease.id))
|
|
| 144 |
+ self.logger.debug("Work created: [{}]".format(lease.id))
|
|
| 145 | 145 |
|
| 146 | 146 |
loop = asyncio.get_event_loop()
|
| 147 | 147 |
lease = await loop.run_in_executor(None, self._work, self._context, lease)
|
| 148 | 148 |
|
| 149 |
- self.logger.debug("Work complete: {}".format(lease.id))
|
|
| 149 |
+ self.logger.debug("Work complete: [{}]".format(lease.id))
|
|
| 150 | 150 |
self.lease_completed(lease)
|
| 151 | 151 |
|
| 152 | 152 |
|
| ... | ... | @@ -161,14 +161,14 @@ class Worker: |
| 161 | 161 |
if k == 'pool':
|
| 162 | 162 |
self.properties[k] = v
|
| 163 | 163 |
else:
|
| 164 |
- raise KeyError('Key not supported: {}'.format(k))
|
|
| 164 |
+ raise KeyError('Key not supported: [{}]'.format(k))
|
|
| 165 | 165 |
|
| 166 | 166 |
if configs:
|
| 167 | 167 |
for k, v in configs.items():
|
| 168 | 168 |
if k == 'DockerImage':
|
| 169 | 169 |
self.configs[k] = v
|
| 170 | 170 |
else:
|
| 171 |
- raise KeyError('Key not supported: {}'.format(k))
|
|
| 171 |
+ raise KeyError('Key not supported: [{}]'.format(k))
|
|
| 172 | 172 |
|
| 173 | 173 |
@property
|
| 174 | 174 |
def configs(self):
|
| ... | ... | @@ -214,11 +214,11 @@ class Device: |
| 214 | 214 |
|
| 215 | 215 |
elif k == 'docker':
|
| 216 | 216 |
if v not in ('True', 'False'):
|
| 217 |
- raise ValueError('Value not supported: {}'.format(v))
|
|
| 217 |
+ raise ValueError('Value not supported: [{}]'.format(v))
|
|
| 218 | 218 |
self._properties[k] = v
|
| 219 | 219 |
|
| 220 | 220 |
else:
|
| 221 |
- raise KeyError('Key not supported: {}'.format(k))
|
|
| 221 |
+ raise KeyError('Key not supported: [{}]'.format(k))
|
|
| 222 | 222 |
|
| 223 | 223 |
@property
|
| 224 | 224 |
def name(self):
|
| ... | ... | @@ -32,13 +32,16 @@ from .._exceptions import InvalidArgumentError, NotFoundError |
| 32 | 32 |
|
| 33 | 33 |
class ActionCacheService(remote_execution_pb2_grpc.ActionCacheServicer):
|
| 34 | 34 |
|
| 35 |
- def __init__(self, server, instances):
|
|
| 36 |
- self._instances = instances
|
|
| 37 |
- |
|
| 35 |
+ def __init__(self, server):
|
|
| 38 | 36 |
self.logger = logging.getLogger(__name__)
|
| 39 | 37 |
|
| 38 |
+ self._instances = dict()
|
|
| 39 |
+ |
|
| 40 | 40 |
remote_execution_pb2_grpc.add_ActionCacheServicer_to_server(self, server)
|
| 41 | 41 |
|
| 42 |
+ def add_instance(self, name, instance):
|
|
| 43 |
+ self._instances[name] = instance
|
|
| 44 |
+ |
|
| 42 | 45 |
def GetActionResult(self, request, context):
|
| 43 | 46 |
try:
|
| 44 | 47 |
instance = self._get_instance(request.instance_name)
|
| ... | ... | @@ -77,4 +80,4 @@ class ActionCacheService(remote_execution_pb2_grpc.ActionCacheServicer): |
| 77 | 80 |
return self._instances[instance_name]
|
| 78 | 81 |
|
| 79 | 82 |
except KeyError:
|
| 80 |
- raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
|
|
| 83 |
+ raise InvalidArgumentError("Invalid instance name: [{}]".format(instance_name))
|
| ... | ... | @@ -26,6 +26,9 @@ from ..referencestorage.storage import ReferenceCache |
| 26 | 26 |
|
| 27 | 27 |
class ActionCache(ReferenceCache):
|
| 28 | 28 |
|
| 29 |
+ def add_self_to_service(self, instance_name, server):
|
|
| 30 |
+ server.add_action_cache_instance(self, instance_name)
|
|
| 31 |
+ |
|
| 29 | 32 |
def get_action_result(self, action_digest):
|
| 30 | 33 |
key = self._get_key(action_digest)
|
| 31 | 34 |
return self.get_action_reference(key)
|
| ... | ... | @@ -36,6 +36,9 @@ class BotsInterface: |
| 36 | 36 |
self._bot_sessions = {}
|
| 37 | 37 |
self._scheduler = scheduler
|
| 38 | 38 |
|
| 39 |
+ def add_self_to_service(self, instance_name, server):
|
|
| 40 |
+ server.add_bots_interface(self, instance_name)
|
|
| 41 |
+ |
|
| 39 | 42 |
def create_bot_session(self, parent, bot_session):
|
| 40 | 43 |
""" Creates a new bot session. Server should assign a unique
|
| 41 | 44 |
name to the session. If a bot with the same bot id tries to
|
| ... | ... | @@ -60,7 +63,7 @@ class BotsInterface: |
| 60 | 63 |
|
| 61 | 64 |
self._bot_ids[name] = bot_id
|
| 62 | 65 |
self._bot_sessions[name] = bot_session
|
| 63 |
- self.logger.info("Created bot session name={} with bot_id={}".format(name, bot_id))
|
|
| 66 |
+ self.logger.info("Created bot session name=[{}] with bot_id=[{}]".format(name, bot_id))
|
|
| 64 | 67 |
|
| 65 | 68 |
for lease in self._scheduler.create_leases():
|
| 66 | 69 |
bot_session.leases.extend([lease])
|
| ... | ... | @@ -92,7 +95,7 @@ class BotsInterface: |
| 92 | 95 |
try:
|
| 93 | 96 |
server_lease = self._scheduler.get_job_lease(client_lease.id)
|
| 94 | 97 |
except KeyError:
|
| 95 |
- raise InvalidArgumentError("Lease not found on server: {}".format(client_lease))
|
|
| 98 |
+ raise InvalidArgumentError("Lease not found on server: [{}]".format(client_lease))
|
|
| 96 | 99 |
|
| 97 | 100 |
server_state = LeaseState(server_lease.state)
|
| 98 | 101 |
client_state = LeaseState(client_lease.state)
|
| ... | ... | @@ -105,7 +108,7 @@ class BotsInterface: |
| 105 | 108 |
# TODO: Lease was rejected
|
| 106 | 109 |
raise NotImplementedError("'Not Accepted' is unsupported")
|
| 107 | 110 |
else:
|
| 108 |
- raise OutofSyncError("Server lease: {}. Client lease: {}".format(server_lease, client_lease))
|
|
| 111 |
+ raise OutofSyncError("Server lease: [{}]. Client lease: [{}]".format(server_lease, client_lease))
|
|
| 109 | 112 |
|
| 110 | 113 |
elif server_state == LeaseState.ACTIVE:
|
| 111 | 114 |
|
| ... | ... | @@ -118,10 +121,10 @@ class BotsInterface: |
| 118 | 121 |
return None
|
| 119 | 122 |
|
| 120 | 123 |
else:
|
| 121 |
- raise OutofSyncError("Server lease: {}. Client lease: {}".format(server_lease, client_lease))
|
|
| 124 |
+ raise OutofSyncError("Server lease: [{}]. Client lease: [{}]".format(server_lease, client_lease))
|
|
| 122 | 125 |
|
| 123 | 126 |
elif server_state == LeaseState.COMPLETED:
|
| 124 |
- raise OutofSyncError("Server lease: {}. Client lease: {}".format(server_lease, client_lease))
|
|
| 127 |
+ raise OutofSyncError("Server lease: [{}]. Client lease: [{}]".format(server_lease, client_lease))
|
|
| 125 | 128 |
|
| 126 | 129 |
elif server_state == LeaseState.CANCELLED:
|
| 127 | 130 |
raise NotImplementedError("Cancelled states not supported yet")
|
| ... | ... | @@ -138,19 +141,19 @@ class BotsInterface: |
| 138 | 141 |
if name is not None:
|
| 139 | 142 |
_bot_id = self._bot_ids.get(name)
|
| 140 | 143 |
if _bot_id is None:
|
| 141 |
- raise InvalidArgumentError('Name not registered on server: {}'.format(name))
|
|
| 144 |
+ raise InvalidArgumentError('Name not registered on server: [{}]'.format(name))
|
|
| 142 | 145 |
elif _bot_id != bot_id:
|
| 143 | 146 |
self._close_bot_session(name)
|
| 144 | 147 |
raise InvalidArgumentError(
|
| 145 |
- 'Bot id invalid. ID sent: {} with name: {}.'
|
|
| 146 |
- 'ID registered: {} for that name'.format(bot_id, name, _bot_id))
|
|
| 148 |
+ 'Bot id invalid. ID sent: [{}] with name: [{}].'
|
|
| 149 |
+ 'ID registered: [{}] for that name'.format(bot_id, name, _bot_id))
|
|
| 147 | 150 |
else:
|
| 148 | 151 |
for _name, _bot_id in self._bot_ids.items():
|
| 149 | 152 |
if bot_id == _bot_id:
|
| 150 | 153 |
self._close_bot_session(_name)
|
| 151 | 154 |
raise InvalidArgumentError(
|
| 152 |
- 'Bot id already registered. ID sent: {}.'
|
|
| 153 |
- 'Id registered: {} with name: {}'.format(bot_id, _bot_id, _name))
|
|
| 155 |
+ 'Bot id already registered. ID sent: [{}].'
|
|
| 156 |
+ 'Id registered: [{}] with name: [{}]'.format(bot_id, _bot_id, _name))
|
|
| 154 | 157 |
|
| 155 | 158 |
def _close_bot_session(self, name):
|
| 156 | 159 |
""" Before removing the session, close any leases and
|
| ... | ... | @@ -159,14 +162,14 @@ class BotsInterface: |
| 159 | 162 |
bot_id = self._bot_ids.get(name)
|
| 160 | 163 |
|
| 161 | 164 |
if bot_id is None:
|
| 162 |
- raise InvalidArgumentError("Bot id does not exist: {}".format(name))
|
|
| 165 |
+ raise InvalidArgumentError("Bot id does not exist: [{}]".format(name))
|
|
| 163 | 166 |
|
| 164 |
- self.logger.debug("Attempting to close {} with name: {}".format(bot_id, name))
|
|
| 167 |
+ self.logger.debug("Attempting to close [{}] with name: [{}]".format(bot_id, name))
|
|
| 165 | 168 |
for lease in self._bot_sessions[name].leases:
|
| 166 | 169 |
if lease.state != LeaseState.COMPLETED.value:
|
| 167 | 170 |
# TODO: Be wary here, may need to handle rejected leases in future
|
| 168 | 171 |
self._scheduler.retry_job(lease.id)
|
| 169 | 172 |
|
| 170 |
- self.logger.debug("Closing bot session: {}".format(name))
|
|
| 173 |
+ self.logger.debug("Closing bot session: [{}]".format(name))
|
|
| 171 | 174 |
self._bot_ids.pop(name)
|
| 172 |
- self.logger.info("Closed bot {} with name: {}".format(bot_id, name))
|
|
| 175 |
+ self.logger.info("Closed bot [{}] with name: [{}]".format(bot_id, name))
|
| ... | ... | @@ -33,12 +33,16 @@ from .._exceptions import InvalidArgumentError, OutofSyncError |
| 33 | 33 |
|
| 34 | 34 |
class BotsService(bots_pb2_grpc.BotsServicer):
|
| 35 | 35 |
|
| 36 |
- def __init__(self, server, instances):
|
|
| 37 |
- self._instances = instances
|
|
| 36 |
+ def __init__(self, server):
|
|
| 38 | 37 |
self.logger = logging.getLogger(__name__)
|
| 39 | 38 |
|
| 39 |
+ self._instances = dict()
|
|
| 40 |
+ |
|
| 40 | 41 |
bots_pb2_grpc.add_BotsServicer_to_server(self, server)
|
| 41 | 42 |
|
| 43 |
+ def add_instance(self, name, instance):
|
|
| 44 |
+ self._instances[name] = instance
|
|
| 45 |
+ |
|
| 42 | 46 |
def CreateBotSession(self, request, context):
|
| 43 | 47 |
try:
|
| 44 | 48 |
parent = request.parent
|
| ... | ... | @@ -90,4 +94,4 @@ class BotsService(bots_pb2_grpc.BotsServicer): |
| 90 | 94 |
return self._instances[name]
|
| 91 | 95 |
|
| 92 | 96 |
except KeyError:
|
| 93 |
- raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
|
|
| 97 |
+ raise InvalidArgumentError("Instance doesn't exist on server: [{}]".format(name))
|
| ... | ... | @@ -31,6 +31,9 @@ class ContentAddressableStorageInstance: |
| 31 | 31 |
def __init__(self, storage):
|
| 32 | 32 |
self._storage = storage
|
| 33 | 33 |
|
| 34 |
+ def add_self_to_service(self, instance_name, server):
|
|
| 35 |
+ server.add_cas_instance(self, instance_name)
|
|
| 36 |
+ |
|
| 34 | 37 |
def find_missing_blobs(self, blob_digests):
|
| 35 | 38 |
storage = self._storage
|
| 36 | 39 |
return re_pb2.FindMissingBlobsResponse(
|
| ... | ... | @@ -60,6 +63,9 @@ class ByteStreamInstance: |
| 60 | 63 |
def __init__(self, storage):
|
| 61 | 64 |
self._storage = storage
|
| 62 | 65 |
|
| 66 |
+ def add_self_to_service(self, instance_name, server):
|
|
| 67 |
+ server.add_bytestream_instance(self, instance_name)
|
|
| 68 |
+ |
|
| 63 | 69 |
def read(self, path, read_offset, read_limit):
|
| 64 | 70 |
storage = self._storage
|
| 65 | 71 |
|
| ... | ... | @@ -35,12 +35,16 @@ from .._exceptions import InvalidArgumentError, NotFoundError, OutOfRangeError |
| 35 | 35 |
|
| 36 | 36 |
class ContentAddressableStorageService(remote_execution_pb2_grpc.ContentAddressableStorageServicer):
|
| 37 | 37 |
|
| 38 |
- def __init__(self, server, instances):
|
|
| 38 |
+ def __init__(self, server):
|
|
| 39 | 39 |
self.logger = logging.getLogger(__name__)
|
| 40 |
- self._instances = instances
|
|
| 40 |
+ |
|
| 41 |
+ self._instances = dict()
|
|
| 41 | 42 |
|
| 42 | 43 |
remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(self, server)
|
| 43 | 44 |
|
| 45 |
+ def add_instance(self, name, instance):
|
|
| 46 |
+ self._instances[name] = instance
|
|
| 47 |
+ |
|
| 44 | 48 |
def FindMissingBlobs(self, request, context):
|
| 45 | 49 |
try:
|
| 46 | 50 |
instance = self._get_instance(request.instance_name)
|
| ... | ... | @@ -70,17 +74,21 @@ class ContentAddressableStorageService(remote_execution_pb2_grpc.ContentAddressa |
| 70 | 74 |
return self._instances[instance_name]
|
| 71 | 75 |
|
| 72 | 76 |
except KeyError:
|
| 73 |
- raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
|
|
| 77 |
+ raise InvalidArgumentError("Invalid instance name: [{}]".format(instance_name))
|
|
| 74 | 78 |
|
| 75 | 79 |
|
| 76 | 80 |
class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
|
| 77 | 81 |
|
| 78 |
- def __init__(self, server, instances):
|
|
| 82 |
+ def __init__(self, server):
|
|
| 79 | 83 |
self.logger = logging.getLogger(__name__)
|
| 80 |
- self._instances = instances
|
|
| 84 |
+ |
|
| 85 |
+ self._instances = dict()
|
|
| 81 | 86 |
|
| 82 | 87 |
bytestream_pb2_grpc.add_ByteStreamServicer_to_server(self, server)
|
| 83 | 88 |
|
| 89 |
+ def add_instance(self, name, instance):
|
|
| 90 |
+ self._instances[name] = instance
|
|
| 91 |
+ |
|
| 84 | 92 |
def Read(self, request, context):
|
| 85 | 93 |
try:
|
| 86 | 94 |
path = request.resource_name.split("/")
|
| ... | ... | @@ -89,15 +97,15 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer): |
| 89 | 97 |
# TODO: Decide on default instance name
|
| 90 | 98 |
if path[0] == "blobs":
|
| 91 | 99 |
if len(path) < 3 or not path[2].isdigit():
|
| 92 |
- raise InvalidArgumentError("Invalid resource name: {}".format(request.resource_name))
|
|
| 100 |
+ raise InvalidArgumentError("Invalid resource name: [{}]".format(request.resource_name))
|
|
| 93 | 101 |
instance_name = ""
|
| 94 | 102 |
|
| 95 | 103 |
elif path[1] == "blobs":
|
| 96 | 104 |
if len(path) < 4 or not path[3].isdigit():
|
| 97 |
- raise InvalidArgumentError("Invalid resource name: {}".format(request.resource_name))
|
|
| 105 |
+ raise InvalidArgumentError("Invalid resource name: [{}]".format(request.resource_name))
|
|
| 98 | 106 |
|
| 99 | 107 |
else:
|
| 100 |
- raise InvalidArgumentError("Invalid resource name: {}".format(request.resource_name))
|
|
| 108 |
+ raise InvalidArgumentError("Invalid resource name: [{}]".format(request.resource_name))
|
|
| 101 | 109 |
|
| 102 | 110 |
instance = self._get_instance(instance_name)
|
| 103 | 111 |
yield from instance.read(path,
|
| ... | ... | @@ -134,15 +142,15 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer): |
| 134 | 142 |
# TODO: Sort out no instance name
|
| 135 | 143 |
if path[0] == "uploads":
|
| 136 | 144 |
if len(path) < 5 or path[2] != "blobs" or not path[4].isdigit():
|
| 137 |
- raise InvalidArgumentError("Invalid resource name: {}".format(first_request.resource_name))
|
|
| 145 |
+ raise InvalidArgumentError("Invalid resource name: [{}]".format(first_request.resource_name))
|
|
| 138 | 146 |
instance_name = ""
|
| 139 | 147 |
|
| 140 | 148 |
elif path[1] == "uploads":
|
| 141 | 149 |
if len(path) < 6 or path[3] != "blobs" or not path[5].isdigit():
|
| 142 |
- raise InvalidArgumentError("Invalid resource name: {}".format(first_request.resource_name))
|
|
| 150 |
+ raise InvalidArgumentError("Invalid resource name: [{}]".format(first_request.resource_name))
|
|
| 143 | 151 |
|
| 144 | 152 |
else:
|
| 145 |
- raise InvalidArgumentError("Invalid resource name: {}".format(first_request.resource_name))
|
|
| 153 |
+ raise InvalidArgumentError("Invalid resource name: [{}]".format(first_request.resource_name))
|
|
| 146 | 154 |
|
| 147 | 155 |
instance = self._get_instance(instance_name)
|
| 148 | 156 |
return instance.write(requests)
|
| ... | ... | @@ -169,4 +177,4 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer): |
| 169 | 177 |
return self._instances[instance_name]
|
| 170 | 178 |
|
| 171 | 179 |
except KeyError:
|
| 172 |
- raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
|
|
| 180 |
+ raise InvalidArgumentError("Invalid instance name: [{}]".format(instance_name))
|
| ... | ... | @@ -45,6 +45,11 @@ class ExecutionController: |
| 45 | 45 |
self._bots_interface = BotsInterface(scheduler)
|
| 46 | 46 |
self._operations_instance = OperationsInstance(scheduler)
|
| 47 | 47 |
|
| 48 |
+ def add_self_to_service(self, instance_name, server):
|
|
| 49 |
+ server.add_execution_instance(self._execution_instance, instance_name)
|
|
| 50 |
+ server.add_bots_interface(self._bots_interface, instance_name)
|
|
| 51 |
+ server.add_operations_instance(self._operations_instance, instance_name)
|
|
| 52 |
+ |
|
| 48 | 53 |
def stream_operation_updates(self, message_queue, operation_name):
|
| 49 | 54 |
operation = message_queue.get()
|
| 50 | 55 |
while not operation.done:
|
| ... | ... | @@ -34,6 +34,9 @@ class ExecutionInstance: |
| 34 | 34 |
self._storage = storage
|
| 35 | 35 |
self._scheduler = scheduler
|
| 36 | 36 |
|
| 37 |
+ def add_self_to_service(self, instance_name, server):
|
|
| 38 |
+ server.add_execution_instance(self, instance_name)
|
|
| 39 |
+ |
|
| 37 | 40 |
def execute(self, action_digest, skip_cache_lookup, message_queue=None):
|
| 38 | 41 |
""" Sends a job for execution.
|
| 39 | 42 |
Queues an action and creates an Operation instance to be associated with
|
| ... | ... | @@ -47,7 +50,7 @@ class ExecutionInstance: |
| 47 | 50 |
do_not_cache = action.do_not_cache
|
| 48 | 51 |
|
| 49 | 52 |
job = Job(action_digest, do_not_cache, message_queue)
|
| 50 |
- self.logger.info("Operation name: {}".format(job.name))
|
|
| 53 |
+ self.logger.info("Operation name: [{}]".format(job.name))
|
|
| 51 | 54 |
|
| 52 | 55 |
self._scheduler.append_job(job, skip_cache_lookup)
|
| 53 | 56 |
|
| ... | ... | @@ -58,14 +61,14 @@ class ExecutionInstance: |
| 58 | 61 |
self._scheduler.register_client(name, queue)
|
| 59 | 62 |
|
| 60 | 63 |
except KeyError:
|
| 61 |
- raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
|
| 64 |
+ raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
|
|
| 62 | 65 |
|
| 63 | 66 |
def unregister_message_client(self, name, queue):
|
| 64 | 67 |
try:
|
| 65 | 68 |
self._scheduler.unregister_client(name, queue)
|
| 66 | 69 |
|
| 67 | 70 |
except KeyError:
|
| 68 |
- raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
|
| 71 |
+ raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
|
|
| 69 | 72 |
|
| 70 | 73 |
def stream_operation_updates(self, message_queue, operation_name):
|
| 71 | 74 |
operation = message_queue.get()
|
| ... | ... | @@ -35,12 +35,14 @@ from .._exceptions import InvalidArgumentError |
| 35 | 35 |
|
| 36 | 36 |
class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
|
| 37 | 37 |
|
| 38 |
- def __init__(self, server, instances):
|
|
| 38 |
+ def __init__(self, server):
|
|
| 39 | 39 |
self.logger = logging.getLogger(__name__)
|
| 40 |
- self._instances = instances
|
|
| 41 |
- |
|
| 40 |
+ self._instances = dict()
|
|
| 42 | 41 |
remote_execution_pb2_grpc.add_ExecutionServicer_to_server(self, server)
|
| 43 | 42 |
|
| 43 |
+ def add_instance(self, name, instance):
|
|
| 44 |
+ self._instances[name] = instance
|
|
| 45 |
+ |
|
| 44 | 46 |
def Execute(self, request, context):
|
| 45 | 47 |
try:
|
| 46 | 48 |
message_queue = queue.Queue()
|
| ... | ... | @@ -92,4 +94,4 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer): |
| 92 | 94 |
return self._instances[name]
|
| 93 | 95 |
|
| 94 | 96 |
except KeyError:
|
| 95 |
- raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
|
|
| 97 |
+ raise InvalidArgumentError("Instance doesn't exist on server: [{}]".format(name))
|
| 1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
| 2 |
+#
|
|
| 3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
| 4 |
+# you may not use this file except in compliance with the License.
|
|
| 5 |
+# You may obtain a copy of the License at
|
|
| 6 |
+#
|
|
| 7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
| 8 |
+#
|
|
| 9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
| 10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
| 11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
| 12 |
+# See the License for the specific language governing permissions and
|
|
| 13 |
+# limitations under the License.
|
|
| 14 |
+ |
|
| 15 |
+ |
|
| 16 |
+"""
|
|
| 17 |
+BuildGridServer
|
|
| 18 |
+===============
|
|
| 19 |
+ |
|
| 20 |
+Creates a BuildGrid server, binding all the requisite service instances together.
|
|
| 21 |
+"""
|
|
| 22 |
+ |
|
| 23 |
+import logging
|
|
| 24 |
+import os
|
|
| 25 |
+from concurrent import futures
|
|
| 26 |
+ |
|
| 27 |
+import grpc
|
|
| 28 |
+ |
|
| 29 |
+from .cas.service import ByteStreamService, ContentAddressableStorageService
|
|
| 30 |
+from .actioncache.service import ActionCacheService
|
|
| 31 |
+from .execution.service import ExecutionService
|
|
| 32 |
+from .operations.service import OperationsService
|
|
| 33 |
+from .bots.service import BotsService
|
|
| 34 |
+from .referencestorage.service import ReferenceStorageService
|
|
| 35 |
+ |
|
| 36 |
+ |
|
| 37 |
+class BuildGridServer:
|
|
| 38 |
+ |
|
| 39 |
+ def __init__(self, max_workers=None):
|
|
| 40 |
+ |
|
| 41 |
+ self.logger = logging.getLogger(__name__)
|
|
| 42 |
+ |
|
| 43 |
+ if max_workers is None:
|
|
| 44 |
+ # Use max_workers default from Python 3.5+
|
|
| 45 |
+ max_workers = (os.cpu_count() or 1) * 5
|
|
| 46 |
+ |
|
| 47 |
+ server = grpc.server(futures.ThreadPoolExecutor(max_workers))
|
|
| 48 |
+ |
|
| 49 |
+ self._server = server
|
|
| 50 |
+ |
|
| 51 |
+ self._execution_service = None
|
|
| 52 |
+ self._bots_service = None
|
|
| 53 |
+ self._operations_service = None
|
|
| 54 |
+ self._reference_storage_service = None
|
|
| 55 |
+ self._action_cache_service = None
|
|
| 56 |
+ self._cas_service = None
|
|
| 57 |
+ self._bytestream_service = None
|
|
| 58 |
+ |
|
| 59 |
+ def start(self):
|
|
| 60 |
+ self._server.start()
|
|
| 61 |
+ |
|
| 62 |
+ def stop(self, grace=0):
|
|
| 63 |
+ self._server.stop(grace)
|
|
| 64 |
+ |
|
| 65 |
+ def add_port(self, address, credentials):
|
|
| 66 |
+ if credentials is not None:
|
|
| 67 |
+ self.logger.info("Adding secure connection on: [{}]".format(address))
|
|
| 68 |
+ self._server.add_secure_port(address, credentials)
|
|
| 69 |
+ |
|
| 70 |
+ else:
|
|
| 71 |
+ self.logger.info("Adding insecure connection on [{}]".format(address))
|
|
| 72 |
+ self._server.add_insecure_port(address)
|
|
| 73 |
+ |
|
| 74 |
+ def add_execution_instance(self, instance, instance_name):
|
|
| 75 |
+ if self._execution_service is None:
|
|
| 76 |
+ self._execution_service = ExecutionService(self._server)
|
|
| 77 |
+ |
|
| 78 |
+ self._execution_service.add_instance(instance_name, instance)
|
|
| 79 |
+ |
|
| 80 |
+ def add_bots_interface(self, instance, instance_name):
|
|
| 81 |
+ if self._bots_service is None:
|
|
| 82 |
+ self._bots_service = BotsService(self._server)
|
|
| 83 |
+ |
|
| 84 |
+ self._bots_service.add_instance(instance_name, instance)
|
|
| 85 |
+ |
|
| 86 |
+ def add_operations_instance(self, instance, instance_name):
|
|
| 87 |
+ if self._operations_service is None:
|
|
| 88 |
+ self._operations_service = OperationsService(self._server)
|
|
| 89 |
+ |
|
| 90 |
+ self._operations_service.add_instance(instance_name, instance)
|
|
| 91 |
+ |
|
| 92 |
+ def add_reference_storage_instance(self, instance, instance_name):
|
|
| 93 |
+ if self._reference_storage_service is None:
|
|
| 94 |
+ self._reference_storage_service = ReferenceStorageService(self._server)
|
|
| 95 |
+ |
|
| 96 |
+ self._reference_storage_service.add_instance(instance_name, instance)
|
|
| 97 |
+ |
|
| 98 |
+ def add_action_cache_instance(self, instance, instance_name):
|
|
| 99 |
+ if self._action_cache_service is None:
|
|
| 100 |
+ self._action_cache_service = ActionCacheService(self._server)
|
|
| 101 |
+ |
|
| 102 |
+ self._action_cache_service.add_instance(instance_name, instance)
|
|
| 103 |
+ |
|
| 104 |
+ def add_cas_instance(self, instance, instance_name):
|
|
| 105 |
+ if self._cas_service is None:
|
|
| 106 |
+ self._cas_service = ContentAddressableStorageService(self._server)
|
|
| 107 |
+ |
|
| 108 |
+ self._cas_service.add_instance(instance_name, instance)
|
|
| 109 |
+ |
|
| 110 |
+ def add_bytestream_instance(self, instance, instance_name):
|
|
| 111 |
+ if self._bytestream_service is None:
|
|
| 112 |
+ self._bytestream_service = ByteStreamService(self._server)
|
|
| 113 |
+ |
|
| 114 |
+ self._bytestream_service.add_instance(instance_name, instance)
|
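Two behavioural notes on the new class: when `max_workers` is omitted the thread pool falls back to `(os.cpu_count() or 1) * 5`, matching the Python 3.5+ ThreadPoolExecutor default, and `stop()` now forwards a grace period to gRPC instead of always stopping immediately. A minimal lifecycle sketch (the port and sleep loop are illustrative only):

    import time

    from buildgrid.server.instance import BuildGridServer

    server = BuildGridServer()             # thread pool sized to (os.cpu_count() or 1) * 5
    server.add_port('[::]:50051', None)    # insecure; pass gRPC server credentials for TLS
    server.start()                         # non-blocking: gRPC serves on background threads

    try:
        while True:
            time.sleep(3600)
    except KeyboardInterrupt:
        server.stop(grace=5)               # give in-flight RPCs five seconds to finish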
| ... | ... | @@ -30,11 +30,14 @@ class OperationsInstance: |
| 30 | 30 |
self.logger = logging.getLogger(__name__)
|
| 31 | 31 |
self._scheduler = scheduler
|
| 32 | 32 |
|
| 33 |
+ def add_self_to_service(self, instance_name, server):
|
|
| 34 |
+ server.add_operations_instance(self, instance_name)
|
|
| 35 |
+ |
|
| 33 | 36 |
def get_operation(self, name):
|
| 34 | 37 |
operation = self._scheduler.jobs.get(name)
|
| 35 | 38 |
|
| 36 | 39 |
if operation is None:
|
| 37 |
- raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
|
| 40 |
+ raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
|
|
| 38 | 41 |
|
| 39 | 42 |
else:
|
| 40 | 43 |
return operation.get_operation()
|
| ... | ... | @@ -49,21 +52,21 @@ class OperationsInstance: |
| 49 | 52 |
self._scheduler.jobs.pop(name)
|
| 50 | 53 |
|
| 51 | 54 |
except KeyError:
|
| 52 |
- raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
|
| 55 |
+ raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
|
|
| 53 | 56 |
|
| 54 | 57 |
def register_message_client(self, name, queue):
|
| 55 | 58 |
try:
|
| 56 | 59 |
self._scheduler.register_client(name, queue)
|
| 57 | 60 |
|
| 58 | 61 |
except KeyError:
|
| 59 |
- raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
|
| 62 |
+ raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
|
|
| 60 | 63 |
|
| 61 | 64 |
def unregister_message_client(self, name, queue):
|
| 62 | 65 |
try:
|
| 63 | 66 |
self._scheduler.unregister_client(name, queue)
|
| 64 | 67 |
|
| 65 | 68 |
except KeyError:
|
| 66 |
- raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
|
| 69 |
+ raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
|
|
| 67 | 70 |
|
| 68 | 71 |
def stream_operation_updates(self, message_queue, operation_name):
|
| 69 | 72 |
operation = message_queue.get()
|
| ... | ... | @@ -32,12 +32,16 @@ from .._exceptions import InvalidArgumentError |
| 32 | 32 |
|
| 33 | 33 |
class OperationsService(operations_pb2_grpc.OperationsServicer):
|
| 34 | 34 |
|
| 35 |
- def __init__(self, server, instances):
|
|
| 36 |
- self._instances = instances
|
|
| 35 |
+ def __init__(self, server):
|
|
| 37 | 36 |
self.logger = logging.getLogger(__name__)
|
| 38 | 37 |
|
| 38 |
+ self._instances = dict()
|
|
| 39 |
+ |
|
| 39 | 40 |
operations_pb2_grpc.add_OperationsServicer_to_server(self, server)
|
| 40 | 41 |
|
| 42 |
+ def add_instance(self, name, instance):
|
|
| 43 |
+ self._instances[name] = instance
|
|
| 44 |
+ |
|
| 41 | 45 |
def GetOperation(self, request, context):
|
| 42 | 46 |
try:
|
| 43 | 47 |
name = request.name
|
| ... | ... | @@ -132,4 +136,4 @@ class OperationsService(operations_pb2_grpc.OperationsServicer): |
| 132 | 136 |
return self._instances[instance_name]
|
| 133 | 137 |
|
| 134 | 138 |
except KeyError:
|
| 135 |
- raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
|
|
| 139 |
+ raise InvalidArgumentError("Instance doesn't exist on server: [{}]".format(name))
|
| ... | ... | @@ -25,13 +25,16 @@ from .._exceptions import InvalidArgumentError, NotFoundError |
| 25 | 25 |
|
| 26 | 26 |
class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer):
|
| 27 | 27 |
|
| 28 |
- def __init__(self, server, instances):
|
|
| 28 |
+ def __init__(self, server):
|
|
| 29 | 29 |
self.logger = logging.getLogger(__name__)
|
| 30 | 30 |
|
| 31 |
- self._instances = instances
|
|
| 31 |
+ self._instances = dict()
|
|
| 32 | 32 |
|
| 33 | 33 |
buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(self, server)
|
| 34 | 34 |
|
| 35 |
+ def add_instance(self, name, instance):
|
|
| 36 |
+ self._instances[name] = instance
|
|
| 37 |
+ |
|
| 35 | 38 |
def GetReference(self, request, context):
|
| 36 | 39 |
try:
|
| 37 | 40 |
instance = self._get_instance(request.instance_name)
|
| ... | ... | @@ -86,4 +89,4 @@ class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer): |
| 86 | 89 |
return self._instances[instance_name]
|
| 87 | 90 |
|
| 88 | 91 |
except KeyError:
|
| 89 |
- raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
|
|
| 92 |
+ raise InvalidArgumentError("Invalid instance name: [{}]".format(instance_name))
|
| ... | ... | @@ -44,6 +44,9 @@ class ReferenceCache: |
| 44 | 44 |
self._max_cached_refs = max_cached_refs
|
| 45 | 45 |
self._digest_map = collections.OrderedDict()
|
| 46 | 46 |
|
| 47 |
+ def add_self_to_service(self, instance_name, server):
|
|
| 48 |
+ server.add_reference_storage_instance(self, instance_name)
|
|
| 49 |
+ |
|
| 47 | 50 |
@property
|
| 48 | 51 |
def allow_updates(self):
|
| 49 | 52 |
return self._allow_updates
|
| ... | ... | @@ -24,10 +24,15 @@ Remote execution service implementing Google's REAPI and RWAPI. |
| 24 | 24 |
Resources
|
| 25 | 25 |
---------
|
| 26 | 26 |
|
| 27 |
-- Homepage: https://buildgrid.build
|
|
| 28 |
-- GitLab repository: https://gitlab.com/BuildGrid/buildgrid
|
|
| 29 |
-- Bug tracking: https://gitlab.com/BuildGrid/buildgrid/issues
|
|
| 30 |
-- Mailing list: https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid
|
|
| 31 |
-- Slack channel: https://buildteamworld.slack.com/messages/CC9MKC203 [`invite link`_]
|
|
| 32 |
- |
|
| 27 |
+- `Homepage`_
|
|
| 28 |
+- `GitLab repository`_
|
|
| 29 |
+- `Bug tracking`_
|
|
| 30 |
+- `Mailing list`_
|
|
| 31 |
+- `Slack channel`_ [`invite link`_]
|
|
| 32 |
+ |
|
| 33 |
+.. _Homepage: https://buildgrid.build
|
|
| 34 |
+.. _GitLab repository: https://gitlab.com/BuildGrid/buildgrid
|
|
| 35 |
+.. _Bug tracking: https://gitlab.com/BuildGrid/buildgrid/issues
|
|
| 36 |
+.. _Mailing list: https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid
|
|
| 37 |
+.. _Slack channel: https://buildteamworld.slack.com/messages/CC9MKC203
|
|
| 33 | 38 |
.. _invite link: https://join.slack.com/t/buildteamworld/shared_invite/enQtMzkxNzE0MDMyMDY1LTRmZmM1OWE0OTFkMGE1YjU5Njc4ODEzYjc0MGMyOTM5ZTQ5MmE2YTQ1MzQwZDc5MWNhODY1ZmRkZTE4YjFhNjU
|
| ... | ... | @@ -80,7 +80,8 @@ def test_bytestream_read(mocked, data_to_read, instance): |
| 80 | 80 |
storage = SimpleStorage([b"abc", b"defg", data_to_read])
|
| 81 | 81 |
|
| 82 | 82 |
bs_instance = ByteStreamInstance(storage)
|
| 83 |
- servicer = ByteStreamService(server, {instance: bs_instance})
|
|
| 83 |
+ servicer = ByteStreamService(server)
|
|
| 84 |
+ servicer.add_instance(instance, bs_instance)
|
|
| 84 | 85 |
|
| 85 | 86 |
request = bytestream_pb2.ReadRequest()
|
| 86 | 87 |
if instance != "":
|
| ... | ... | @@ -99,8 +100,10 @@ def test_bytestream_read_many(mocked, instance): |
| 99 | 100 |
data_to_read = b"testing" * 10000
|
| 100 | 101 |
|
| 101 | 102 |
storage = SimpleStorage([b"abc", b"defg", data_to_read])
|
| 103 |
+ |
|
| 102 | 104 |
bs_instance = ByteStreamInstance(storage)
|
| 103 |
- servicer = ByteStreamService(server, {instance: bs_instance})
|
|
| 105 |
+ servicer = ByteStreamService(server)
|
|
| 106 |
+ servicer.add_instance(instance, bs_instance)
|
|
| 104 | 107 |
|
| 105 | 108 |
request = bytestream_pb2.ReadRequest()
|
| 106 | 109 |
if instance != "":
|
| ... | ... | @@ -118,8 +121,10 @@ def test_bytestream_read_many(mocked, instance): |
| 118 | 121 |
@mock.patch.object(service, 'bytestream_pb2_grpc', autospec=True)
|
| 119 | 122 |
def test_bytestream_write(mocked, instance, extra_data):
|
| 120 | 123 |
storage = SimpleStorage()
|
| 124 |
+ |
|
| 121 | 125 |
bs_instance = ByteStreamInstance(storage)
|
| 122 |
- servicer = ByteStreamService(server, {instance: bs_instance})
|
|
| 126 |
+ servicer = ByteStreamService(server)
|
|
| 127 |
+ servicer.add_instance(instance, bs_instance)
|
|
| 123 | 128 |
|
| 124 | 129 |
resource_name = ""
|
| 125 | 130 |
if instance != "":
|
| ... | ... | @@ -142,8 +147,10 @@ def test_bytestream_write(mocked, instance, extra_data): |
| 142 | 147 |
@mock.patch.object(service, 'bytestream_pb2_grpc', autospec=True)
|
| 143 | 148 |
def test_bytestream_write_rejects_wrong_hash(mocked):
|
| 144 | 149 |
storage = SimpleStorage()
|
| 150 |
+ |
|
| 145 | 151 |
bs_instance = ByteStreamInstance(storage)
|
| 146 |
- servicer = ByteStreamService(server, {"": bs_instance})
|
|
| 152 |
+ servicer = ByteStreamService(server)
|
|
| 153 |
+ servicer.add_instance("", bs_instance)
|
|
| 147 | 154 |
|
| 148 | 155 |
data = b'some data'
|
| 149 | 156 |
wrong_hash = HASH(b'incorrect').hexdigest()
|
| ... | ... | @@ -163,7 +170,9 @@ def test_bytestream_write_rejects_wrong_hash(mocked): |
| 163 | 170 |
def test_cas_find_missing_blobs(mocked, instance):
|
| 164 | 171 |
storage = SimpleStorage([b'abc', b'def'])
|
| 165 | 172 |
cas_instance = ContentAddressableStorageInstance(storage)
|
| 166 |
- servicer = ContentAddressableStorageService(server, {instance: cas_instance})
|
|
| 173 |
+ servicer = ContentAddressableStorageService(server)
|
|
| 174 |
+ servicer.add_instance(instance, cas_instance)
|
|
| 175 |
+ |
|
| 167 | 176 |
digests = [
|
| 168 | 177 |
re_pb2.Digest(hash=HASH(b'def').hexdigest(), size_bytes=3),
|
| 169 | 178 |
re_pb2.Digest(hash=HASH(b'ghij').hexdigest(), size_bytes=4)
|
| ... | ... | @@ -178,8 +187,10 @@ def test_cas_find_missing_blobs(mocked, instance): |
| 178 | 187 |
@mock.patch.object(service, 'remote_execution_pb2_grpc', autospec=True)
|
| 179 | 188 |
def test_cas_batch_update_blobs(mocked, instance):
|
| 180 | 189 |
storage = SimpleStorage()
|
| 190 |
+ |
|
| 181 | 191 |
cas_instance = ContentAddressableStorageInstance(storage)
|
| 182 |
- servicer = ContentAddressableStorageService(server, {instance: cas_instance})
|
|
| 192 |
+ servicer = ContentAddressableStorageService(server)
|
|
| 193 |
+ servicer.add_instance(instance, cas_instance)
|
|
| 183 | 194 |
|
| 184 | 195 |
update_requests = [
|
| 185 | 196 |
re_pb2.BatchUpdateBlobsRequest.Request(
|
| ... | ... | @@ -69,9 +69,13 @@ class MockStubServer: |
| 69 | 69 |
instances = {"": MockCASStorage(), "dna": MockCASStorage()}
|
| 70 | 70 |
self._requests = []
|
| 71 | 71 |
with mock.patch.object(service, 'bytestream_pb2_grpc'):
|
| 72 |
- self._bs_service = service.ByteStreamService(server, instances)
|
|
| 72 |
+ self._bs_service = service.ByteStreamService(server)
|
|
| 73 |
+ for k, v in instances.items():
|
|
| 74 |
+ self._bs_service.add_instance(k, v)
|
|
| 73 | 75 |
with mock.patch.object(service, 'remote_execution_pb2_grpc'):
|
| 74 |
- self._cas_service = service.ContentAddressableStorageService(server, instances)
|
|
| 76 |
+ self._cas_service = service.ContentAddressableStorageService(server)
|
|
| 77 |
+ for k, v in instances.items():
|
|
| 78 |
+ self._cas_service.add_instance(k, v)
|
|
| 75 | 79 |
|
| 76 | 80 |
def Read(self, request):
|
| 77 | 81 |
yield from self._bs_service.Read(request, context)
|
| ... | ... | @@ -127,7 +131,7 @@ def any_storage(request): |
| 127 | 131 |
with mock.patch.object(remote, 'bytestream_pb2_grpc'):
|
| 128 | 132 |
with mock.patch.object(remote, 'remote_execution_pb2_grpc'):
|
| 129 | 133 |
mock_server = MockStubServer()
|
| 130 |
- storage = remote.RemoteStorage(instance, "")
|
|
| 134 |
+ storage = remote.RemoteStorage(None, "")
|
|
| 131 | 135 |
storage._stub_bs = mock_server
|
| 132 | 136 |
storage._stub_cas = mock_server
|
| 133 | 137 |
yield storage
|
| ... | ... | @@ -52,9 +52,11 @@ def cache_instances(cas): |
| 52 | 52 |
|
| 53 | 53 |
def test_simple_action_result(cache_instances, context):
|
| 54 | 54 |
with mock.patch.object(service, 'remote_execution_pb2_grpc'):
|
| 55 |
- ac_service = ActionCacheService(server, cache_instances)
|
|
| 55 |
+ ac_service = ActionCacheService(server)
|
|
| 56 |
+ |
|
| 57 |
+ for k, v in cache_instances.items():
|
|
| 58 |
+ ac_service.add_instance(k, v)
|
|
| 56 | 59 |
|
| 57 |
- print(cache_instances)
|
|
| 58 | 60 |
action_digest = remote_execution_pb2.Digest(hash='sample', size_bytes=4)
|
| 59 | 61 |
|
| 60 | 62 |
# Check that before adding the ActionResult, attempting to fetch it fails
|
| ... | ... | @@ -78,7 +80,8 @@ def test_simple_action_result(cache_instances, context): |
| 78 | 80 |
def test_disabled_update_action_result(context):
|
| 79 | 81 |
disabled_push = ActionCache(cas, 50, False)
|
| 80 | 82 |
with mock.patch.object(service, 'remote_execution_pb2_grpc'):
|
| 81 |
- ac_service = ActionCacheService(server, {"": disabled_push})
|
|
| 83 |
+ ac_service = ActionCacheService(server)
|
|
| 84 |
+ ac_service.add_instance("", disabled_push)
|
|
| 82 | 85 |
|
| 83 | 86 |
request = remote_execution_pb2.UpdateActionResultRequest(instance_name='')
|
| 84 | 87 |
ac_service.UpdateActionResult(request, context)
|
| ... | ... | @@ -59,7 +59,10 @@ def controller(): |
| 59 | 59 |
def instance(controller):
|
| 60 | 60 |
instances = {"": controller.bots_interface}
|
| 61 | 61 |
with mock.patch.object(service, 'bots_pb2_grpc'):
|
| 62 |
- yield BotsService(server, instances)
|
|
| 62 |
+ bots_service = BotsService(server)
|
|
| 63 |
+ for k, v in instances.items():
|
|
| 64 |
+ bots_service.add_instance(k, v)
|
|
| 65 |
+ yield bots_service
|
|
| 63 | 66 |
|
| 64 | 67 |
|
| 65 | 68 |
def test_create_bot_session(bot_session, context, instance):
|
| ... | ... | @@ -58,9 +58,10 @@ def controller(request): |
| 58 | 58 |
# Instance to test
|
| 59 | 59 |
@pytest.fixture
|
| 60 | 60 |
def instance(controller):
|
| 61 |
- instances = {"": controller.execution_instance}
|
|
| 62 | 61 |
with mock.patch.object(service, 'remote_execution_pb2_grpc'):
|
| 63 |
- yield ExecutionService(server, instances)
|
|
| 62 |
+ execution_service = ExecutionService(server)
|
|
| 63 |
+ execution_service.add_instance("", controller.execution_instance)
|
|
| 64 |
+ yield execution_service
|
|
| 64 | 65 |
|
| 65 | 66 |
|
| 66 | 67 |
@pytest.mark.parametrize("skip_cache_lookup", [True, False])
|
| ... | ... | @@ -63,9 +63,11 @@ def controller(): |
| 63 | 63 |
# Instance to test
|
| 64 | 64 |
@pytest.fixture
|
| 65 | 65 |
def instance(controller):
|
| 66 |
- instances = {instance_name: controller.operations_instance}
|
|
| 67 | 66 |
with mock.patch.object(service, 'operations_pb2_grpc'):
|
| 68 |
- yield OperationsService(server, instances)
|
|
| 67 |
+ operation_service = OperationsService(server)
|
|
| 68 |
+ operation_service.add_instance(instance_name, controller.operations_instance)
|
|
| 69 |
+ |
|
| 70 |
+ yield operation_service
|
|
| 69 | 71 |
|
| 70 | 72 |
|
| 71 | 73 |
# Queue an execution, get operation corresponding to that request
|
| ... | ... | @@ -52,9 +52,10 @@ def cache(cas): |
| 52 | 52 |
|
| 53 | 53 |
@pytest.fixture
|
| 54 | 54 |
def instance(cache):
|
| 55 |
- instances = {instance_name: cache}
|
|
| 56 | 55 |
with mock.patch.object(service, 'buildstream_pb2_grpc'):
|
| 57 |
- yield ReferenceStorageService(server, instances)
|
|
| 56 |
+ ref_service = ReferenceStorageService(server)
|
|
| 57 |
+ ref_service.add_instance(instance_name, cache)
|
|
| 58 |
+ yield ref_service
|
|
| 58 | 59 |
|
| 59 | 60 |
|
| 60 | 61 |
def test_simple_result(instance, context):
|
| ... | ... | @@ -83,7 +84,8 @@ def test_disabled_update_result(context): |
| 83 | 84 |
keys = ["rick", "roy", "rach"]
|
| 84 | 85 |
|
| 85 | 86 |
with mock.patch.object(service, 'buildstream_pb2_grpc'):
|
| 86 |
- instance = ReferenceStorageService(server, {'': disabled_push})
|
|
| 87 |
+ instance = ReferenceStorageService(server)
|
|
| 88 |
+ instance.add_instance(instance_name, disabled_push)
|
|
| 87 | 89 |
|
| 88 | 90 |
# Add an ReferenceResult to the cache
|
| 89 | 91 |
reference_result = remote_execution_pb2.Digest(hash='deckard')
|
| ... | ... | @@ -101,7 +103,8 @@ def test_disabled_update_result(context): |
| 101 | 103 |
def test_status(allow_updates, context):
|
| 102 | 104 |
cache = ReferenceCache(cas, 5, allow_updates)
|
| 103 | 105 |
with mock.patch.object(service, 'buildstream_pb2_grpc'):
|
| 104 |
- instance = ReferenceStorageService(server, {'': cache})
|
|
| 106 |
+ instance = ReferenceStorageService(server)
|
|
| 107 |
+ instance.add_instance("", cache)
|
|
| 105 | 108 |
|
| 106 | 109 |
request = buildstream_pb2.StatusRequest()
|
| 107 | 110 |
response = instance.Status(request, context)
|
