finn pushed to branch jmac/expand-user-for-tls at BuildGrid / buildgrid
Commits:

- a7f2ea44 by finnball at 2018-09-12T15:51:38Z
- 142d7ef2 by finnball at 2018-09-12T15:51:42Z
- 2e426480 by finnball at 2018-09-12T15:51:42Z
- 8564f37c by finnball at 2018-09-12T15:51:42Z
- eb959e27 by finnball at 2018-09-12T15:55:02Z
- becd3571 by finnball at 2018-09-14T07:55:57Z
- f7052140 by finnball at 2018-09-17T13:40:08Z
- 53257bac by Laurence Urhegyi at 2018-09-17T17:26:05Z
- f8e2b7c0 by Laurence Urhegyi at 2018-09-17T17:34:59Z
- 37e735e5 by Laurence Urhegyi at 2018-09-17T17:39:20Z
- f2eb355a by finnball at 2018-09-20T08:55:09Z

20 changed files:
- README.rst
- buildgrid/_app/commands/cmd_server.py
- + buildgrid/_app/settings/__init__.py
- + buildgrid/_app/settings/cas.yml
- buildgrid/_app/settings/default.yml
- buildgrid/_app/settings/parser.py
- + buildgrid/_app/settings/remote-storage.yml
- buildgrid/bot/bot_session.py
- buildgrid/server/actioncache/service.py
- buildgrid/server/bots/instance.py
- buildgrid/server/bots/service.py
- buildgrid/server/cas/service.py
- buildgrid/server/cas/storage/remote.py
- buildgrid/server/execution/instance.py
- buildgrid/server/execution/service.py
- buildgrid/server/operations/instance.py
- buildgrid/server/operations/service.py
- buildgrid/server/referencestorage/service.py
- docs/source/index.rst
- tests/cas/test_storage.py
Changes:

README.rst:

-
 .. _about:
 
-About
-=====
+
 
 .. image:: https://gitlab.com/Buildgrid/buildgrid/badges/master/pipeline.svg
    :target: https://gitlab.com/BuildStream/buildstream/commits/master
 
 .. image:: https://gitlab.com/BuildGrid/buildgrid/badges/master/coverage.svg?job=coverage
    :target: https://buildgrid.gitlab.io/buildgrid/coverage
+
+About BuildGrid
+===============
+
+What is BuildGrid?
+------------------
 
 BuildGrid is a Python remote execution service which implements Google's
 `Remote Execution API`_ and the `Remote Workers API`_. The project's goal is to
 be able to execute build jobs remotely on a grid of computers in order to
 massively speed up build times. Workers on the grid should be able to run with
-different environments. It is designed to work with but not exclusively
+different environments. It is designed to work with clients such as `Bazel`_ and
 `BuildStream`_.
 
 .. _Remote Execution API: https://github.com/bazelbuild/remote-apis
 .. _Remote Workers API: https://docs.google.com/document/d/1s_AzRRD2mdyktKUj2HWBn99rMg_3tcPvdjx3MPbFidU/edit#heading=h.1u2taqr2h940
 .. _BuildStream: https://wiki.gnome.org/Projects/BuildStream
+.. _Bazel: https://bazel.build
 
 
 .. _getting-started:
@@ -40,10 +45,15 @@ instructions.
 Resources
 ---------
 
-- Homepage: https://buildgrid.build
-- GitLab repository: https://gitlab.com/BuildGrid/buildgrid
-- Bug tracking: https://gitlab.com/BuildGrid/buildgrid/issues
-- Mailing list: https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid
-- Slack channel: https://buildteamworld.slack.com/messages/CC9MKC203 [`invite link`_]
-
+- `Homepage`_
+- `GitLab repository`_
+- `Bug tracking`_
+- `Mailing list`_
+- `Slack channel`_ [`invite link`_]
+
+.. _Homepage: https://buildgrid.build
+.. _GitLab repository: https://gitlab.com/BuildGrid/buildgrid
+.. _Bug tracking: https://gitlab.com/BuildGrid/buildgrid/issues
+.. _Mailing list: https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid
+.. _Slack channel: https://buildteamworld.slack.com/messages/CC9MKC203
 .. _invite link: https://join.slack.com/t/buildteamworld/shared_invite/enQtMzkxNzE0MDMyMDY1LTRmZmM1OWE0OTFkMGE1YjU5Njc4ODEzYjc0MGMyOTM5ZTQ5MmE2YTQ1MzQwZDc5MWNhODY1ZmRkZTE4YjFhNjU

buildgrid/_app/commands/cmd_server.py:

@@ -49,41 +49,48 @@ def start(context, config):
     with open(config) as f:
         settings = parser.get_parser().safe_load(f)
 
-    server_settings = settings['server']
-    insecure_mode = server_settings['insecure-mode']
-
-    credentials = None
-    if not insecure_mode:
-        server_key = server_settings['tls-server-key']
-        server_cert = server_settings['tls-server-cert']
-        client_certs = server_settings['tls-client-certs']
-        credentials = context.load_server_credentials(server_key, server_cert, client_certs)
-
-        if not credentials:
-            click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
-                       "Set `insecure-mode: false` in order to deactivate TLS encryption.\n", err=True)
-            sys.exit(-1)
-
-    instances = settings['instances']
-
-    execution_controllers = _instance_maker(instances, ExecutionController)
-
-    execution_instances = {}
-    bots_interfaces = {}
-    operations_instances = {}
-
-    # TODO: map properly in parser
-    for k, v in execution_controllers.items():
-        execution_instances[k] = v.execution_instance
-        bots_interfaces[k] = v.bots_interface
-        operations_instances[k] = v.operations_instance
-
-    reference_caches = _instance_maker(instances, ReferenceCache)
-    action_caches = _instance_maker(instances, ActionCache)
-    cas = _instance_maker(instances, ContentAddressableStorageInstance)
-    bytestreams = _instance_maker(instances, ByteStreamInstance)
+    try:
+        server_settings = settings['server']
+        insecure_mode = server_settings['insecure-mode']
+
+        credentials = None
+        if not insecure_mode:
+            credential_settings = server_settings['credentials']
+            server_key = credential_settings['tls-server-key']
+            server_cert = credential_settings['tls-server-cert']
+            client_certs = credential_settings['tls-client-certs']
+            credentials = context.load_server_credentials(server_key, server_cert, client_certs)
+
+            if not credentials:
+                click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
+                           "Set `insecure-mode: false` in order to deactivate TLS encryption.\n", err=True)
+                sys.exit(-1)
+
+        port = server_settings['port']
+        instances = settings['instances']
+
+        execution_controllers = _instance_maker(instances, ExecutionController)
+
+        execution_instances = {}
+        bots_interfaces = {}
+        operations_instances = {}
+
+        # TODO: map properly in parser
+        # Issue 82
+        for k, v in execution_controllers.items():
+            execution_instances[k] = v.execution_instance
+            bots_interfaces[k] = v.bots_interface
+            operations_instances[k] = v.operations_instance
+
+        reference_caches = _instance_maker(instances, ReferenceCache)
+        action_caches = _instance_maker(instances, ActionCache)
+        cas = _instance_maker(instances, ContentAddressableStorageInstance)
+        bytestreams = _instance_maker(instances, ByteStreamInstance)
+
+    except KeyError as e:
+        click.echo("ERROR: Could not parse config: {}.\n".format(str(e)), err=True)
+        sys.exit(-1)
 
-    port = server_settings['port']
     server = BuildGridServer(port=port,
                              credentials=credentials,
                              execution_instances=execution_instances,

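The whole configuration walk is now wrapped in a single try/except KeyError, so a
missing key produces one readable error instead of a traceback. A minimal sketch of
the failure mode this guards against (not BuildGrid code; the dictionary shape is
assumed to mirror default.yml):

    import sys

    # 'credentials' block deliberately left out to simulate a malformed config.
    settings = {'server': {'port': 50051, 'insecure-mode': False}}

    try:
        server_key = settings['server']['credentials']['tls-server-key']
    except KeyError as e:
        # One clean message for the user rather than a raw KeyError traceback.
        print("ERROR: Could not parse config: {}.".format(str(e)), file=sys.stderr)
        sys.exit(-1)
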
buildgrid/_app/settings/cas.yml (new file):

+server:
+  port: 50052
+  insecure-mode: true
+  credentials:
+    tls-server-key: null
+    tls-server-cert: null
+    tls-client-certs: null
+
+description: |
+  Just a CAS with some reference storage.
+
+instances:
+  - name: main
+    description: |
+      The main server
+
+    storages:
+      - !disk-storage &main-storage
+        path: ~/cas/
+
+    services:
+      - !cas
+        storage: *main-storage
+
+      - !bytestream
+        storage: *main-storage
+
+      - !reference-cache
+        storage: *main-storage
+        max_cached_refs: 256
+        allow_updates: true

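The &main-storage / *main-storage pair in this file is plain YAML anchor/alias
syntax: every service entry resolves to the one storage definition. A standalone
sketch of that behaviour using generic PyYAML mappings (not BuildGrid's tagged
loader; names are illustrative):

    import yaml

    doc = "\n".join([
        "storages:",
        "  - &main-storage {path: ~/cas/}",
        "services:",
        "  - {name: cas, storage: *main-storage}",
        "  - {name: bytestream, storage: *main-storage}",
    ])

    data = yaml.safe_load(doc)
    # Both services point at the very same parsed storage object.
    assert data['services'][0]['storage'] is data['services'][1]['storage']
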
buildgrid/_app/settings/default.yml:

 server:
   port: 50051
-  tls-server-key: null
-  tls-server-cert: null
-  tls-client-certs: null
   insecure-mode: true
+  credentials:
+    tls-server-key: null
+    tls-server-cert: null
+    tls-client-certs: null
 
 description: |
   A single default instance

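The TLS entries move one level down into a credentials block, matching the
credential_settings = server_settings['credentials'] lookup added in cmd_server.py
above. A small illustrative sketch of the before/after key path (values made up):

    settings = {
        'server': {
            'port': 50051,
            'insecure-mode': True,
            'credentials': {
                'tls-server-key': None,
                'tls-server-cert': None,
                'tls-client-certs': None,
            },
        },
    }

    # Old layout:  settings['server']['tls-server-key']
    # New layout:
    server_key = settings['server']['credentials']['tls-server-key']
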
buildgrid/_app/settings/parser.py:

@@ -14,17 +14,25 @@
 
 
 import os
+import sys
+from urllib.parse import urlparse
 
+import click
+import grpc
 import yaml
 
 from buildgrid.server.controller import ExecutionController
 from buildgrid.server.actioncache.storage import ActionCache
+from buildgrid.server.referencestorage.storage import ReferenceCache
 from buildgrid.server.cas.instance import ByteStreamInstance, ContentAddressableStorageInstance
 from buildgrid.server.cas.storage.disk import DiskStorage
 from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
+from buildgrid.server.cas.storage.remote import RemoteStorage
 from buildgrid.server.cas.storage.s3 import S3Storage
 from buildgrid.server.cas.storage.with_cache import WithCacheStorage
 
+from ..cli import Context
+
 
 class YamlFactory(yaml.YAMLObject):
     @classmethod
@@ -33,6 +41,15 @@ class YamlFactory(yaml.YAMLObject):
         return cls(**values)
 
 
+class Path(YamlFactory):
+
+    yaml_tag = u'!path'
+
+    def __new__(cls, path):
+        path = os.path.expanduser(path)
+        return path
+
+
 class Disk(YamlFactory):
 
     yaml_tag = u'!disk-storage'
@@ -58,6 +75,47 @@ class S3(YamlFactory):
         return S3Storage(bucket, endpoint_url=endpoint)
 
 
+class Remote(YamlFactory):
+
+    yaml_tag = u'!remote-storage'
+
+    def __new__(cls, url, instance_name, credentials=None):
+        # TODO: Context could be passed into the parser.
+        # Also find way to get instance_name from parent
+        # Issue 82
+        context = Context()
+
+        url = urlparse(url)
+        remote = '{}:{}'.format(url.hostname, url.port or 50051)
+
+        channel = None
+        if url.scheme == 'http':
+            channel = grpc.insecure_channel(remote)
+
+        else:
+            if not credentials:
+                click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
+                           "Set remote url scheme to `http` in order to deactivate" +
+                           "TLS encryption.\n", err=True)
+                sys.exit(-1)
+
+            client_key = credentials['tls-client-key']
+            client_cert = credentials['tls-client-cert']
+            server_cert = credentials['tls-server-cert']
+            credentials = context.load_client_credentials(client_key,
+                                                          client_cert,
+                                                          server_cert)
+            if not credentials:
+                click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
+                           "Set remote url scheme to `http` in order to deactivate" +
+                           "TLS encryption.\n", err=True)
+                sys.exit(-1)
+
+            channel = grpc.secure_channel(remote, credentials)
+
+        return RemoteStorage(channel, instance_name)
+
+
 class WithCache(YamlFactory):
 
     yaml_tag = u'!with-cache-storage'
@@ -78,10 +136,18 @@ class Action(YamlFactory):
 
     yaml_tag = u'!action-cache'
 
-    def __new__(cls, storage, max_cached_refs=0, allow_updates=True):
+    def __new__(cls, storage, max_cached_refs, allow_updates=True):
         return ActionCache(storage, max_cached_refs, allow_updates)
 
 
+class Reference(YamlFactory):
+
+    yaml_tag = u'!reference-cache'
+
+    def __new__(cls, storage, max_cached_refs, allow_updates=True):
+        return ReferenceCache(storage, max_cached_refs, allow_updates)
+
+
 class CAS(YamlFactory):
 
     yaml_tag = u'!cas'
@@ -112,12 +178,14 @@ def _parse_size(size):
 
 def get_parser():
 
-    yaml.SafeLoader.add_constructor(Execution.yaml_tag, Execution.from_yaml)
+    yaml.SafeLoader.add_constructor(Path.yaml_tag, Path.from_yaml)
     yaml.SafeLoader.add_constructor(Execution.yaml_tag, Execution.from_yaml)
    yaml.SafeLoader.add_constructor(Action.yaml_tag, Action.from_yaml)
+    yaml.SafeLoader.add_constructor(Reference.yaml_tag, Reference.from_yaml)
     yaml.SafeLoader.add_constructor(Disk.yaml_tag, Disk.from_yaml)
     yaml.SafeLoader.add_constructor(LRU.yaml_tag, LRU.from_yaml)
     yaml.SafeLoader.add_constructor(S3.yaml_tag, S3.from_yaml)
+    yaml.SafeLoader.add_constructor(Remote.yaml_tag, Remote.from_yaml)
     yaml.SafeLoader.add_constructor(WithCache.yaml_tag, WithCache.from_yaml)
     yaml.SafeLoader.add_constructor(CAS.yaml_tag, CAS.from_yaml)
     yaml.SafeLoader.add_constructor(ByteStream.yaml_tag, ByteStream.from_yaml)

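The new !path tag registered in get_parser() is the point of this branch
(jmac/expand-user-for-tls): values such as ~/keys/server.key are run through
os.path.expanduser() at parse time, before they reach the TLS credential loading.
A hedged, standalone sketch of the same idea using a plain constructor function
rather than BuildGrid's YamlFactory classes (the key name is illustrative):

    import os

    import yaml

    def _path_constructor(loader, node):
        # Expand '~' and '~user' the moment the scalar is parsed.
        return os.path.expanduser(loader.construct_scalar(node))

    yaml.SafeLoader.add_constructor(u'!path', _path_constructor)

    print(yaml.safe_load('tls-server-key: !path ~/keys/server.key'))
    # e.g. {'tls-server-key': '/home/alice/keys/server.key'}
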
buildgrid/_app/settings/remote-storage.yml (new file):

+server:
+  port: 50051
+  insecure-mode: true
+  credentials:
+    tls-server-key: null
+    tls-server-cert: null
+    tls-client-certs: null
+
+
+description: |
+  A single default instance with remote storage.
+
+instances:
+  - name: main
+    description: |
+      The main server
+
+    storages:
+      - !remote-storage &main-storage
+        url: "http://localhost:50052"
+        instance_name: main
+        credentials:
+          tls-client-key: null
+          tls-client-cert: null
+          tls-server-cert: null
+
+    services:
+      - !action-cache &main-action
+        storage: *main-storage
+        max_cached_refs: 256
+        allow_updates: true
+
+      - !execution
+        storage: *main-storage
+        action_cache: *main-action
+
+      - !cas
+        storage: *main-storage
+
+      - !bytestream
+        storage: *main-storage

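This configuration points its !remote-storage backend at the standalone CAS server
defined in cas.yml above (port 50052), so execution services and storage can run as
two separate processes. A small sketch of how the Remote factory in parser.py turns
the configured URL into a gRPC target (the second URL is made up for illustration):

    from urllib.parse import urlparse

    for configured in ('http://localhost:50052', 'https://cas.example.com'):
        url = urlparse(configured)
        target = '{}:{}'.format(url.hostname, url.port or 50051)  # default port
        secure = url.scheme != 'http'
        print(configured, '->', target, 'secure' if secure else 'insecure')
    # http://localhost:50052 -> localhost:50052 insecure
    # https://cas.example.com -> cas.example.com:50051 secure
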
buildgrid/bot/bot_session.py:

@@ -99,13 +99,13 @@ class BotSession:
         session = self._interface.create_bot_session(self._parent, self.get_pb2())
         self._name = session.name
 
-        self.logger.info("Created bot session with name: {}".format(self._name))
+        self.logger.info("Created bot session with name: [{}]".format(self._name))
 
         for lease in session.leases:
             self._update_lease_from_server(lease)
 
     def update_bot_session(self):
-        self.logger.debug("Updating bot session: {}".format(self._bot_id))
+        self.logger.debug("Updating bot session: [{}]".format(self._bot_id))
         session = self._interface.update_bot_session(self.get_pb2())
         for k, v in list(self._leases.items()):
             if v.state == LeaseState.COMPLETED.value:
@@ -141,12 +141,12 @@ class BotSession:
             asyncio.ensure_future(self.create_work(lease))
 
     async def create_work(self, lease):
-        self.logger.debug("Work created: {}".format(lease.id))
+        self.logger.debug("Work created: [{}]".format(lease.id))
 
         loop = asyncio.get_event_loop()
         lease = await loop.run_in_executor(None, self._work, self._context, lease)
 
-        self.logger.debug("Work complete: {}".format(lease.id))
+        self.logger.debug("Work complete: [{}]".format(lease.id))
         self.lease_completed(lease)
 
 
@@ -161,14 +161,14 @@ class Worker:
             if k == 'pool':
                 self.properties[k] = v
             else:
-                raise KeyError('Key not supported: {}'.format(k))
+                raise KeyError('Key not supported: [{}]'.format(k))
 
         if configs:
             for k, v in configs.items():
                 if k == 'DockerImage':
                     self.configs[k] = v
                 else:
-                    raise KeyError('Key not supported: {}'.format(k))
+                    raise KeyError('Key not supported: [{}]'.format(k))
 
     @property
     def configs(self):
@@ -214,11 +214,11 @@ class Device:
 
             elif k == 'docker':
                 if v not in ('True', 'False'):
-                    raise ValueError('Value not supported: {}'.format(v))
+                    raise ValueError('Value not supported: [{}]'.format(v))
                 self._properties[k] = v
 
             else:
-                raise KeyError('Key not supported: {}'.format(k))
+                raise KeyError('Key not supported: [{}]'.format(k))
 
     @property
     def name(self):

buildgrid/server/actioncache/service.py:

@@ -77,4 +77,4 @@ class ActionCacheService(remote_execution_pb2_grpc.ActionCacheServicer):
             return self._instances[instance_name]
 
         except KeyError:
-            raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
+            raise InvalidArgumentError("Invalid instance name: [{}]".format(instance_name))

buildgrid/server/bots/instance.py:

@@ -60,7 +60,7 @@ class BotsInterface:
 
         self._bot_ids[name] = bot_id
         self._bot_sessions[name] = bot_session
-        self.logger.info("Created bot session name={} with bot_id={}".format(name, bot_id))
+        self.logger.info("Created bot session name=[{}] with bot_id=[{}]".format(name, bot_id))
 
         for lease in self._scheduler.create_leases():
             bot_session.leases.extend([lease])
@@ -92,7 +92,7 @@ class BotsInterface:
         try:
             server_lease = self._scheduler.get_job_lease(client_lease.id)
         except KeyError:
-            raise InvalidArgumentError("Lease not found on server: {}".format(client_lease))
+            raise InvalidArgumentError("Lease not found on server: [{}]".format(client_lease))
 
         server_state = LeaseState(server_lease.state)
         client_state = LeaseState(client_lease.state)
@@ -105,7 +105,7 @@ class BotsInterface:
                 # TODO: Lease was rejected
                 raise NotImplementedError("'Not Accepted' is unsupported")
             else:
-                raise OutofSyncError("Server lease: {}. Client lease: {}".format(server_lease, client_lease))
+                raise OutofSyncError("Server lease: [{}]. Client lease: [{}]".format(server_lease, client_lease))
 
         elif server_state == LeaseState.ACTIVE:
 
@@ -118,10 +118,10 @@ class BotsInterface:
                 return None
 
             else:
-                raise OutofSyncError("Server lease: {}. Client lease: {}".format(server_lease, client_lease))
+                raise OutofSyncError("Server lease: [{}]. Client lease: [{}]".format(server_lease, client_lease))
 
         elif server_state == LeaseState.COMPLETED:
-            raise OutofSyncError("Server lease: {}. Client lease: {}".format(server_lease, client_lease))
+            raise OutofSyncError("Server lease: [{}]. Client lease: [{}]".format(server_lease, client_lease))
 
         elif server_state == LeaseState.CANCELLED:
             raise NotImplementedError("Cancelled states not supported yet")
@@ -138,19 +138,19 @@ class BotsInterface:
         if name is not None:
             _bot_id = self._bot_ids.get(name)
             if _bot_id is None:
-                raise InvalidArgumentError('Name not registered on server: {}'.format(name))
+                raise InvalidArgumentError('Name not registered on server: [{}]'.format(name))
             elif _bot_id != bot_id:
                 self._close_bot_session(name)
                 raise InvalidArgumentError(
-                    'Bot id invalid. ID sent: {} with name: {}.'
-                    'ID registered: {} for that name'.format(bot_id, name, _bot_id))
+                    'Bot id invalid. ID sent: [{}] with name: [{}].'
+                    'ID registered: [{}] for that name'.format(bot_id, name, _bot_id))
         else:
             for _name, _bot_id in self._bot_ids.items():
                 if bot_id == _bot_id:
                     self._close_bot_session(_name)
                     raise InvalidArgumentError(
-                        'Bot id already registered. ID sent: {}.'
-                        'Id registered: {} with name: {}'.format(bot_id, _bot_id, _name))
+                        'Bot id already registered. ID sent: [{}].'
+                        'Id registered: [{}] with name: [{}]'.format(bot_id, _bot_id, _name))
 
     def _close_bot_session(self, name):
         """ Before removing the session, close any leases and
@@ -159,14 +159,14 @@ class BotsInterface:
         bot_id = self._bot_ids.get(name)
 
         if bot_id is None:
-            raise InvalidArgumentError("Bot id does not exist: {}".format(name))
+            raise InvalidArgumentError("Bot id does not exist: [{}]".format(name))
 
-        self.logger.debug("Attempting to close {} with name: {}".format(bot_id, name))
+        self.logger.debug("Attempting to close [{}] with name: [{}]".format(bot_id, name))
         for lease in self._bot_sessions[name].leases:
             if lease.state != LeaseState.COMPLETED.value:
                 # TODO: Be wary here, may need to handle rejected leases in future
                 self._scheduler.retry_job(lease.id)
 
-        self.logger.debug("Closing bot session: {}".format(name))
+        self.logger.debug("Closing bot session: [{}]".format(name))
         self._bot_ids.pop(name)
-        self.logger.info("Closed bot {} with name: {}".format(bot_id, name))
+        self.logger.info("Closed bot [{}] with name: [{}]".format(bot_id, name))

buildgrid/server/bots/service.py:

@@ -90,4 +90,4 @@ class BotsService(bots_pb2_grpc.BotsServicer):
             return self._instances[name]
 
         except KeyError:
-            raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
+            raise InvalidArgumentError("Instance doesn't exist on server: [{}]".format(name))

buildgrid/server/cas/service.py:

@@ -70,7 +70,7 @@ class ContentAddressableStorageService(remote_execution_pb2_grpc.ContentAddressa
             return self._instances[instance_name]
 
         except KeyError:
-            raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
+            raise InvalidArgumentError("Invalid instance name: [{}]".format(instance_name))
 
 
 class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
@@ -89,15 +89,15 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
         # TODO: Decide on default instance name
         if path[0] == "blobs":
             if len(path) < 3 or not path[2].isdigit():
-                raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                raise InvalidArgumentError("Invalid resource name: [{}]".format(request.resource_name))
             instance_name = ""
 
         elif path[1] == "blobs":
             if len(path) < 4 or not path[3].isdigit():
-                raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                raise InvalidArgumentError("Invalid resource name: [{}]".format(request.resource_name))
 
         else:
-            raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+            raise InvalidArgumentError("Invalid resource name: [{}]".format(request.resource_name))
 
         instance = self._get_instance(instance_name)
         yield from instance.read(path,
@@ -134,15 +134,15 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
         # TODO: Sort out no instance name
         if path[0] == "uploads":
             if len(path) < 5 or path[2] != "blobs" or not path[4].isdigit():
-                raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                raise InvalidArgumentError("Invalid resource name: [{}]".format(first_request.resource_name))
             instance_name = ""
 
         elif path[1] == "uploads":
             if len(path) < 6 or path[3] != "blobs" or not path[5].isdigit():
-                raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                raise InvalidArgumentError("Invalid resource name: [{}]".format(first_request.resource_name))
 
         else:
-            raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+            raise InvalidArgumentError("Invalid resource name: [{}]".format(first_request.resource_name))
 
         instance = self._get_instance(instance_name)
         return instance.write(requests)
@@ -169,4 +169,4 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
             return self._instances[instance_name]
 
         except KeyError:
-            raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
+            raise InvalidArgumentError("Invalid instance name: [{}]".format(instance_name))

buildgrid/server/cas/storage/remote.py:

@@ -23,6 +23,8 @@ Forwwards storage requests to a remote storage.
 import io
 import logging
 
+import grpc
+
 from buildgrid.utils import gen_fetch_blob, gen_write_request_blob
 from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
@@ -32,7 +34,7 @@ from .storage_abc import StorageABC
 
 class RemoteStorage(StorageABC):
 
-    def __init__(self, channel, instance_name=""):
+    def __init__(self, channel, instance_name):
         self.logger = logging.getLogger(__name__)
         self._instance_name = instance_name
         self._stub_bs = bytestream_pb2_grpc.ByteStreamStub(channel)
@@ -44,18 +46,29 @@ class RemoteStorage(StorageABC):
         return False
 
     def get_blob(self, digest):
-        fetched_data = io.BytesIO()
-        length = 0
-        for data in gen_fetch_blob(self._stub_bs, digest, self._instance_name):
-            length += fetched_data.write(data)
-
-        if length:
-            assert digest.size_bytes == length
-            fetched_data.seek(0)
-            return fetched_data
-
-        else:
-            return None
+        try:
+            fetched_data = io.BytesIO()
+            length = 0
+
+            for data in gen_fetch_blob(self._stub_bs, digest, self._instance_name):
+                length += fetched_data.write(data)
+
+            if length:
+                assert digest.size_bytes == length
+                fetched_data.seek(0)
+                return fetched_data
+
+            else:
+                return None
+
+        except grpc.RpcError as e:
+            if e.code() == grpc.StatusCode.NOT_FOUND:
+                pass
+            else:
+                self.logger.error(e.details())
+                raise
+
+        return None
 
     def begin_write(self, digest):
         return io.BytesIO(digest.SerializeToString())

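The reworked get_blob() treats a NOT_FOUND answer from the remote CAS as an
ordinary cache miss and only escalates other gRPC failures. A generic sketch of
that pattern, with fetch() standing in for the ByteStream read (a hypothetical
callable, not a BuildGrid function):

    import logging

    import grpc

    logger = logging.getLogger(__name__)

    def get_or_none(fetch):
        try:
            return fetch()
        except grpc.RpcError as e:
            if e.code() == grpc.StatusCode.NOT_FOUND:
                return None  # a missing blob is a normal outcome
            logger.error(e.details())
            raise
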
buildgrid/server/execution/instance.py:

@@ -47,7 +47,7 @@ class ExecutionInstance:
         do_not_cache = action.do_not_cache
 
         job = Job(action_digest, do_not_cache, message_queue)
-        self.logger.info("Operation name: {}".format(job.name))
+        self.logger.info("Operation name: [{}]".format(job.name))
 
         self._scheduler.append_job(job, skip_cache_lookup)
 
@@ -58,14 +58,14 @@ class ExecutionInstance:
             self._scheduler.register_client(name, queue)
 
         except KeyError:
-            raise InvalidArgumentError("Operation name does not exist: {}".format(name))
+            raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
 
     def unregister_message_client(self, name, queue):
         try:
             self._scheduler.unregister_client(name, queue)
 
         except KeyError:
-            raise InvalidArgumentError("Operation name does not exist: {}".format(name))
+            raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
 
     def stream_operation_updates(self, message_queue, operation_name):
         operation = message_queue.get()

buildgrid/server/execution/service.py:

@@ -92,4 +92,4 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
             return self._instances[name]
 
         except KeyError:
-            raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
+            raise InvalidArgumentError("Instance doesn't exist on server: [{}]".format(name))

buildgrid/server/operations/instance.py:

@@ -34,7 +34,7 @@ class OperationsInstance:
         operation = self._scheduler.jobs.get(name)
 
         if operation is None:
-            raise InvalidArgumentError("Operation name does not exist: {}".format(name))
+            raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
 
         else:
             return operation.get_operation()
@@ -49,21 +49,21 @@ class OperationsInstance:
             self._scheduler.jobs.pop(name)
 
         except KeyError:
-            raise InvalidArgumentError("Operation name does not exist: {}".format(name))
+            raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
 
     def register_message_client(self, name, queue):
         try:
             self._scheduler.register_client(name, queue)
 
         except KeyError:
-            raise InvalidArgumentError("Operation name does not exist: {}".format(name))
+            raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
 
     def unregister_message_client(self, name, queue):
         try:
             self._scheduler.unregister_client(name, queue)
 
         except KeyError:
-            raise InvalidArgumentError("Operation name does not exist: {}".format(name))
+            raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
 
     def stream_operation_updates(self, message_queue, operation_name):
         operation = message_queue.get()

buildgrid/server/operations/service.py:

@@ -132,4 +132,4 @@ class OperationsService(operations_pb2_grpc.OperationsServicer):
             return self._instances[instance_name]
 
         except KeyError:
-            raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
+            raise InvalidArgumentError("Instance doesn't exist on server: [{}]".format(name))

buildgrid/server/referencestorage/service.py:

@@ -86,4 +86,4 @@ class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer):
             return self._instances[instance_name]
 
         except KeyError:
-            raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
+            raise InvalidArgumentError("Invalid instance name: [{}]".format(instance_name))

docs/source/index.rst:

@@ -24,10 +24,15 @@ Remote execution service implementing Google's REAPI and RWAPI.
 Resources
 ---------
 
-- Homepage: https://buildgrid.build
-- GitLab repository: https://gitlab.com/BuildGrid/buildgrid
-- Bug tracking: https://gitlab.com/BuildGrid/buildgrid/issues
-- Mailing list: https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid
-- Slack channel: https://buildteamworld.slack.com/messages/CC9MKC203 [`invite link`_]
-
+- `Homepage`_
+- `GitLab repository`_
+- `Bug tracking`_
+- `Mailing list`_
+- `Slack channel`_ [`invite link`_]
+
+.. _Homepage: https://buildgrid.build
+.. _GitLab repository: https://gitlab.com/BuildGrid/buildgrid
+.. _Bug tracking: https://gitlab.com/BuildGrid/buildgrid/issues
+.. _Mailing list: https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid
+.. _Slack channel: https://buildteamworld.slack.com/messages/CC9MKC203
 .. _invite link: https://join.slack.com/t/buildteamworld/shared_invite/enQtMzkxNzE0MDMyMDY1LTRmZmM1OWE0OTFkMGE1YjU5Njc4ODEzYjc0MGMyOTM5ZTQ5MmE2YTQ1MzQwZDc5MWNhODY1ZmRkZTE4YjFhNjU

tests/cas/test_storage.py:

@@ -98,17 +98,6 @@ def instance(params):
     return {params, MockCASStorage()}
 
 
-@pytest.fixture()
-@mock.patch.object(remote, 'bytestream_pb2_grpc')
-@mock.patch.object(remote, 'remote_execution_pb2_grpc')
-def remote_storage(mock_bs_grpc, mock_re_pb2_grpc):
-    mock_server = MockStubServer()
-    storage = remote.RemoteStorage(instance)
-    storage._stub_bs = mock_server
-    storage._stub_cas = mock_server
-    yield storage
-
-
 # General tests for all storage providers
 
 
@@ -138,7 +127,7 @@ def any_storage(request):
     with mock.patch.object(remote, 'bytestream_pb2_grpc'):
         with mock.patch.object(remote, 'remote_execution_pb2_grpc'):
             mock_server = MockStubServer()
-            storage = remote.RemoteStorage(instance)
+            storage = remote.RemoteStorage(instance, "")
             storage._stub_bs = mock_server
             storage._stub_cas = mock_server
             yield storage