finnball pushed to branch master at BuildGrid / buildgrid
Commits:
-
ce4ec1e0
by finn at 2018-08-21T09:10:51Z
-
f6cab5dd
by finn at 2018-08-21T11:02:45Z
-
83d79fd1
by finn at 2018-08-21T11:19:54Z
-
19bcb6a6
by finn at 2018-08-21T11:19:57Z
-
a931727f
by finn at 2018-08-21T12:42:08Z
26 changed files:
- app/commands/cmd_server.py
- + buildgrid/_protos/buildstream/__init__.py
- + buildgrid/_protos/buildstream/v2/__init__.py
- + buildgrid/_protos/buildstream/v2/buildstream.proto
- + buildgrid/_protos/buildstream/v2/buildstream_pb2.py
- + buildgrid/_protos/buildstream/v2/buildstream_pb2_grpc.py
- buildgrid/server/worker/_exceptions.py → buildgrid/server/_exceptions.py
- buildgrid/server/build_grid_server.py
- + buildgrid/server/cas/reference_cache.py
- + buildgrid/server/cas/reference_storage_service.py
- − buildgrid/server/execution/_exceptions.py
- + buildgrid/server/execution/action_cache.py
- buildgrid/server/execution/action_cache_service.py
- buildgrid/server/execution/execution_instance.py
- buildgrid/server/execution/execution_service.py
- buildgrid/server/execution/operations_service.py
- buildgrid/server/scheduler.py
- buildgrid/server/worker/bots_interface.py
- buildgrid/server/worker/bots_service.py
- docs/source/reference_cli.rst
- tests/action_cache.py
- tests/cas/test_storage.py
- tests/integration/action_cache_service.py
- tests/integration/execution_service.py
- tests/integration/operations_service.py
- + tests/integration/reference_storage_service.py
Changes:
... | ... | @@ -28,11 +28,11 @@ import logging |
28 | 28 |
import click
|
29 | 29 |
|
30 | 30 |
from buildgrid.server import build_grid_server
|
31 |
-from buildgrid.server.action_cache import ActionCache
|
|
32 | 31 |
from buildgrid.server.cas.storage.disk import DiskStorage
|
33 | 32 |
from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
|
34 | 33 |
from buildgrid.server.cas.storage.s3 import S3Storage
|
35 | 34 |
from buildgrid.server.cas.storage.with_cache import WithCacheStorage
|
35 |
+from buildgrid.server.execution.action_cache import ActionCache
|
|
36 | 36 |
|
37 | 37 |
from ..cli import pass_context
|
38 | 38 |
|
... | ... | @@ -72,36 +72,31 @@ def cli(context): |
72 | 72 |
def start(context, port, max_cached_actions, allow_uar, cas, **cas_args):
|
73 | 73 |
context.logger.info("Starting on port {}".format(port))
|
74 | 74 |
|
75 |
- loop = asyncio.get_event_loop()
|
|
76 |
- |
|
77 | 75 |
cas_storage = _make_cas_storage(context, cas, cas_args)
|
76 |
+ |
|
78 | 77 |
if cas_storage is None:
|
79 | 78 |
context.logger.info("Running without CAS - action cache will be unavailable")
|
80 | 79 |
action_cache = None
|
80 |
+ |
|
81 | 81 |
else:
|
82 |
- action_cache = ActionCache(cas_storage, max_cached_actions)
|
|
82 |
+ action_cache = ActionCache(cas_storage, max_cached_actions, allow_uar)
|
|
83 | 83 |
|
84 | 84 |
server = build_grid_server.BuildGridServer(port,
|
85 | 85 |
cas_storage=cas_storage,
|
86 |
- action_cache=action_cache,
|
|
87 |
- allow_update_action_result=allow_uar)
|
|
88 |
- |
|
86 |
+ action_cache=action_cache)
|
|
87 |
+ loop = asyncio.get_event_loop()
|
|
89 | 88 |
try:
|
90 |
- asyncio.ensure_future(server.start())
|
|
89 |
+ server.start()
|
|
91 | 90 |
loop.run_forever()
|
91 |
+ |
|
92 | 92 |
except KeyboardInterrupt:
|
93 | 93 |
pass
|
94 |
+ |
|
94 | 95 |
finally:
|
95 |
- loop.run_until_complete(server.stop())
|
|
96 |
+ server.stop()
|
|
96 | 97 |
loop.close()
|
97 | 98 |
|
98 | 99 |
|
99 |
-@cli.command('stop', short_help="Request a server to teardown.")
|
|
100 |
-@pass_context
|
|
101 |
-def stop(context):
|
|
102 |
- context.logger.error("Not implemented yet")
|
|
103 |
- |
|
104 |
- |
|
105 | 100 |
def _make_cas_storage(context, cas_type, cas_args):
|
106 | 101 |
"""Returns the storage provider corresponding to the given `cas_type`,
|
107 | 102 |
or None if the provider cannot be created.
|
1 |
+// Copyright 2018 Codethink Limited
|
|
2 |
+//
|
|
3 |
+// Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+// you may not use this file except in compliance with the License.
|
|
5 |
+// You may obtain a copy of the License at
|
|
6 |
+//
|
|
7 |
+// http://www.apache.org/licenses/LICENSE-2.0
|
|
8 |
+//
|
|
9 |
+// Unless required by applicable law or agreed to in writing, software
|
|
10 |
+// distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+// See the License for the specific language governing permissions and
|
|
13 |
+// limitations under the License.
|
|
14 |
+ |
|
15 |
+syntax = "proto3";
|
|
16 |
+ |
|
17 |
+package buildstream.v2;
|
|
18 |
+ |
|
19 |
+import "build/bazel/remote/execution/v2/remote_execution.proto";
|
|
20 |
+import "google/api/annotations.proto";
|
|
21 |
+ |
|
22 |
+service ReferenceStorage {
|
|
23 |
+ // Retrieve a CAS [Directory][build.bazel.remote.execution.v2.Directory]
|
|
24 |
+ // digest by name.
|
|
25 |
+ //
|
|
26 |
+ // Errors:
|
|
27 |
+ // * `NOT_FOUND`: The requested reference is not in the cache.
|
|
28 |
+ rpc GetReference(GetReferenceRequest) returns (GetReferenceResponse) {
|
|
29 |
+ option (google.api.http) = { get: "/v2/{instance_name=**}/buildstream/refs/{key}" };
|
|
30 |
+ }
|
|
31 |
+ |
|
32 |
+ // Associate a name with a CAS [Directory][build.bazel.remote.execution.v2.Directory]
|
|
33 |
+ // digest.
|
|
34 |
+ //
|
|
35 |
+ // Errors:
|
|
36 |
+ // * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
|
|
37 |
+ // entry to the cache.
|
|
38 |
+ rpc UpdateReference(UpdateReferenceRequest) returns (UpdateReferenceResponse) {
|
|
39 |
+ option (google.api.http) = { put: "/v2/{instance_name=**}/buildstream/refs/{key}" body: "digest" };
|
|
40 |
+ }
|
|
41 |
+ |
|
42 |
+ rpc Status(StatusRequest) returns (StatusResponse) {
|
|
43 |
+ option (google.api.http) = { put: "/v2/{instance_name=**}/buildstream/refs:status" };
|
|
44 |
+ }
|
|
45 |
+}
|
|
46 |
+ |
|
47 |
+message GetReferenceRequest {
|
|
48 |
+ // The instance of the execution system to operate against. A server may
|
|
49 |
+ // support multiple instances of the execution system (with their own workers,
|
|
50 |
+ // storage, caches, etc.). The server MAY require use of this field to select
|
|
51 |
+ // between them in an implementation-defined fashion, otherwise it can be
|
|
52 |
+ // omitted.
|
|
53 |
+ string instance_name = 1;
|
|
54 |
+ |
|
55 |
+ // The name of the reference.
|
|
56 |
+ string key = 2;
|
|
57 |
+}
|
|
58 |
+ |
|
59 |
+message GetReferenceResponse {
|
|
60 |
+ // The digest of the CAS [Directory][build.bazel.remote.execution.v2.Directory].
|
|
61 |
+ build.bazel.remote.execution.v2.Digest digest = 1;
|
|
62 |
+}
|
|
63 |
+ |
|
64 |
+message UpdateReferenceRequest {
|
|
65 |
+ // The instance of the execution system to operate against. A server may
|
|
66 |
+ // support multiple instances of the execution system (with their own workers,
|
|
67 |
+ // storage, caches, etc.). The server MAY require use of this field to select
|
|
68 |
+ // between them in an implementation-defined fashion, otherwise it can be
|
|
69 |
+ // omitted.
|
|
70 |
+ string instance_name = 1;
|
|
71 |
+ |
|
72 |
+ // The name of the reference.
|
|
73 |
+ repeated string keys = 2;
|
|
74 |
+ |
|
75 |
+ // The digest of the CAS [Directory][build.bazel.remote.execution.v2.Directory]
|
|
76 |
+ // to store in the cache.
|
|
77 |
+ build.bazel.remote.execution.v2.Digest digest = 3;
|
|
78 |
+}
|
|
79 |
+ |
|
80 |
+message UpdateReferenceResponse {
|
|
81 |
+}
|
|
82 |
+ |
|
83 |
+message StatusRequest {
|
|
84 |
+ // The instance of the execution system to operate against. A server may
|
|
85 |
+ // support multiple instances of the execution system (with their own workers,
|
|
86 |
+ // storage, caches, etc.). The server MAY require use of this field to select
|
|
87 |
+ // between them in an implementation-defined fashion, otherwise it can be
|
|
88 |
+ // omitted.
|
|
89 |
+ string instance_name = 1;
|
|
90 |
+}
|
|
91 |
+ |
|
92 |
+message StatusResponse {
|
|
93 |
+ // Whether reference updates are allowed for the connected client.
|
|
94 |
+ bool allow_updates = 1;
|
|
95 |
+}
|
1 |
+# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
2 |
+# source: buildstream/v2/buildstream.proto
|
|
3 |
+ |
|
4 |
+import sys
|
|
5 |
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
|
|
6 |
+from google.protobuf import descriptor as _descriptor
|
|
7 |
+from google.protobuf import message as _message
|
|
8 |
+from google.protobuf import reflection as _reflection
|
|
9 |
+from google.protobuf import symbol_database as _symbol_database
|
|
10 |
+from google.protobuf import descriptor_pb2
|
|
11 |
+# @@protoc_insertion_point(imports)
|
|
12 |
+ |
|
13 |
+_sym_db = _symbol_database.Default()
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2
|
|
17 |
+from buildgrid._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
|
|
18 |
+ |
|
19 |
+ |
|
20 |
+DESCRIPTOR = _descriptor.FileDescriptor(
|
|
21 |
+ name='buildstream/v2/buildstream.proto',
|
|
22 |
+ package='buildstream.v2',
|
|
23 |
+ syntax='proto3',
|
|
24 |
+ serialized_pb=_b('\n buildstream/v2/buildstream.proto\x12\x0e\x62uildstream.v2\x1a\x36\x62uild/bazel/remote/execution/v2/remote_execution.proto\x1a\x1cgoogle/api/annotations.proto\"9\n\x13GetReferenceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\"O\n\x14GetReferenceResponse\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"v\n\x16UpdateReferenceRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x37\n\x06\x64igest\x18\x03 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\x19\n\x17UpdateReferenceResponse\"&\n\rStatusRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"\'\n\x0eStatusResponse\x12\x15\n\rallow_updates\x18\x01 \x01(\x08\x32\xca\x03\n\x10ReferenceStorage\x12\x90\x01\n\x0cGetReference\x12#.buildstream.v2.GetReferenceRequest\x1a$.buildstream.v2.GetReferenceResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v2/{instance_name=**}/buildstream/refs/{key}\x12\xa1\x01\n\x0fUpdateReference\x12&.buildstream.v2.UpdateReferenceRequest\x1a\'.buildstream.v2.UpdateReferenceResponse\"=\x82\xd3\xe4\x93\x02\x37\x1a-/v2/{instance_name=**}/buildstream/refs/{key}:\x06\x64igest\x12\x7f\n\x06Status\x12\x1d.buildstream.v2.StatusRequest\x1a\x1e.buildstream.v2.StatusResponse\"6\x82\xd3\xe4\x93\x02\x30\x1a./v2/{instance_name=**}/buildstream/refs:statusb\x06proto3')
|
|
25 |
+ ,
|
|
26 |
+ dependencies=[build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
|
|
27 |
+ |
|
28 |
+ |
|
29 |
+ |
|
30 |
+ |
|
31 |
+_GETREFERENCEREQUEST = _descriptor.Descriptor(
|
|
32 |
+ name='GetReferenceRequest',
|
|
33 |
+ full_name='buildstream.v2.GetReferenceRequest',
|
|
34 |
+ filename=None,
|
|
35 |
+ file=DESCRIPTOR,
|
|
36 |
+ containing_type=None,
|
|
37 |
+ fields=[
|
|
38 |
+ _descriptor.FieldDescriptor(
|
|
39 |
+ name='instance_name', full_name='buildstream.v2.GetReferenceRequest.instance_name', index=0,
|
|
40 |
+ number=1, type=9, cpp_type=9, label=1,
|
|
41 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
42 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
43 |
+ is_extension=False, extension_scope=None,
|
|
44 |
+ options=None, file=DESCRIPTOR),
|
|
45 |
+ _descriptor.FieldDescriptor(
|
|
46 |
+ name='key', full_name='buildstream.v2.GetReferenceRequest.key', index=1,
|
|
47 |
+ number=2, type=9, cpp_type=9, label=1,
|
|
48 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
49 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
50 |
+ is_extension=False, extension_scope=None,
|
|
51 |
+ options=None, file=DESCRIPTOR),
|
|
52 |
+ ],
|
|
53 |
+ extensions=[
|
|
54 |
+ ],
|
|
55 |
+ nested_types=[],
|
|
56 |
+ enum_types=[
|
|
57 |
+ ],
|
|
58 |
+ options=None,
|
|
59 |
+ is_extendable=False,
|
|
60 |
+ syntax='proto3',
|
|
61 |
+ extension_ranges=[],
|
|
62 |
+ oneofs=[
|
|
63 |
+ ],
|
|
64 |
+ serialized_start=138,
|
|
65 |
+ serialized_end=195,
|
|
66 |
+)
|
|
67 |
+ |
|
68 |
+ |
|
69 |
+_GETREFERENCERESPONSE = _descriptor.Descriptor(
|
|
70 |
+ name='GetReferenceResponse',
|
|
71 |
+ full_name='buildstream.v2.GetReferenceResponse',
|
|
72 |
+ filename=None,
|
|
73 |
+ file=DESCRIPTOR,
|
|
74 |
+ containing_type=None,
|
|
75 |
+ fields=[
|
|
76 |
+ _descriptor.FieldDescriptor(
|
|
77 |
+ name='digest', full_name='buildstream.v2.GetReferenceResponse.digest', index=0,
|
|
78 |
+ number=1, type=11, cpp_type=10, label=1,
|
|
79 |
+ has_default_value=False, default_value=None,
|
|
80 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
81 |
+ is_extension=False, extension_scope=None,
|
|
82 |
+ options=None, file=DESCRIPTOR),
|
|
83 |
+ ],
|
|
84 |
+ extensions=[
|
|
85 |
+ ],
|
|
86 |
+ nested_types=[],
|
|
87 |
+ enum_types=[
|
|
88 |
+ ],
|
|
89 |
+ options=None,
|
|
90 |
+ is_extendable=False,
|
|
91 |
+ syntax='proto3',
|
|
92 |
+ extension_ranges=[],
|
|
93 |
+ oneofs=[
|
|
94 |
+ ],
|
|
95 |
+ serialized_start=197,
|
|
96 |
+ serialized_end=276,
|
|
97 |
+)
|
|
98 |
+ |
|
99 |
+ |
|
100 |
+_UPDATEREFERENCEREQUEST = _descriptor.Descriptor(
|
|
101 |
+ name='UpdateReferenceRequest',
|
|
102 |
+ full_name='buildstream.v2.UpdateReferenceRequest',
|
|
103 |
+ filename=None,
|
|
104 |
+ file=DESCRIPTOR,
|
|
105 |
+ containing_type=None,
|
|
106 |
+ fields=[
|
|
107 |
+ _descriptor.FieldDescriptor(
|
|
108 |
+ name='instance_name', full_name='buildstream.v2.UpdateReferenceRequest.instance_name', index=0,
|
|
109 |
+ number=1, type=9, cpp_type=9, label=1,
|
|
110 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
111 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
112 |
+ is_extension=False, extension_scope=None,
|
|
113 |
+ options=None, file=DESCRIPTOR),
|
|
114 |
+ _descriptor.FieldDescriptor(
|
|
115 |
+ name='keys', full_name='buildstream.v2.UpdateReferenceRequest.keys', index=1,
|
|
116 |
+ number=2, type=9, cpp_type=9, label=3,
|
|
117 |
+ has_default_value=False, default_value=[],
|
|
118 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
119 |
+ is_extension=False, extension_scope=None,
|
|
120 |
+ options=None, file=DESCRIPTOR),
|
|
121 |
+ _descriptor.FieldDescriptor(
|
|
122 |
+ name='digest', full_name='buildstream.v2.UpdateReferenceRequest.digest', index=2,
|
|
123 |
+ number=3, type=11, cpp_type=10, label=1,
|
|
124 |
+ has_default_value=False, default_value=None,
|
|
125 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
126 |
+ is_extension=False, extension_scope=None,
|
|
127 |
+ options=None, file=DESCRIPTOR),
|
|
128 |
+ ],
|
|
129 |
+ extensions=[
|
|
130 |
+ ],
|
|
131 |
+ nested_types=[],
|
|
132 |
+ enum_types=[
|
|
133 |
+ ],
|
|
134 |
+ options=None,
|
|
135 |
+ is_extendable=False,
|
|
136 |
+ syntax='proto3',
|
|
137 |
+ extension_ranges=[],
|
|
138 |
+ oneofs=[
|
|
139 |
+ ],
|
|
140 |
+ serialized_start=278,
|
|
141 |
+ serialized_end=396,
|
|
142 |
+)
|
|
143 |
+ |
|
144 |
+ |
|
145 |
+_UPDATEREFERENCERESPONSE = _descriptor.Descriptor(
|
|
146 |
+ name='UpdateReferenceResponse',
|
|
147 |
+ full_name='buildstream.v2.UpdateReferenceResponse',
|
|
148 |
+ filename=None,
|
|
149 |
+ file=DESCRIPTOR,
|
|
150 |
+ containing_type=None,
|
|
151 |
+ fields=[
|
|
152 |
+ ],
|
|
153 |
+ extensions=[
|
|
154 |
+ ],
|
|
155 |
+ nested_types=[],
|
|
156 |
+ enum_types=[
|
|
157 |
+ ],
|
|
158 |
+ options=None,
|
|
159 |
+ is_extendable=False,
|
|
160 |
+ syntax='proto3',
|
|
161 |
+ extension_ranges=[],
|
|
162 |
+ oneofs=[
|
|
163 |
+ ],
|
|
164 |
+ serialized_start=398,
|
|
165 |
+ serialized_end=423,
|
|
166 |
+)
|
|
167 |
+ |
|
168 |
+ |
|
169 |
+_STATUSREQUEST = _descriptor.Descriptor(
|
|
170 |
+ name='StatusRequest',
|
|
171 |
+ full_name='buildstream.v2.StatusRequest',
|
|
172 |
+ filename=None,
|
|
173 |
+ file=DESCRIPTOR,
|
|
174 |
+ containing_type=None,
|
|
175 |
+ fields=[
|
|
176 |
+ _descriptor.FieldDescriptor(
|
|
177 |
+ name='instance_name', full_name='buildstream.v2.StatusRequest.instance_name', index=0,
|
|
178 |
+ number=1, type=9, cpp_type=9, label=1,
|
|
179 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
180 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
181 |
+ is_extension=False, extension_scope=None,
|
|
182 |
+ options=None, file=DESCRIPTOR),
|
|
183 |
+ ],
|
|
184 |
+ extensions=[
|
|
185 |
+ ],
|
|
186 |
+ nested_types=[],
|
|
187 |
+ enum_types=[
|
|
188 |
+ ],
|
|
189 |
+ options=None,
|
|
190 |
+ is_extendable=False,
|
|
191 |
+ syntax='proto3',
|
|
192 |
+ extension_ranges=[],
|
|
193 |
+ oneofs=[
|
|
194 |
+ ],
|
|
195 |
+ serialized_start=425,
|
|
196 |
+ serialized_end=463,
|
|
197 |
+)
|
|
198 |
+ |
|
199 |
+ |
|
200 |
+_STATUSRESPONSE = _descriptor.Descriptor(
|
|
201 |
+ name='StatusResponse',
|
|
202 |
+ full_name='buildstream.v2.StatusResponse',
|
|
203 |
+ filename=None,
|
|
204 |
+ file=DESCRIPTOR,
|
|
205 |
+ containing_type=None,
|
|
206 |
+ fields=[
|
|
207 |
+ _descriptor.FieldDescriptor(
|
|
208 |
+ name='allow_updates', full_name='buildstream.v2.StatusResponse.allow_updates', index=0,
|
|
209 |
+ number=1, type=8, cpp_type=7, label=1,
|
|
210 |
+ has_default_value=False, default_value=False,
|
|
211 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
212 |
+ is_extension=False, extension_scope=None,
|
|
213 |
+ options=None, file=DESCRIPTOR),
|
|
214 |
+ ],
|
|
215 |
+ extensions=[
|
|
216 |
+ ],
|
|
217 |
+ nested_types=[],
|
|
218 |
+ enum_types=[
|
|
219 |
+ ],
|
|
220 |
+ options=None,
|
|
221 |
+ is_extendable=False,
|
|
222 |
+ syntax='proto3',
|
|
223 |
+ extension_ranges=[],
|
|
224 |
+ oneofs=[
|
|
225 |
+ ],
|
|
226 |
+ serialized_start=465,
|
|
227 |
+ serialized_end=504,
|
|
228 |
+)
|
|
229 |
+ |
|
230 |
+_GETREFERENCERESPONSE.fields_by_name['digest'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
|
|
231 |
+_UPDATEREFERENCEREQUEST.fields_by_name['digest'].message_type = build_dot_bazel_dot_remote_dot_execution_dot_v2_dot_remote__execution__pb2._DIGEST
|
|
232 |
+DESCRIPTOR.message_types_by_name['GetReferenceRequest'] = _GETREFERENCEREQUEST
|
|
233 |
+DESCRIPTOR.message_types_by_name['GetReferenceResponse'] = _GETREFERENCERESPONSE
|
|
234 |
+DESCRIPTOR.message_types_by_name['UpdateReferenceRequest'] = _UPDATEREFERENCEREQUEST
|
|
235 |
+DESCRIPTOR.message_types_by_name['UpdateReferenceResponse'] = _UPDATEREFERENCERESPONSE
|
|
236 |
+DESCRIPTOR.message_types_by_name['StatusRequest'] = _STATUSREQUEST
|
|
237 |
+DESCRIPTOR.message_types_by_name['StatusResponse'] = _STATUSRESPONSE
|
|
238 |
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
|
239 |
+ |
|
240 |
+GetReferenceRequest = _reflection.GeneratedProtocolMessageType('GetReferenceRequest', (_message.Message,), dict(
|
|
241 |
+ DESCRIPTOR = _GETREFERENCEREQUEST,
|
|
242 |
+ __module__ = 'buildstream.v2.buildstream_pb2'
|
|
243 |
+ # @@protoc_insertion_point(class_scope:buildstream.v2.GetReferenceRequest)
|
|
244 |
+ ))
|
|
245 |
+_sym_db.RegisterMessage(GetReferenceRequest)
|
|
246 |
+ |
|
247 |
+GetReferenceResponse = _reflection.GeneratedProtocolMessageType('GetReferenceResponse', (_message.Message,), dict(
|
|
248 |
+ DESCRIPTOR = _GETREFERENCERESPONSE,
|
|
249 |
+ __module__ = 'buildstream.v2.buildstream_pb2'
|
|
250 |
+ # @@protoc_insertion_point(class_scope:buildstream.v2.GetReferenceResponse)
|
|
251 |
+ ))
|
|
252 |
+_sym_db.RegisterMessage(GetReferenceResponse)
|
|
253 |
+ |
|
254 |
+UpdateReferenceRequest = _reflection.GeneratedProtocolMessageType('UpdateReferenceRequest', (_message.Message,), dict(
|
|
255 |
+ DESCRIPTOR = _UPDATEREFERENCEREQUEST,
|
|
256 |
+ __module__ = 'buildstream.v2.buildstream_pb2'
|
|
257 |
+ # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateReferenceRequest)
|
|
258 |
+ ))
|
|
259 |
+_sym_db.RegisterMessage(UpdateReferenceRequest)
|
|
260 |
+ |
|
261 |
+UpdateReferenceResponse = _reflection.GeneratedProtocolMessageType('UpdateReferenceResponse', (_message.Message,), dict(
|
|
262 |
+ DESCRIPTOR = _UPDATEREFERENCERESPONSE,
|
|
263 |
+ __module__ = 'buildstream.v2.buildstream_pb2'
|
|
264 |
+ # @@protoc_insertion_point(class_scope:buildstream.v2.UpdateReferenceResponse)
|
|
265 |
+ ))
|
|
266 |
+_sym_db.RegisterMessage(UpdateReferenceResponse)
|
|
267 |
+ |
|
268 |
+StatusRequest = _reflection.GeneratedProtocolMessageType('StatusRequest', (_message.Message,), dict(
|
|
269 |
+ DESCRIPTOR = _STATUSREQUEST,
|
|
270 |
+ __module__ = 'buildstream.v2.buildstream_pb2'
|
|
271 |
+ # @@protoc_insertion_point(class_scope:buildstream.v2.StatusRequest)
|
|
272 |
+ ))
|
|
273 |
+_sym_db.RegisterMessage(StatusRequest)
|
|
274 |
+ |
|
275 |
+StatusResponse = _reflection.GeneratedProtocolMessageType('StatusResponse', (_message.Message,), dict(
|
|
276 |
+ DESCRIPTOR = _STATUSRESPONSE,
|
|
277 |
+ __module__ = 'buildstream.v2.buildstream_pb2'
|
|
278 |
+ # @@protoc_insertion_point(class_scope:buildstream.v2.StatusResponse)
|
|
279 |
+ ))
|
|
280 |
+_sym_db.RegisterMessage(StatusResponse)
|
|
281 |
+ |
|
282 |
+ |
|
283 |
+ |
|
284 |
+_REFERENCESTORAGE = _descriptor.ServiceDescriptor(
|
|
285 |
+ name='ReferenceStorage',
|
|
286 |
+ full_name='buildstream.v2.ReferenceStorage',
|
|
287 |
+ file=DESCRIPTOR,
|
|
288 |
+ index=0,
|
|
289 |
+ options=None,
|
|
290 |
+ serialized_start=507,
|
|
291 |
+ serialized_end=965,
|
|
292 |
+ methods=[
|
|
293 |
+ _descriptor.MethodDescriptor(
|
|
294 |
+ name='GetReference',
|
|
295 |
+ full_name='buildstream.v2.ReferenceStorage.GetReference',
|
|
296 |
+ index=0,
|
|
297 |
+ containing_service=None,
|
|
298 |
+ input_type=_GETREFERENCEREQUEST,
|
|
299 |
+ output_type=_GETREFERENCERESPONSE,
|
|
300 |
+ options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/\022-/v2/{instance_name=**}/buildstream/refs/{key}')),
|
|
301 |
+ ),
|
|
302 |
+ _descriptor.MethodDescriptor(
|
|
303 |
+ name='UpdateReference',
|
|
304 |
+ full_name='buildstream.v2.ReferenceStorage.UpdateReference',
|
|
305 |
+ index=1,
|
|
306 |
+ containing_service=None,
|
|
307 |
+ input_type=_UPDATEREFERENCEREQUEST,
|
|
308 |
+ output_type=_UPDATEREFERENCERESPONSE,
|
|
309 |
+ options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0027\032-/v2/{instance_name=**}/buildstream/refs/{key}:\006digest')),
|
|
310 |
+ ),
|
|
311 |
+ _descriptor.MethodDescriptor(
|
|
312 |
+ name='Status',
|
|
313 |
+ full_name='buildstream.v2.ReferenceStorage.Status',
|
|
314 |
+ index=2,
|
|
315 |
+ containing_service=None,
|
|
316 |
+ input_type=_STATUSREQUEST,
|
|
317 |
+ output_type=_STATUSRESPONSE,
|
|
318 |
+ options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0020\032./v2/{instance_name=**}/buildstream/refs:status')),
|
|
319 |
+ ),
|
|
320 |
+])
|
|
321 |
+_sym_db.RegisterServiceDescriptor(_REFERENCESTORAGE)
|
|
322 |
+ |
|
323 |
+DESCRIPTOR.services_by_name['ReferenceStorage'] = _REFERENCESTORAGE
|
|
324 |
+ |
|
325 |
+# @@protoc_insertion_point(module_scope)
|
1 |
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
|
|
2 |
+import grpc
|
|
3 |
+ |
|
4 |
+from buildgrid._protos.buildstream.v2 import buildstream_pb2 as buildstream_dot_v2_dot_buildstream__pb2
|
|
5 |
+ |
|
6 |
+ |
|
7 |
+class ReferenceStorageStub(object):
|
|
8 |
+ # missing associated documentation comment in .proto file
|
|
9 |
+ pass
|
|
10 |
+ |
|
11 |
+ def __init__(self, channel):
|
|
12 |
+ """Constructor.
|
|
13 |
+ |
|
14 |
+ Args:
|
|
15 |
+ channel: A grpc.Channel.
|
|
16 |
+ """
|
|
17 |
+ self.GetReference = channel.unary_unary(
|
|
18 |
+ '/buildstream.v2.ReferenceStorage/GetReference',
|
|
19 |
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.SerializeToString,
|
|
20 |
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.FromString,
|
|
21 |
+ )
|
|
22 |
+ self.UpdateReference = channel.unary_unary(
|
|
23 |
+ '/buildstream.v2.ReferenceStorage/UpdateReference',
|
|
24 |
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.SerializeToString,
|
|
25 |
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.FromString,
|
|
26 |
+ )
|
|
27 |
+ self.Status = channel.unary_unary(
|
|
28 |
+ '/buildstream.v2.ReferenceStorage/Status',
|
|
29 |
+ request_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.SerializeToString,
|
|
30 |
+ response_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.FromString,
|
|
31 |
+ )
|
|
32 |
+ |
|
33 |
+ |
|
34 |
+class ReferenceStorageServicer(object):
|
|
35 |
+ # missing associated documentation comment in .proto file
|
|
36 |
+ pass
|
|
37 |
+ |
|
38 |
+ def GetReference(self, request, context):
|
|
39 |
+ """Retrieve a CAS [Directory][build.bazel.remote.execution.v2.Directory]
|
|
40 |
+ digest by name.
|
|
41 |
+ |
|
42 |
+ Errors:
|
|
43 |
+ * `NOT_FOUND`: The requested reference is not in the cache.
|
|
44 |
+ """
|
|
45 |
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
|
46 |
+ context.set_details('Method not implemented!')
|
|
47 |
+ raise NotImplementedError('Method not implemented!')
|
|
48 |
+ |
|
49 |
+ def UpdateReference(self, request, context):
|
|
50 |
+ """Associate a name with a CAS [Directory][build.bazel.remote.execution.v2.Directory]
|
|
51 |
+ digest.
|
|
52 |
+ |
|
53 |
+ Errors:
|
|
54 |
+ * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
|
|
55 |
+ entry to the cache.
|
|
56 |
+ """
|
|
57 |
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
|
58 |
+ context.set_details('Method not implemented!')
|
|
59 |
+ raise NotImplementedError('Method not implemented!')
|
|
60 |
+ |
|
61 |
+ def Status(self, request, context):
|
|
62 |
+ # missing associated documentation comment in .proto file
|
|
63 |
+ pass
|
|
64 |
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
|
65 |
+ context.set_details('Method not implemented!')
|
|
66 |
+ raise NotImplementedError('Method not implemented!')
|
|
67 |
+ |
|
68 |
+ |
|
69 |
+def add_ReferenceStorageServicer_to_server(servicer, server):
|
|
70 |
+ rpc_method_handlers = {
|
|
71 |
+ 'GetReference': grpc.unary_unary_rpc_method_handler(
|
|
72 |
+ servicer.GetReference,
|
|
73 |
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceRequest.FromString,
|
|
74 |
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.GetReferenceResponse.SerializeToString,
|
|
75 |
+ ),
|
|
76 |
+ 'UpdateReference': grpc.unary_unary_rpc_method_handler(
|
|
77 |
+ servicer.UpdateReference,
|
|
78 |
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceRequest.FromString,
|
|
79 |
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.UpdateReferenceResponse.SerializeToString,
|
|
80 |
+ ),
|
|
81 |
+ 'Status': grpc.unary_unary_rpc_method_handler(
|
|
82 |
+ servicer.Status,
|
|
83 |
+ request_deserializer=buildstream_dot_v2_dot_buildstream__pb2.StatusRequest.FromString,
|
|
84 |
+ response_serializer=buildstream_dot_v2_dot_buildstream__pb2.StatusResponse.SerializeToString,
|
|
85 |
+ ),
|
|
86 |
+ }
|
|
87 |
+ generic_handler = grpc.method_handlers_generic_handler(
|
|
88 |
+ 'buildstream.v2.ReferenceStorage', rpc_method_handlers)
|
|
89 |
+ server.add_generic_rpc_handlers((generic_handler,))
|
1 |
-from ..._exceptions import BgdError, ErrorDomain
|
|
1 |
+from .._exceptions import BgdError, ErrorDomain
|
|
2 | 2 |
|
3 | 3 |
|
4 | 4 |
class InvalidArgumentError(BgdError):
|
5 |
+ """A bad argument was passed, such as a name which doesn't exist.
|
|
5 | 6 |
"""
|
6 |
- A bad argument was passed, such as a name which doesn't exist.
|
|
7 |
+ |
|
8 |
+ def __init__(self, message, detail=None, reason=None):
|
|
9 |
+ super().__init__(message, detail=detail, domain=ErrorDomain.SERVER, reason=reason)
|
|
10 |
+ |
|
11 |
+ |
|
12 |
+class NotFoundError(BgdError):
|
|
13 |
+ """Requested resource not found.
|
|
7 | 14 |
"""
|
15 |
+ |
|
8 | 16 |
def __init__(self, message, detail=None, reason=None):
|
9 | 17 |
super().__init__(message, detail=detail, domain=ErrorDomain.SERVER, reason=reason)
|
10 | 18 |
|
11 | 19 |
|
12 | 20 |
class OutofSyncError(BgdError):
|
13 |
- """
|
|
14 |
- The worker is out of sync with the server, such as having a differing number of leases.
|
|
21 |
+ """The worker is out of sync with the server, such as having a differing number of leases.
|
|
15 | 22 |
"""
|
16 | 23 |
|
17 | 24 |
def __init__(self, message, detail=None, reason=None):
|
... | ... | @@ -44,8 +44,7 @@ from .worker.bots_interface import BotsInterface |
44 | 44 |
|
45 | 45 |
class BuildGridServer:
|
46 | 46 |
|
47 |
- def __init__(self, port='50051', max_workers=10, cas_storage=None,
|
|
48 |
- action_cache=None, allow_update_action_result=True):
|
|
47 |
+ def __init__(self, port='50051', max_workers=10, cas_storage=None, action_cache=None):
|
|
49 | 48 |
port = '[::]:{0}'.format(port)
|
50 | 49 |
scheduler = Scheduler(action_cache)
|
51 | 50 |
bots_interface = BotsInterface(scheduler)
|
... | ... | @@ -68,13 +67,12 @@ class BuildGridServer: |
68 | 67 |
bytestream_pb2_grpc.add_ByteStreamServicer_to_server(ByteStreamService(cas_storage),
|
69 | 68 |
self._server)
|
70 | 69 |
if action_cache is not None:
|
71 |
- action_cache_service = ActionCacheService(action_cache,
|
|
72 |
- allow_update_action_result)
|
|
70 |
+ action_cache_service = ActionCacheService(action_cache)
|
|
73 | 71 |
remote_execution_pb2_grpc.add_ActionCacheServicer_to_server(action_cache_service,
|
74 | 72 |
self._server)
|
75 | 73 |
|
76 |
- async def start(self):
|
|
74 |
+ def start(self):
|
|
77 | 75 |
self._server.start()
|
78 | 76 |
|
79 |
- async def stop(self):
|
|
77 |
+ def stop(self):
|
|
80 | 78 |
self._server.stop(0)
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+"""
|
|
17 |
+Reference Cache
|
|
18 |
+==================
|
|
19 |
+ |
|
20 |
+Implements an in-memory reference cache.
|
|
21 |
+ |
|
22 |
+For a given key, it stores the digest of a message in the storage backend and keeps an in-memory mapping from that key to the stored digest.
|
|
23 |
+"""
|
|
24 |
+ |
|
25 |
+import collections
|
|
26 |
+ |
|
27 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
28 |
+ |
|
29 |
+from .._exceptions import NotFoundError
|
|
30 |
+ |
|
31 |
+ |
|
32 |
+class ReferenceCache:
|
|
33 |
+ |
|
34 |
+ def __init__(self, storage, max_cached_refs, allow_updates=True):
|
|
35 |
+ """ Initialises a new ReferenceCache instance.
|
|
36 |
+ |
|
37 |
+ Args:
|
|
38 |
+ storage (StorageABC): storage backend instance to be used.
|
|
39 |
+ max_cached_refs (int): maximum number of entries to be stored.
|
|
40 |
+ allow_updates (bool): allow the client to write to storage
|
|
41 |
+ """
|
|
42 |
+ self._allow_updates = allow_updates
|
|
43 |
+ self._storage = storage
|
|
44 |
+ self._max_cached_refs = max_cached_refs
|
|
45 |
+ self._digest_map = collections.OrderedDict()
|
|
46 |
+ |
|
47 |
+ @property
|
|
48 |
+ def allow_updates(self):
|
|
49 |
+ return self._allow_updates
|
|
50 |
+ |
|
51 |
+ def get_digest_reference(self, key):
|
|
52 |
+ """Retrieves the cached Digest for the given key.
|
|
53 |
+ |
|
54 |
+ Args:
|
|
55 |
+ key: key for Digest to query.
|
|
56 |
+ |
|
57 |
+ Returns:
|
|
58 |
+ The cached Digest matching the given key or raises
|
|
59 |
+ NotFoundError.
|
|
60 |
+ """
|
|
61 |
+ if key in self._digest_map:
|
|
62 |
+ reference_result = self._storage.get_message(self._digest_map[key], remote_execution_pb2.Digest)
|
|
63 |
+ |
|
64 |
+ if reference_result is not None:
|
|
65 |
+ return reference_result
|
|
66 |
+ |
|
67 |
+ del self._digest_map[key]
|
|
68 |
+ |
|
69 |
+ raise NotFoundError("Key not found: {}".format(key))
|
|
70 |
+ |
|
71 |
+ def get_action_reference(self, key):
|
|
72 |
+ """Retrieves the cached ActionResult for the given Action digest.
|
|
73 |
+ |
|
74 |
+ Args:
|
|
75 |
+ key: key for ActionResult to query.
|
|
76 |
+ |
|
77 |
+ Returns:
|
|
78 |
+ The cached ActionResult matching the given key or raises
|
|
79 |
+ NotFoundError.
|
|
80 |
+ """
|
|
81 |
+ if key in self._digest_map:
|
|
82 |
+ reference_result = self._storage.get_message(self._digest_map[key], remote_execution_pb2.ActionResult)
|
|
83 |
+ |
|
84 |
+ if reference_result is not None:
|
|
85 |
+ if self._action_result_blobs_still_exist(reference_result):
|
|
86 |
+ self._digest_map.move_to_end(key)
|
|
87 |
+ return reference_result
|
|
88 |
+ |
|
89 |
+ del self._digest_map[key]
|
|
90 |
+ |
|
91 |
+ raise NotFoundError("Key not found: {}".format(key))
|
|
92 |
+ |
|
93 |
+ def update_reference(self, key, result):
|
|
94 |
+ """Stores the result in cache for the given key.
|
|
95 |
+ |
|
96 |
+ If the cache size limit has been reached, the oldest cache entries will
|
|
97 |
+ be dropped before insertion so that the cache size never exceeds the
|
|
98 |
+ maximum numbers of entries allowed.
|
|
99 |
+ |
|
100 |
+ Args:
|
|
101 |
+ key: key to store result.
|
|
102 |
+ result (Digest): result digest to store.
|
|
103 |
+ """
|
|
104 |
+ if not self._allow_updates:
|
|
105 |
+ raise NotImplementedError("Updating cache not allowed")
|
|
106 |
+ |
|
107 |
+ if self._max_cached_refs == 0:
|
|
108 |
+ return
|
|
109 |
+ |
|
110 |
+ while len(self._digest_map) >= self._max_cached_refs:
|
|
111 |
+ self._digest_map.popitem(last=False)
|
|
112 |
+ |
|
113 |
+ result_digest = self._storage.put_message(result)
|
|
114 |
+ self._digest_map[key] = result_digest
|
|
115 |
+ |
|
116 |
+ def _action_result_blobs_still_exist(self, action_result):
|
|
117 |
+ """Checks CAS for ActionResult output blobs existance.
|
|
118 |
+ |
|
119 |
+ Args:
|
|
120 |
+ action_result (ActionResult): ActionResult to search referenced
|
|
121 |
+ output blobs for.
|
|
122 |
+ |
|
123 |
+ Returns:
|
|
124 |
+ True if all referenced blobs are present in CAS, False otherwise.
|
|
125 |
+ """
|
|
126 |
+ blobs_needed = []
|
|
127 |
+ |
|
128 |
+ for output_file in action_result.output_files:
|
|
129 |
+ blobs_needed.append(output_file.digest)
|
|
130 |
+ |
|
131 |
+ for output_directory in action_result.output_directories:
|
|
132 |
+ blobs_needed.append(output_directory.tree_digest)
|
|
133 |
+ tree = self._storage.get_message(output_directory.tree_digest,
|
|
134 |
+ remote_execution_pb2.Tree)
|
|
135 |
+ if tree is None:
|
|
136 |
+ return False
|
|
137 |
+ |
|
138 |
+ for file_node in tree.root.files:
|
|
139 |
+ blobs_needed.append(file_node.digest)
|
|
140 |
+ |
|
141 |
+ for child in tree.children:
|
|
142 |
+ for file_node in child.files:
|
|
143 |
+ blobs_needed.append(file_node.digest)
|
|
144 |
+ |
|
145 |
+ if action_result.stdout_digest.hash and not action_result.stdout_raw:
|
|
146 |
+ blobs_needed.append(action_result.stdout_digest)
|
|
147 |
+ |
|
148 |
+ if action_result.stderr_digest.hash and not action_result.stderr_raw:
|
|
149 |
+ blobs_needed.append(action_result.stderr_digest)
|
|
150 |
+ |
|
151 |
+ missing = self._storage.missing_blobs(blobs_needed)
|
|
152 |
+ return len(missing) == 0
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+import logging
|
|
16 |
+ |
|
17 |
+import grpc
|
|
18 |
+ |
|
19 |
+from buildgrid._protos.buildstream.v2 import buildstream_pb2
|
|
20 |
+from buildgrid._protos.buildstream.v2 import buildstream_pb2_grpc
|
|
21 |
+ |
|
22 |
+from .._exceptions import NotFoundError
|
|
23 |
+ |
|
24 |
+ |
|
25 |
+class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer):
|
|
26 |
+ |
|
27 |
+ def __init__(self, reference_cache):
|
|
28 |
+ self._reference_cache = reference_cache
|
|
29 |
+ self.logger = logging.getLogger(__name__)
|
|
30 |
+ |
|
31 |
+ def GetReference(self, request, context):
|
|
32 |
+ try:
|
|
33 |
+ response = buildstream_pb2.GetReferenceResponse()
|
|
34 |
+ response.digest.CopyFrom(self._reference_cache.get_digest_reference(request.key))
|
|
35 |
+ return response
|
|
36 |
+ |
|
37 |
+ except NotFoundError:
|
|
38 |
+ context.set_code(grpc.StatusCode.NOT_FOUND)
|
|
39 |
+ |
|
40 |
+ def UpdateReference(self, request, context):
|
|
41 |
+ try:
|
|
42 |
+ for key in request.keys:
|
|
43 |
+ self._reference_cache.update_reference(key, request.digest)
|
|
44 |
+ |
|
45 |
+ return buildstream_pb2.UpdateReferenceResponse()
|
|
46 |
+ |
|
47 |
+ except NotImplementedError:
|
|
48 |
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
|
49 |
+ |
|
50 |
+ def Status(self, request, context):
|
|
51 |
+ allow_updates = self._reference_cache.allow_updates
|
|
52 |
+ return buildstream_pb2.StatusResponse(allow_updates=allow_updates)
|
1 |
-from ..._exceptions import BgdError, ErrorDomain
|
|
2 |
- |
|
3 |
- |
|
4 |
-class InvalidArgumentError(BgdError):
|
|
5 |
- """
|
|
6 |
- A bad argument was passed, such as a name which doesn't exist
|
|
7 |
- """
|
|
8 |
- |
|
9 |
- def __init__(self, message, detail=None, reason=None):
|
|
10 |
- super().__init__(message, detail=detail, domain=ErrorDomain.SERVER, reason=reason)
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+"""
|
|
17 |
+Action Cache
|
|
18 |
+============
|
|
19 |
+ |
|
20 |
+Implements an in-memory action cache.
|
|
21 |
+"""
|
|
22 |
+ |
|
23 |
+ |
|
24 |
+from ..cas.reference_cache import ReferenceCache
|
|
25 |
+ |
|
26 |
+ |
|
27 |
+class ActionCache(ReferenceCache):
|
|
28 |
+ |
|
29 |
+ def get_action_result(self, action_digest):
|
|
30 |
+ key = self._get_key(action_digest)
|
|
31 |
+ return self.get_action_reference(key)
|
|
32 |
+ |
|
33 |
+ def update_action_result(self, action_digest, action_result):
|
|
34 |
+ key = self._get_key(action_digest)
|
|
35 |
+ self.update_reference(key, action_result)
|
|
36 |
+ |
|
37 |
+ def _get_key(self, action_digest):
|
|
38 |
+ return (action_digest.hash, action_digest.size_bytes)
|
... | ... | @@ -26,27 +26,28 @@ import logging |
26 | 26 |
|
27 | 27 |
import grpc
|
28 | 28 |
|
29 |
-from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
30 | 29 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2_grpc
|
31 | 30 |
|
31 |
+from .._exceptions import NotFoundError
|
|
32 |
+ |
|
32 | 33 |
|
33 | 34 |
class ActionCacheService(remote_execution_pb2_grpc.ActionCacheServicer):
|
34 | 35 |
|
35 |
- def __init__(self, action_cache, allow_updates=True):
|
|
36 |
+ def __init__(self, action_cache):
|
|
36 | 37 |
self._action_cache = action_cache
|
37 |
- self._allow_updates = allow_updates
|
|
38 | 38 |
self.logger = logging.getLogger(__name__)
|
39 | 39 |
|
40 | 40 |
def GetActionResult(self, request, context):
|
41 |
- result = self._action_cache.get_action_result(request.action_digest)
|
|
42 |
- if result is None:
|
|
41 |
+ try:
|
|
42 |
+ return self._action_cache.get_action_result(request.action_digest)
|
|
43 |
+ |
|
44 |
+ except NotFoundError:
|
|
43 | 45 |
context.set_code(grpc.StatusCode.NOT_FOUND)
|
44 |
- return remote_execution_pb2.ActionResult()
|
|
45 |
- return result
|
|
46 | 46 |
|
47 | 47 |
def UpdateActionResult(self, request, context):
|
48 |
- if not self._allow_updates:
|
|
48 |
+ try:
|
|
49 |
+ self._action_cache.update_action_result(request.action_digest, request.action_result)
|
|
50 |
+ return request.action_result
|
|
51 |
+ |
|
52 |
+ except NotImplementedError:
|
|
49 | 53 |
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
50 |
- return remote_execution_pb2.ActionResult()
|
|
51 |
- self._action_cache.put_action_result(request.action_digest, request.action_result)
|
|
52 |
- return request.action_result
|
... | ... | @@ -25,7 +25,7 @@ import logging |
25 | 25 |
|
26 | 26 |
from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Action
|
27 | 27 |
|
28 |
-from ._exceptions import InvalidArgumentError
|
|
28 |
+from .._exceptions import InvalidArgumentError
|
|
29 | 29 |
|
30 | 30 |
from ..job import Job
|
31 | 31 |
|
... | ... | @@ -30,7 +30,7 @@ import grpc |
30 | 30 |
|
31 | 31 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2_grpc
|
32 | 32 |
|
33 |
-from ._exceptions import InvalidArgumentError
|
|
33 |
+from .._exceptions import InvalidArgumentError
|
|
34 | 34 |
|
35 | 35 |
|
36 | 36 |
class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
|
... | ... | @@ -27,7 +27,7 @@ import grpc |
27 | 27 |
|
28 | 28 |
from buildgrid._protos.google.longrunning import operations_pb2_grpc, operations_pb2
|
29 | 29 |
|
30 |
-from ._exceptions import InvalidArgumentError
|
|
30 |
+from .._exceptions import InvalidArgumentError
|
|
31 | 31 |
|
32 | 32 |
|
33 | 33 |
class OperationsService(operations_pb2_grpc.OperationsServicer):
|
... | ... | @@ -25,6 +25,8 @@ from collections import deque |
25 | 25 |
|
26 | 26 |
from google.protobuf import any_pb2
|
27 | 27 |
|
28 |
+ |
|
29 |
+from buildgrid.server._exceptions import NotFoundError
|
|
28 | 30 |
from buildgrid._protos.google.longrunning import operations_pb2
|
29 | 31 |
|
30 | 32 |
from .job import ExecuteStage, LeaseState
|
... | ... | @@ -35,7 +37,7 @@ class Scheduler: |
35 | 37 |
MAX_N_TRIES = 5
|
36 | 38 |
|
37 | 39 |
def __init__(self, action_cache=None):
|
38 |
- self.action_cache = action_cache
|
|
40 |
+ self._action_cache = action_cache
|
|
39 | 41 |
self.jobs = {}
|
40 | 42 |
self.queue = deque()
|
41 | 43 |
|
... | ... | @@ -50,17 +52,23 @@ class Scheduler: |
50 | 52 |
|
51 | 53 |
def append_job(self, job, skip_cache_lookup=False):
|
52 | 54 |
self.jobs[job.name] = job
|
53 |
- if self.action_cache is not None and not skip_cache_lookup:
|
|
54 |
- cached_result = self.action_cache.get_action_result(job.action_digest)
|
|
55 |
- if cached_result is not None:
|
|
55 |
+ if self._action_cache is not None and not skip_cache_lookup:
|
|
56 |
+ try:
|
|
57 |
+ cached_result = self._action_cache.get_action_result(job.action_digest)
|
|
58 |
+ except NotFoundError:
|
|
59 |
+ self.queue.append(job)
|
|
60 |
+ job.update_execute_stage(ExecuteStage.QUEUED)
|
|
61 |
+ |
|
62 |
+ else:
|
|
56 | 63 |
cached_result_any = any_pb2.Any()
|
57 | 64 |
cached_result_any.Pack(cached_result)
|
58 | 65 |
job.result = cached_result_any
|
59 | 66 |
job.result_cached = True
|
60 | 67 |
job.update_execute_stage(ExecuteStage.COMPLETED)
|
61 |
- return
|
|
62 |
- self.queue.append(job)
|
|
63 |
- job.update_execute_stage(ExecuteStage.QUEUED)
|
|
68 |
+ |
|
69 |
+ else:
|
|
70 |
+ self.queue.append(job)
|
|
71 |
+ job.update_execute_stage(ExecuteStage.QUEUED)
|
|
64 | 72 |
|
65 | 73 |
def retry_job(self, name):
|
66 | 74 |
if name in self.jobs:
|
... | ... | @@ -81,8 +89,8 @@ class Scheduler: |
81 | 89 |
job.result = result
|
82 | 90 |
job.update_execute_stage(ExecuteStage.COMPLETED)
|
83 | 91 |
self.jobs[name] = job
|
84 |
- if not job.do_not_cache and self.action_cache is not None:
|
|
85 |
- self.action_cache.put_action_result(job.action_digest, result)
|
|
92 |
+ if not job.do_not_cache and self._action_cache is not None:
|
|
93 |
+ self._action_cache.put_action_result(job.action_digest, result)
|
|
86 | 94 |
|
87 | 95 |
def get_operations(self):
|
88 | 96 |
response = operations_pb2.ListOperationsResponse()
|
... | ... | @@ -25,7 +25,7 @@ Instance of the Remote Workers interface. |
25 | 25 |
import logging
|
26 | 26 |
import uuid
|
27 | 27 |
|
28 |
-from ._exceptions import InvalidArgumentError, OutofSyncError
|
|
28 |
+from .._exceptions import InvalidArgumentError, OutofSyncError
|
|
29 | 29 |
from ..job import LeaseState
|
30 | 30 |
|
31 | 31 |
|
... | ... | @@ -27,7 +27,7 @@ import grpc |
27 | 27 |
|
28 | 28 |
from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2_grpc
|
29 | 29 |
|
30 |
-from ._exceptions import InvalidArgumentError, OutofSyncError
|
|
30 |
+from .._exceptions import InvalidArgumentError, OutofSyncError
|
|
31 | 31 |
|
32 | 32 |
|
33 | 33 |
class BotsService(bots_pb2_grpc.BotsServicer):
|
... | ... | @@ -117,10 +117,3 @@ BuildGrid's Command Line Interface (CLI) reference documentation. |
117 | 117 |
|
118 | 118 |
.. click:: app.commands.cmd_server:start
|
119 | 119 |
:prog: bgd server start
|
120 |
- |
|
121 |
-----
|
|
122 |
- |
|
123 |
-.. _invoking_bgd_server_stop:
|
|
124 |
- |
|
125 |
-.. click:: app.commands.cmd_server:stop
|
|
126 |
- :prog: bgd server stop
|
... | ... | @@ -11,17 +11,16 @@ |
11 | 11 |
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 | 12 |
# See the License for the specific language governing permissions and
|
13 | 13 |
# limitations under the License.
|
14 |
-#
|
|
15 |
-# Authors:
|
|
16 |
-# Carter Sande <csande bloomberg net>
|
|
14 |
+ |
|
17 | 15 |
|
18 | 16 |
# pylint: disable=redefined-outer-name
|
19 | 17 |
|
20 | 18 |
import pytest
|
21 | 19 |
|
22 |
-from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
23 |
-from buildgrid.server import action_cache
|
|
24 | 20 |
from buildgrid.server.cas.storage import lru_memory_cache
|
21 |
+from buildgrid.server.execution import action_cache
|
|
22 |
+from buildgrid.server._exceptions import NotFoundError
|
|
23 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
25 | 24 |
|
26 | 25 |
|
27 | 26 |
@pytest.fixture
|
... | ... | @@ -35,8 +34,9 @@ def test_null_action_cache(cas): |
35 | 34 |
action_digest1 = remote_execution_pb2.Digest(hash='alpha', size_bytes=4)
|
36 | 35 |
dummy_result = remote_execution_pb2.ActionResult()
|
37 | 36 |
|
38 |
- cache.put_action_result(action_digest1, dummy_result)
|
|
39 |
- assert cache.get_action_result(action_digest1) is None
|
|
37 |
+ cache.update_action_result(action_digest1, dummy_result)
|
|
38 |
+ with pytest.raises(NotFoundError):
|
|
39 |
+ cache.get_action_result(action_digest1)
|
|
40 | 40 |
|
41 | 41 |
|
42 | 42 |
def test_action_cache_expiry(cas):
|
... | ... | @@ -47,16 +47,18 @@ def test_action_cache_expiry(cas): |
47 | 47 |
action_digest3 = remote_execution_pb2.Digest(hash='charlie', size_bytes=4)
|
48 | 48 |
dummy_result = remote_execution_pb2.ActionResult()
|
49 | 49 |
|
50 |
- cache.put_action_result(action_digest1, dummy_result)
|
|
51 |
- cache.put_action_result(action_digest2, dummy_result)
|
|
50 |
+ cache.update_action_result(action_digest1, dummy_result)
|
|
51 |
+ cache.update_action_result(action_digest2, dummy_result)
|
|
52 | 52 |
|
53 | 53 |
# Get digest 1 (making 2 the least recently used)
|
54 | 54 |
assert cache.get_action_result(action_digest1) is not None
|
55 | 55 |
# Add digest 3 (so 2 gets removed from the cache)
|
56 |
- cache.put_action_result(action_digest3, dummy_result)
|
|
56 |
+ cache.update_action_result(action_digest3, dummy_result)
|
|
57 | 57 |
|
58 | 58 |
assert cache.get_action_result(action_digest1) is not None
|
59 |
- assert cache.get_action_result(action_digest2) is None
|
|
59 |
+ with pytest.raises(NotFoundError):
|
|
60 |
+ cache.get_action_result(action_digest2)
|
|
61 |
+ |
|
60 | 62 |
assert cache.get_action_result(action_digest3) is not None
|
61 | 63 |
|
62 | 64 |
|
... | ... | @@ -67,34 +69,35 @@ def test_action_cache_checks_cas(cas): |
67 | 69 |
action_digest2 = remote_execution_pb2.Digest(hash='bravo', size_bytes=4)
|
68 | 70 |
action_digest3 = remote_execution_pb2.Digest(hash='charlie', size_bytes=4)
|
69 | 71 |
|
70 |
- # Create a tree that references digests in CAS
|
|
72 |
+ # Create a tree that references digests in CAS
|
|
71 | 73 |
sample_digest = cas.put_message(remote_execution_pb2.Command(arguments=["sample"]))
|
72 | 74 |
tree = remote_execution_pb2.Tree()
|
73 | 75 |
tree.root.files.add().digest.CopyFrom(sample_digest)
|
74 | 76 |
tree.children.add().files.add().digest.CopyFrom(sample_digest)
|
75 | 77 |
tree_digest = cas.put_message(tree)
|
76 | 78 |
|
77 |
- # Add an ActionResult that references real digests to the cache
|
|
79 |
+ # Add an ActionResult that references real digests to the cache
|
|
78 | 80 |
action_result1 = remote_execution_pb2.ActionResult()
|
79 | 81 |
action_result1.output_directories.add().tree_digest.CopyFrom(tree_digest)
|
80 | 82 |
action_result1.output_files.add().digest.CopyFrom(sample_digest)
|
81 | 83 |
action_result1.stdout_digest.CopyFrom(sample_digest)
|
82 | 84 |
action_result1.stderr_digest.CopyFrom(sample_digest)
|
83 |
- cache.put_action_result(action_digest1, action_result1)
|
|
85 |
+ cache.update_action_result(action_digest1, action_result1)
|
|
84 | 86 |
|
85 |
- # Add ActionResults that reference fake digests to the cache
|
|
87 |
+ # Add ActionResults that reference fake digests to the cache
|
|
86 | 88 |
action_result2 = remote_execution_pb2.ActionResult()
|
87 | 89 |
action_result2.output_directories.add().tree_digest.hash = "nonexistent"
|
88 | 90 |
action_result2.output_directories[0].tree_digest.size_bytes = 8
|
89 |
- cache.put_action_result(action_digest2, action_result2)
|
|
91 |
+ cache.update_action_result(action_digest2, action_result2)
|
|
90 | 92 |
|
91 | 93 |
action_result3 = remote_execution_pb2.ActionResult()
|
92 | 94 |
action_result3.stdout_digest.hash = "nonexistent"
|
93 | 95 |
action_result3.stdout_digest.size_bytes = 8
|
94 |
- cache.put_action_result(action_digest3, action_result3)
|
|
96 |
+ cache.update_action_result(action_digest3, action_result3)
|
|
95 | 97 |
|
96 | 98 |
# Verify we can get the first ActionResult but not the others
|
97 | 99 |
fetched_result1 = cache.get_action_result(action_digest1)
|
98 | 100 |
assert fetched_result1.output_directories[0].tree_digest.hash == tree_digest.hash
|
99 |
- assert cache.get_action_result(action_digest2) is None
|
|
100 |
- assert cache.get_action_result(action_digest3) is None
|
|
101 |
+ with pytest.raises(NotFoundError):
|
|
102 |
+ cache.get_action_result(action_digest2)
|
|
103 |
+ cache.get_action_result(action_digest3)
|
... | ... | @@ -20,9 +20,10 @@ |
20 | 20 |
import tempfile
|
21 | 21 |
|
22 | 22 |
import boto3
|
23 |
-from moto import mock_s3
|
|
24 | 23 |
import pytest
|
25 | 24 |
|
25 |
+from moto import mock_s3
|
|
26 |
+ |
|
26 | 27 |
from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Digest
|
27 | 28 |
from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
|
28 | 29 |
from buildgrid.server.cas.storage.disk import DiskStorage
|
... | ... | @@ -23,10 +23,10 @@ import grpc |
23 | 23 |
from grpc._server import _Context
|
24 | 24 |
import pytest
|
25 | 25 |
|
26 |
+ |
|
26 | 27 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
27 |
-from buildgrid.server import action_cache
|
|
28 | 28 |
from buildgrid.server.cas.storage import lru_memory_cache
|
29 |
-from buildgrid.server.execution import action_cache_service
|
|
29 |
+from buildgrid.server.execution import action_cache, action_cache_service
|
|
30 | 30 |
|
31 | 31 |
|
32 | 32 |
# Can mock this
|
... | ... | @@ -67,7 +67,8 @@ def test_simple_action_result(cache, context): |
67 | 67 |
|
68 | 68 |
|
69 | 69 |
def test_disabled_update_action_result(cache, context):
|
70 |
- service = action_cache_service.ActionCacheService(cache, False)
|
|
70 |
+ disabled_push = action_cache.ActionCache(cas, 50, False)
|
|
71 |
+ service = action_cache_service.ActionCacheService(disabled_push)
|
|
71 | 72 |
|
72 | 73 |
request = remote_execution_pb2.UpdateActionResultRequest()
|
73 | 74 |
service.UpdateActionResult(request, context)
|
... | ... | @@ -25,9 +25,10 @@ import pytest |
25 | 25 |
|
26 | 26 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
27 | 27 |
from buildgrid._protos.google.longrunning import operations_pb2
|
28 |
-from buildgrid.server import action_cache, scheduler, job
|
|
28 |
+ |
|
29 |
+from buildgrid.server import scheduler, job
|
|
29 | 30 |
from buildgrid.server.cas.storage import lru_memory_cache
|
30 |
-from buildgrid.server.execution import execution_instance, execution_service
|
|
31 |
+from buildgrid.server.execution import action_cache, execution_instance, execution_service
|
|
31 | 32 |
|
32 | 33 |
|
33 | 34 |
@pytest.fixture
|
... | ... | @@ -27,8 +27,10 @@ from google.protobuf import any_pb2 |
27 | 27 |
|
28 | 28 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
29 | 29 |
from buildgrid._protos.google.longrunning import operations_pb2
|
30 |
+ |
|
30 | 31 |
from buildgrid.server import scheduler
|
31 |
-from buildgrid.server.execution._exceptions import InvalidArgumentError
|
|
32 |
+from buildgrid.server._exceptions import InvalidArgumentError
|
|
33 |
+ |
|
32 | 34 |
from buildgrid.server.execution import execution_instance, operations_service
|
33 | 35 |
|
34 | 36 |
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+# pylint: disable=redefined-outer-name
|
|
16 |
+ |
|
17 |
+from unittest import mock
|
|
18 |
+ |
|
19 |
+import grpc
|
|
20 |
+from grpc._server import _Context
|
|
21 |
+ |
|
22 |
+import pytest
|
|
23 |
+ |
|
24 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
25 |
+from buildgrid._protos.buildstream.v2 import buildstream_pb2
|
|
26 |
+ |
|
27 |
+from buildgrid.server.cas.storage import lru_memory_cache
|
|
28 |
+from buildgrid.server.cas import reference_cache, reference_storage_service
|
|
29 |
+ |
|
30 |
+ |
|
31 |
+# Can mock this
|
|
32 |
+@pytest.fixture
|
|
33 |
+def context():
|
|
34 |
+ yield mock.MagicMock(spec=_Context)
|
|
35 |
+ |
|
36 |
+ |
|
37 |
+@pytest.fixture
|
|
38 |
+def cas():
|
|
39 |
+ yield lru_memory_cache.LRUMemoryCache(1024 * 1024)
|
|
40 |
+ |
|
41 |
+ |
|
42 |
+@pytest.fixture
|
|
43 |
+def cache(cas):
|
|
44 |
+ yield reference_cache.ReferenceCache(cas, 50)
|
|
45 |
+ |
|
46 |
+ |
|
47 |
+def test_simple_result(cache, context):
|
|
48 |
+ keys = ["rick", "roy", "rach"]
|
|
49 |
+ service = reference_storage_service.ReferenceStorageService(cache)
|
|
50 |
+ |
|
51 |
+ # Check that before adding the ReferenceResult, attempting to fetch it fails
|
|
52 |
+ request = buildstream_pb2.GetReferenceRequest(key=keys[0])
|
|
53 |
+ service.GetReference(request, context)
|
|
54 |
+ context.set_code.assert_called_once_with(grpc.StatusCode.NOT_FOUND)
|
|
55 |
+ |
|
56 |
+ # Add an ReferenceResult to the cache
|
|
57 |
+ reference_result = remote_execution_pb2.Digest(hash='deckard')
|
|
58 |
+ request = buildstream_pb2.UpdateReferenceRequest(keys=keys,
|
|
59 |
+ digest=reference_result)
|
|
60 |
+ service.UpdateReference(request, context)
|
|
61 |
+ |
|
62 |
+ # Check that fetching it now works
|
|
63 |
+ for key in keys:
|
|
64 |
+ request = buildstream_pb2.GetReferenceRequest(key=key)
|
|
65 |
+ fetched_result = service.GetReference(request, context)
|
|
66 |
+ assert fetched_result.digest == reference_result
|
|
67 |
+ |
|
68 |
+ |
|
69 |
+def test_disabled_update_result(cache, context):
|
|
70 |
+ disabled_push = reference_cache.ReferenceCache(cas, 50, False)
|
|
71 |
+ keys = ["rick", "roy", "rach"]
|
|
72 |
+ service = reference_storage_service.ReferenceStorageService(disabled_push)
|
|
73 |
+ |
|
74 |
+ # Add an ReferenceResult to the cache
|
|
75 |
+ reference_result = remote_execution_pb2.Digest(hash='deckard')
|
|
76 |
+ request = buildstream_pb2.UpdateReferenceRequest(keys=keys,
|
|
77 |
+ digest=reference_result)
|
|
78 |
+ service.UpdateReference(request, context)
|
|
79 |
+ |
|
80 |
+ request = buildstream_pb2.UpdateReferenceRequest()
|
|
81 |
+ service.UpdateReference(request, context)
|
|
82 |
+ |
|
83 |
+ context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
|
|
84 |
+ |
|
85 |
+ |
|
86 |
+@pytest.mark.parametrize("allow_updates", [True, False])
|
|
87 |
+def test_status(allow_updates, context):
|
|
88 |
+ cache = reference_cache.ReferenceCache(cas, 5, allow_updates)
|
|
89 |
+ service = reference_storage_service.ReferenceStorageService(cache)
|
|
90 |
+ |
|
91 |
+ request = buildstream_pb2.StatusRequest()
|
|
92 |
+ response = service.Status(request, context)
|
|
93 |
+ |
|
94 |
+ assert response.allow_updates == allow_updates
|