finn pushed to branch finn/separate-services at BuildGrid / buildgrid
Commits:
-
53018ab3
by finn at 2018-09-11T12:58:22Z
-
e55da704
by Finn Ball at 2018-09-11T12:58:30Z
-
4ee8df11
by Finn Ball at 2018-09-11T12:58:30Z
-
f98afb89
by finnball at 2018-09-11T12:58:30Z
28 changed files:
- .gitlab-ci.yml
- buildgrid/_app/cli.py
- buildgrid/_app/commands/cmd_execute.py
- + buildgrid/_app/commands/cmd_operation.py
- buildgrid/_app/commands/cmd_server.py
- + buildgrid/_app/server.py
- + buildgrid/_app/settings/default.yml
- + buildgrid/_app/settings/parser.py
- buildgrid/server/actioncache/service.py
- buildgrid/server/bots/service.py
- − buildgrid/server/buildgrid_server.py
- buildgrid/server/cas/service.py
- buildgrid/server/instance.py → buildgrid/server/controller.py
- buildgrid/server/execution/instance.py
- buildgrid/server/execution/service.py
- buildgrid/server/operations/instance.py
- buildgrid/server/operations/service.py
- buildgrid/server/referencestorage/service.py
- docs/source/using_dummy_build.rst
- docs/source/using_simple_build.rst
- setup.py
- tests/cas/test_services.py
- tests/cas/test_storage.py
- tests/integration/action_cache_service.py
- tests/integration/bots_service.py
- tests/integration/execution_service.py
- tests/integration/operations_service.py
- tests/integration/reference_storage_service.py
Changes:
... | ... | @@ -30,7 +30,7 @@ before_script: |
30 | 30 |
.run-dummy-job-template: &dummy-job
|
31 | 31 |
stage: test
|
32 | 32 |
script:
|
33 |
- - ${BGD} server start --allow-insecure &
|
|
33 |
+ - ${BGD} server start buildgrid/_app/settings/default.yml &
|
|
34 | 34 |
- sleep 1 # Allow server to boot
|
35 | 35 |
- ${BGD} bot dummy &
|
36 | 36 |
- ${BGD} execute request-dummy --wait-for-completion
|
... | ... | @@ -170,11 +170,8 @@ class BuildGridCLI(click.MultiCommand): |
170 | 170 |
return commands
|
171 | 171 |
|
172 | 172 |
def get_command(self, context, name):
|
173 |
- try:
|
|
174 |
- mod = __import__(name='buildgrid._app.commands.cmd_{}'.format(name),
|
|
175 |
- fromlist=['cli'])
|
|
176 |
- except ImportError:
|
|
177 |
- return None
|
|
173 |
+ mod = __import__(name='buildgrid._app.commands.cmd_{}'.format(name),
|
|
174 |
+ fromlist=['cli'])
|
|
178 | 175 |
return mod.cli
|
179 | 176 |
|
180 | 177 |
|
... | ... | @@ -33,7 +33,6 @@ import grpc |
33 | 33 |
from buildgrid.utils import merkle_maker, create_digest, write_fetch_blob
|
34 | 34 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
|
35 | 35 |
from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
|
36 |
-from buildgrid._protos.google.longrunning import operations_pb2, operations_pb2_grpc
|
|
37 | 36 |
|
38 | 37 |
from ..cli import pass_context
|
39 | 38 |
|
... | ... | @@ -62,7 +61,7 @@ def cli(context, remote, instance_name, client_key, client_cert, server_cert): |
62 | 61 |
credentials = context.load_client_credentials(client_key, client_cert, server_cert)
|
63 | 62 |
if not credentials:
|
64 | 63 |
click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
65 |
- "Use --allow-insecure in order to deactivate TLS encryption.\n", err=True)
|
|
64 |
+ "Use `insecure-mode: true` in order to deactivate TLS encryption.\n", err=True)
|
|
66 | 65 |
sys.exit(-1)
|
67 | 66 |
|
68 | 67 |
context.channel = grpc.secure_channel(context.remote, credentials)
|
... | ... | @@ -99,37 +98,6 @@ def request_dummy(context, number, wait_for_completion): |
99 | 98 |
context.logger.info(next(response))
|
100 | 99 |
|
101 | 100 |
|
102 |
-@cli.command('status', short_help="Get the status of an operation.")
|
|
103 |
-@click.argument('operation-name', nargs=1, type=click.STRING, required=True)
|
|
104 |
-@pass_context
|
|
105 |
-def operation_status(context, operation_name):
|
|
106 |
- context.logger.info("Getting operation status...")
|
|
107 |
- stub = operations_pb2_grpc.OperationsStub(context.channel)
|
|
108 |
- |
|
109 |
- request = operations_pb2.GetOperationRequest(name=operation_name)
|
|
110 |
- |
|
111 |
- response = stub.GetOperation(request)
|
|
112 |
- context.logger.info(response)
|
|
113 |
- |
|
114 |
- |
|
115 |
-@cli.command('list', short_help="List operations.")
|
|
116 |
-@pass_context
|
|
117 |
-def list_operations(context):
|
|
118 |
- context.logger.info("Getting list of operations")
|
|
119 |
- stub = operations_pb2_grpc.OperationsStub(context.channel)
|
|
120 |
- |
|
121 |
- request = operations_pb2.ListOperationsRequest(name=context.instance_name)
|
|
122 |
- |
|
123 |
- response = stub.ListOperations(request)
|
|
124 |
- |
|
125 |
- if not response.operations:
|
|
126 |
- context.logger.warning("No operations to list")
|
|
127 |
- return
|
|
128 |
- |
|
129 |
- for op in response.operations:
|
|
130 |
- context.logger.info(op)
|
|
131 |
- |
|
132 |
- |
|
133 | 101 |
@cli.command('wait', short_help="Streams an operation until it is complete.")
|
134 | 102 |
@click.argument('operation-name', nargs=1, type=click.STRING, required=True)
|
135 | 103 |
@pass_context
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+"""
|
|
17 |
+Operations command
|
|
18 |
+==================
|
|
19 |
+ |
|
20 |
+Check the status of operations
|
|
21 |
+"""
|
|
22 |
+ |
|
23 |
+import logging
|
|
24 |
+from urllib.parse import urlparse
|
|
25 |
+import sys
|
|
26 |
+ |
|
27 |
+import click
|
|
28 |
+import grpc
|
|
29 |
+ |
|
30 |
+from buildgrid._protos.google.longrunning import operations_pb2, operations_pb2_grpc
|
|
31 |
+ |
|
32 |
+from ..cli import pass_context
|
|
33 |
+ |
|
34 |
+ |
|
35 |
+@click.group(name='operation', short_help="Long running operations commands")
|
|
36 |
+@click.option('--remote', type=click.STRING, default='http://localhost:50051', show_default=True,
|
|
37 |
+ help="Remote execution server's URL (port defaults to 50051 if no specified).")
|
|
38 |
+@click.option('--client-key', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
39 |
+ help="Private client key for TLS (PEM-encoded)")
|
|
40 |
+@click.option('--client-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
41 |
+ help="Public client certificate for TLS (PEM-encoded)")
|
|
42 |
+@click.option('--server-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
43 |
+ help="Public server certificate for TLS (PEM-encoded)")
|
|
44 |
+@click.option('--instance-name', type=click.STRING, default='main', show_default=True,
|
|
45 |
+ help="Targeted farm instance name.")
|
|
46 |
+@pass_context
|
|
47 |
+def cli(context, remote, instance_name, client_key, client_cert, server_cert):
|
|
48 |
+ url = urlparse(remote)
|
|
49 |
+ |
|
50 |
+ context.remote = '{}:{}'.format(url.hostname, url.port or 50051)
|
|
51 |
+ context.instance_name = instance_name
|
|
52 |
+ |
|
53 |
+ if url.scheme == 'http':
|
|
54 |
+ context.channel = grpc.insecure_channel(context.remote)
|
|
55 |
+ else:
|
|
56 |
+ credentials = context.load_client_credentials(client_key, client_cert, server_cert)
|
|
57 |
+ if not credentials:
|
|
58 |
+ click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
|
59 |
+ "Use `insecure-mode: true` in order to deactivate TLS encryption.\n", err=True)
|
|
60 |
+ sys.exit(-1)
|
|
61 |
+ |
|
62 |
+ context.channel = grpc.secure_channel(context.remote, credentials)
|
|
63 |
+ |
|
64 |
+ context.logger = logging.getLogger(__name__)
|
|
65 |
+ context.logger.debug("Starting for remote {}".format(context.remote))
|
|
66 |
+ |
|
67 |
+ |
|
68 |
+@cli.command('status', short_help="Get the status of an operation.")
|
|
69 |
+@click.argument('operation-name', nargs=1, type=click.STRING, required=True)
|
|
70 |
+@pass_context
|
|
71 |
+def status(context, operation_name):
|
|
72 |
+ context.logger.info("Getting operation status...")
|
|
73 |
+ stub = operations_pb2_grpc.OperationsStub(context.channel)
|
|
74 |
+ |
|
75 |
+ request = operations_pb2.GetOperationRequest(name=operation_name)
|
|
76 |
+ |
|
77 |
+ response = stub.GetOperation(request)
|
|
78 |
+ context.logger.info(response)
|
|
79 |
+ |
|
80 |
+ |
|
81 |
+@cli.command('list', short_help="List operations.")
|
|
82 |
+@pass_context
|
|
83 |
+def lists(context):
|
|
84 |
+ context.logger.info("Getting list of operations")
|
|
85 |
+ stub = operations_pb2_grpc.OperationsStub(context.channel)
|
|
86 |
+ |
|
87 |
+ request = operations_pb2.ListOperationsRequest(name=context.instance_name)
|
|
88 |
+ |
|
89 |
+ response = stub.ListOperations(request)
|
|
90 |
+ |
|
91 |
+ if not response.operations:
|
|
92 |
+ context.logger.warning("No operations to list")
|
|
93 |
+ return
|
|
94 |
+ |
|
95 |
+ for op in response.operations:
|
|
96 |
+ context.logger.info(op)
|
... | ... | @@ -26,16 +26,14 @@ import sys |
26 | 26 |
|
27 | 27 |
import click
|
28 | 28 |
|
29 |
-from buildgrid.server import buildgrid_server
|
|
30 |
-from buildgrid.server.cas.storage.disk import DiskStorage
|
|
31 |
-from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
|
|
32 |
-from buildgrid.server.cas.storage.s3 import S3Storage
|
|
33 |
-from buildgrid.server.cas.storage.with_cache import WithCacheStorage
|
|
29 |
+from buildgrid.server.controller import ExecutionController
|
|
34 | 30 |
from buildgrid.server.actioncache.storage import ActionCache
|
31 |
+from buildgrid.server.cas.instance import ByteStreamInstance, ContentAddressableStorageInstance
|
|
32 |
+from buildgrid.server.referencestorage.storage import ReferenceCache
|
|
35 | 33 |
|
36 | 34 |
from ..cli import pass_context
|
37 |
- |
|
38 |
-_SIZE_PREFIXES = {'k': 2 ** 10, 'm': 2 ** 20, 'g': 2 ** 30, 't': 2 ** 40}
|
|
35 |
+from ..settings import parser
|
|
36 |
+from ..server import BuildGridServer
|
|
39 | 37 |
|
40 | 38 |
|
41 | 39 |
@click.group(name='server', short_help="Start a local server instance.")
|
... | ... | @@ -45,71 +43,58 @@ def cli(context): |
45 | 43 |
|
46 | 44 |
|
47 | 45 |
@cli.command('start', short_help="Setup a new server instance.")
|
48 |
-@click.argument('instances', nargs=-1, type=click.STRING)
|
|
49 |
-@click.option('--port', type=click.INT, default='50051', show_default=True,
|
|
50 |
- help="The port number to be listened.")
|
|
51 |
-@click.option('--server-key', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
52 |
- help="Private server key for TLS (PEM-encoded)")
|
|
53 |
-@click.option('--server-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
54 |
- help="Public server certificate for TLS (PEM-encoded)")
|
|
55 |
-@click.option('--client-certs', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
56 |
- help="Public client certificates for TLS (PEM-encoded, one single file)")
|
|
57 |
-@click.option('--allow-insecure', type=click.BOOL, is_flag=True,
|
|
58 |
- help="Whether or not to allow unencrypted connections.")
|
|
59 |
-@click.option('--allow-update-action-result/--forbid-update-action-result',
|
|
60 |
- 'allow_uar', default=True, show_default=True,
|
|
61 |
- help="Whether or not to allow clients to manually edit the action cache.")
|
|
62 |
-@click.option('--max-cached-actions', type=click.INT, default=50, show_default=True,
|
|
63 |
- help="Maximum number of actions to keep in the ActionCache.")
|
|
64 |
-@click.option('--cas', type=click.Choice(('lru', 's3', 'disk', 'with-cache')),
|
|
65 |
- help="The CAS storage type to use.")
|
|
66 |
-@click.option('--cas-cache', type=click.Choice(('lru', 's3', 'disk')),
|
|
67 |
- help="For --cas=with-cache, the CAS storage to use as the cache.")
|
|
68 |
-@click.option('--cas-fallback', type=click.Choice(('lru', 's3', 'disk')),
|
|
69 |
- help="For --cas=with-cache, the CAS storage to use as the fallback.")
|
|
70 |
-@click.option('--cas-lru-size', type=click.STRING,
|
|
71 |
- help="For --cas=lru, the LRU cache's memory limit.")
|
|
72 |
-@click.option('--cas-s3-bucket', type=click.STRING,
|
|
73 |
- help="For --cas=s3, the bucket name.")
|
|
74 |
-@click.option('--cas-s3-endpoint', type=click.STRING,
|
|
75 |
- help="For --cas=s3, the endpoint URI.")
|
|
76 |
-@click.option('--cas-disk-directory', type=click.Path(file_okay=False, dir_okay=True, writable=True),
|
|
77 |
- help="For --cas=disk, the folder to store CAS blobs in.")
|
|
46 |
+@click.argument('CONFIG', type=click.Path(file_okay=True, dir_okay=False, writable=False))
|
|
78 | 47 |
@pass_context
|
79 |
-def start(context, port, allow_insecure, server_key, server_cert, client_certs,
|
|
80 |
- instances, max_cached_actions, allow_uar, cas, **cas_args):
|
|
81 |
- """Setups a new server instance."""
|
|
82 |
- credentials = None
|
|
83 |
- if not allow_insecure:
|
|
84 |
- credentials = context.load_server_credentials(server_key, server_cert, client_certs)
|
|
85 |
- if not credentials and not allow_insecure:
|
|
86 |
- click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
|
87 |
- "Use --allow-insecure in order to deactivate TLS encryption.\n", err=True)
|
|
88 |
- sys.exit(-1)
|
|
89 |
- |
|
90 |
- context.credentials = credentials
|
|
91 |
- context.port = port
|
|
92 |
- |
|
93 |
- context.logger.info("BuildGrid server booting up")
|
|
94 |
- context.logger.info("Starting on port {}".format(port))
|
|
48 |
+def start(context, config):
|
|
49 |
+ with open(config) as f:
|
|
50 |
+ settings = parser.get_parser().safe_load(f)
|
|
95 | 51 |
|
96 |
- cas_storage = _make_cas_storage(context, cas, cas_args)
|
|
52 |
+ server_settings = settings['server']
|
|
53 |
+ insecure_mode = server_settings['insecure-mode']
|
|
97 | 54 |
|
98 |
- if cas_storage is None:
|
|
99 |
- context.logger.info("Running without CAS - action cache will be unavailable")
|
|
100 |
- action_cache = None
|
|
101 |
- |
|
102 |
- else:
|
|
103 |
- action_cache = ActionCache(cas_storage, max_cached_actions, allow_uar)
|
|
104 |
- |
|
105 |
- if instances is None:
|
|
106 |
- instances = ['main']
|
|
55 |
+ credentials = None
|
|
56 |
+ if not insecure_mode:
|
|
57 |
+ server_key = server_settings['tls-server-key']
|
|
58 |
+ server_cert = server_settings['tls-server-cert']
|
|
59 |
+ client_certs = server_settings['tls-client-certs']
|
|
60 |
+ credentials = context.load_server_credentials(server_key, server_cert, client_certs)
|
|
107 | 61 |
|
108 |
- server = buildgrid_server.BuildGridServer(port=context.port,
|
|
109 |
- credentials=context.credentials,
|
|
110 |
- instances=instances,
|
|
111 |
- cas_storage=cas_storage,
|
|
112 |
- action_cache=action_cache)
|
|
62 |
+ if not credentials:
|
|
63 |
+ click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
|
64 |
+ "Use `insecure-mode: true` in the server configuration in order to deactivate TLS encryption.\n", err=True)
|
|
65 |
+ sys.exit(-1)
|
|
66 |
+ |
|
67 |
+ instances = settings['instances']
|
|
68 |
+ |
|
69 |
+ execution_controllers = _instance_maker(instances, ExecutionController)
|
|
70 |
+ |
|
71 |
+ execution_instances = {}
|
|
72 |
+ bots_interfaces = {}
|
|
73 |
+ operations_instances = {}
|
|
74 |
+ |
|
75 |
+ # TODO: map properly in parser
|
|
76 |
+ for k, v in execution_controllers.items():
|
|
77 |
+ execution_instances[k] = v.execution_instance
|
|
78 |
+ bots_interfaces[k] = v.bots_interface
|
|
79 |
+ operations_instances[k] = v.operations_instance
|
|
80 |
+ |
|
81 |
+ reference_caches = _instance_maker(instances, ReferenceCache)
|
|
82 |
+ action_caches = _instance_maker(instances, ActionCache)
|
|
83 |
+ cas = _instance_maker(instances, ContentAddressableStorageInstance)
|
|
84 |
+ bytestreams = _instance_maker(instances, ByteStreamInstance)
|
|
85 |
+ |
|
86 |
+ port = server_settings['port']
|
|
87 |
+ server = BuildGridServer(port=port,
|
|
88 |
+ credentials=credentials,
|
|
89 |
+ execution_instances=execution_instances,
|
|
90 |
+ bots_interfaces=bots_interfaces,
|
|
91 |
+ operations_instances=operations_instances,
|
|
92 |
+ reference_storage_instances=reference_caches,
|
|
93 |
+ action_cache_instances=action_caches,
|
|
94 |
+ cas_instances=cas,
|
|
95 |
+ bytestream_instances=bytestreams)
|
|
96 |
+ |
|
97 |
+ context.logger.info("Starting server on port {}".format(port))
|
|
113 | 98 |
loop = asyncio.get_event_loop()
|
114 | 99 |
try:
|
115 | 100 |
server.start()
|
... | ... | @@ -119,57 +104,20 @@ def start(context, port, allow_insecure, server_key, server_cert, client_certs, |
119 | 104 |
pass
|
120 | 105 |
|
121 | 106 |
finally:
|
107 |
+ context.logger.info("Stopping server")
|
|
122 | 108 |
server.stop()
|
123 | 109 |
loop.close()
|
124 | 110 |
|
125 | 111 |
|
126 |
-def _make_cas_storage(context, cas_type, cas_args):
|
|
127 |
- """Returns the storage provider corresponding to the given `cas_type`,
|
|
128 |
- or None if the provider cannot be created.
|
|
129 |
- """
|
|
130 |
- if cas_type == "lru":
|
|
131 |
- if cas_args["cas_lru_size"] is None:
|
|
132 |
- context.logger.error("--cas-lru-size is required for LRU CAS")
|
|
133 |
- return None
|
|
134 |
- try:
|
|
135 |
- size = _parse_size(cas_args["cas_lru_size"])
|
|
136 |
- except ValueError:
|
|
137 |
- context.logger.error('Invalid LRU size "{0}"'.format(cas_args["cas_lru_size"]))
|
|
138 |
- return None
|
|
139 |
- return LRUMemoryCache(size)
|
|
140 |
- elif cas_type == "s3":
|
|
141 |
- if cas_args["cas_s3_bucket"] is None:
|
|
142 |
- context.logger.error("--cas-s3-bucket is required for S3 CAS")
|
|
143 |
- return None
|
|
144 |
- if cas_args["cas_s3_endpoint"] is not None:
|
|
145 |
- return S3Storage(cas_args["cas_s3_bucket"],
|
|
146 |
- endpoint_url=cas_args["cas_s3_endpoint"])
|
|
147 |
- return S3Storage(cas_args["cas_s3_bucket"])
|
|
148 |
- elif cas_type == "disk":
|
|
149 |
- if cas_args["cas_disk_directory"] is None:
|
|
150 |
- context.logger.error("--cas-disk-directory is required for disk CAS")
|
|
151 |
- return None
|
|
152 |
- return DiskStorage(cas_args["cas_disk_directory"])
|
|
153 |
- elif cas_type == "with-cache":
|
|
154 |
- cache = _make_cas_storage(context, cas_args["cas_cache"], cas_args)
|
|
155 |
- fallback = _make_cas_storage(context, cas_args["cas_fallback"], cas_args)
|
|
156 |
- if cache is None:
|
|
157 |
- context.logger.error("Missing cache provider for --cas=with-cache")
|
|
158 |
- return None
|
|
159 |
- elif fallback is None:
|
|
160 |
- context.logger.error("Missing fallback provider for --cas=with-cache")
|
|
161 |
- return None
|
|
162 |
- return WithCacheStorage(cache, fallback)
|
|
163 |
- elif cas_type is None:
|
|
164 |
- return None
|
|
165 |
- return None
|
|
166 |
- |
|
167 |
- |
|
168 |
-def _parse_size(size):
|
|
169 |
- """Convert a string containing a size in bytes (e.g. '2GB') to a number."""
|
|
170 |
- size = size.lower()
|
|
171 |
- if size[-1] == 'b':
|
|
172 |
- size = size[:-1]
|
|
173 |
- if size[-1] in _SIZE_PREFIXES:
|
|
174 |
- return int(size[:-1]) * _SIZE_PREFIXES[size[-1]]
|
|
175 |
- return int(size)
|
|
112 |
+# Turn away now if you want to keep your eyes
|
|
113 |
+def _instance_maker(instances, service_type):
|
|
114 |
+ # TODO get this mapped in parser
|
|
115 |
+ made = {}
|
|
116 |
+ |
|
117 |
+ for instance in instances:
|
|
118 |
+ services = instance['services']
|
|
119 |
+ instance_name = instance['name']
|
|
120 |
+ for service in services:
|
|
121 |
+ if isinstance(service, service_type):
|
|
122 |
+ made[instance_name] = service
|
|
123 |
+ return made
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+"""
|
|
17 |
+BuildGridServer
|
|
18 |
+===============
|
|
19 |
+ |
|
20 |
+Creates a BuildGrid server, binding all the requisite service instances together.
|
|
21 |
+"""
|
|
22 |
+ |
|
23 |
+import logging
|
|
24 |
+from concurrent import futures
|
|
25 |
+ |
|
26 |
+import grpc
|
|
27 |
+ |
|
28 |
+from buildgrid.server.cas.service import ByteStreamService, ContentAddressableStorageService
|
|
29 |
+from buildgrid.server.actioncache.service import ActionCacheService
|
|
30 |
+from buildgrid.server.execution.service import ExecutionService
|
|
31 |
+from buildgrid.server.operations.service import OperationsService
|
|
32 |
+from buildgrid.server.bots.service import BotsService
|
|
33 |
+from buildgrid.server.referencestorage.service import ReferenceStorageService
|
|
34 |
+ |
|
35 |
+ |
|
36 |
+class BuildGridServer:
|
|
37 |
+ |
|
38 |
+ def __init__(self, port=50051, max_workers=10, credentials=None,
|
|
39 |
+ execution_instances=None, bots_interfaces=None, operations_instances=None,
|
|
40 |
+ operations_service_instances=None, reference_storage_instances=None,
|
|
41 |
+ action_cache_instances=None, cas_instances=None, bytestream_instances=None):
|
|
42 |
+ |
|
43 |
+ self.logger = logging.getLogger(__name__)
|
|
44 |
+ address = '[::]:{0}'.format(port)
|
|
45 |
+ |
|
46 |
+ server = grpc.server(futures.ThreadPoolExecutor(max_workers))
|
|
47 |
+ |
|
48 |
+ if credentials is not None:
|
|
49 |
+ self.logger.info("Secure connection")
|
|
50 |
+ server.add_secure_port(address, credentials)
|
|
51 |
+ |
|
52 |
+ else:
|
|
53 |
+ self.logger.info("Insecure connection")
|
|
54 |
+ server.add_insecure_port(address)
|
|
55 |
+ |
|
56 |
+ if execution_instances:
|
|
57 |
+ self.logger.debug("Adding execution instances {}".format(
|
|
58 |
+ execution_instances.keys()))
|
|
59 |
+ ExecutionService(server, execution_instances)
|
|
60 |
+ |
|
61 |
+ if bots_interfaces:
|
|
62 |
+ self.logger.debug("Adding bots interfaces {}".format(
|
|
63 |
+ bots_interfaces.keys()))
|
|
64 |
+ BotsService(server, bots_interfaces)
|
|
65 |
+ |
|
66 |
+ if operations_instances:
|
|
67 |
+ self.logger.debug("Adding operations instances {}".format(
|
|
68 |
+ operations_instances.keys()))
|
|
69 |
+ OperationsService(server, operations_instances)
|
|
70 |
+ |
|
71 |
+ if reference_storage_instances:
|
|
72 |
+ self.logger.debug("Adding reference storages {}".format(
|
|
73 |
+ reference_storage_instances.keys()))
|
|
74 |
+ ReferenceStorageService(server, reference_storage_instances)
|
|
75 |
+ |
|
76 |
+ if action_cache_instances:
|
|
77 |
+ self.logger.debug("Adding action cache instances {}".format(
|
|
78 |
+ action_cache_instances.keys()))
|
|
79 |
+ ActionCacheService(server, action_cache_instances)
|
|
80 |
+ |
|
81 |
+ if cas_instances:
|
|
82 |
+ self.logger.debug("Adding cas instances {}".format(
|
|
83 |
+ cas_instances.keys()))
|
|
84 |
+ ContentAddressableStorageService(server, cas_instances)
|
|
85 |
+ |
|
86 |
+ if bytestream_instances:
|
|
87 |
+ self.logger.debug("Adding bytestream instances {}".format(
|
|
88 |
+ bytestream_instances.keys()))
|
|
89 |
+ ByteStreamService(server, bytestream_instances)
|
|
90 |
+ |
|
91 |
+ self._server = server
|
|
92 |
+ |
|
93 |
+ def start(self):
|
|
94 |
+ self._server.start()
|
|
95 |
+ |
|
96 |
+ def stop(self):
|
|
97 |
+ self._server.stop(grace=0)
|
1 |
+server:
|
|
2 |
+ port: 50051
|
|
3 |
+ tls-server-key: null
|
|
4 |
+ tls-server-cert: null
|
|
5 |
+ tls-client-certs: null
|
|
6 |
+ insecure-mode: true
|
|
7 |
+ |
|
8 |
+description: |
|
|
9 |
+ A single default instance
|
|
10 |
+ |
|
11 |
+instances:
|
|
12 |
+ - name: main
|
|
13 |
+ description: |
|
|
14 |
+ The main server
|
|
15 |
+ |
|
16 |
+ storages:
|
|
17 |
+ - !disk-storage &main-storage
|
|
18 |
+ path: ~/cas/
|
|
19 |
+ |
|
20 |
+ services:
|
|
21 |
+ - !action-cache &main-action
|
|
22 |
+ storage: *main-storage
|
|
23 |
+ max_cached_refs: 256
|
|
24 |
+ allow_updates: true
|
|
25 |
+ |
|
26 |
+ - !execution
|
|
27 |
+ storage: *main-storage
|
|
28 |
+ action_cache: *main-action
|
|
29 |
+ |
|
30 |
+ - !cas
|
|
31 |
+ storage: *main-storage
|
|
32 |
+ |
|
33 |
+ - !bytestream
|
|
34 |
+ storage: *main-storage
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+import yaml
|
|
17 |
+ |
|
18 |
+from buildgrid.server.controller import ExecutionController
|
|
19 |
+from buildgrid.server.actioncache.storage import ActionCache
|
|
20 |
+from buildgrid.server.cas.instance import ByteStreamInstance, ContentAddressableStorageInstance
|
|
21 |
+from buildgrid.server.cas.storage.disk import DiskStorage
|
|
22 |
+from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
|
|
23 |
+from buildgrid.server.cas.storage.s3 import S3Storage
|
|
24 |
+from buildgrid.server.cas.storage.with_cache import WithCacheStorage
|
|
25 |
+ |
|
26 |
+ |
|
27 |
+class YamlFactory(yaml.YAMLObject):
|
|
28 |
+ @classmethod
|
|
29 |
+ def from_yaml(cls, loader, node):
|
|
30 |
+ values = loader.construct_mapping(node, deep=True)
|
|
31 |
+ return cls(**values)
|
|
32 |
+ |
|
33 |
+ |
|
34 |
+class Disk(YamlFactory):
|
|
35 |
+ |
|
36 |
+ yaml_tag = u'!disk-storage'
|
|
37 |
+ |
|
38 |
+ def __new__(cls, path):
|
|
39 |
+ return DiskStorage(path)
|
|
40 |
+ |
|
41 |
+ |
|
42 |
+class LRU(YamlFactory):
|
|
43 |
+ |
|
44 |
+ yaml_tag = u'!lru-storage'
|
|
45 |
+ |
|
46 |
+ def __new__(cls, size):
|
|
47 |
+ return LRUMemoryCache(_parse_size(size))
|
|
48 |
+ |
|
49 |
+ |
|
50 |
+class S3(YamlFactory):
|
|
51 |
+ |
|
52 |
+ yaml_tag = u'!s3-storage'
|
|
53 |
+ |
|
54 |
+ def __new__(cls, bucket, endpoint):
|
|
55 |
+ return S3Storage(bucket, endpoint_url=endpoint)
|
|
56 |
+ |
|
57 |
+ |
|
58 |
+class WithCache(YamlFactory):
|
|
59 |
+ |
|
60 |
+ yaml_tag = u'!with-cache-storage'
|
|
61 |
+ |
|
62 |
+ def __new__(cls, cache, fallback):
|
|
63 |
+ return WithCacheStorage(cache, fallback)
|
|
64 |
+ |
|
65 |
+ |
|
66 |
+class Execution(YamlFactory):
|
|
67 |
+ |
|
68 |
+ yaml_tag = u'!execution'
|
|
69 |
+ |
|
70 |
+ def __new__(cls, storage, action_cache=None):
|
|
71 |
+ return ExecutionController(action_cache, storage)
|
|
72 |
+ |
|
73 |
+ |
|
74 |
+class Action(YamlFactory):
|
|
75 |
+ |
|
76 |
+ yaml_tag = u'!action-cache'
|
|
77 |
+ |
|
78 |
+ def __new__(cls, storage, max_cached_refs=0, allow_updates=True):
|
|
79 |
+ return ActionCache(storage, max_cached_refs, allow_updates)
|
|
80 |
+ |
|
81 |
+ |
|
82 |
+class CAS(YamlFactory):
|
|
83 |
+ |
|
84 |
+ yaml_tag = u'!cas'
|
|
85 |
+ |
|
86 |
+ def __new__(cls, storage):
|
|
87 |
+ return ContentAddressableStorageInstance(storage)
|
|
88 |
+ |
|
89 |
+ |
|
90 |
+class ByteStream(YamlFactory):
|
|
91 |
+ |
|
92 |
+ yaml_tag = u'!bytestream'
|
|
93 |
+ |
|
94 |
+ def __new__(cls, storage):
|
|
95 |
+ return ByteStreamInstance(storage)
|
|
96 |
+ |
|
97 |
+ |
|
98 |
+def _parse_size(size):
|
|
99 |
+ """Convert a string containing a size in bytes (e.g. '2GB') to a number."""
|
|
100 |
+ _size_prefixes = {'k': 2 ** 10, 'm': 2 ** 20, 'g': 2 ** 30, 't': 2 ** 40}
|
|
101 |
+ size = size.lower()
|
|
102 |
+ |
|
103 |
+ if size[-1] == 'b':
|
|
104 |
+ size = size[:-1]
|
|
105 |
+ if size[-1] in _size_prefixes:
|
|
106 |
+ return int(size[:-1]) * _size_prefixes[size[-1]]
|
|
107 |
+ return int(size)
|
|
108 |
+ |
|
109 |
+ |
|
110 |
+def get_parser():
|
|
111 |
+ |
|
112 |
+ yaml.SafeLoader.add_constructor(Execution.yaml_tag, Execution.from_yaml)
|
|
113 |
+ yaml.SafeLoader.add_constructor(Execution.yaml_tag, Execution.from_yaml)
|
|
114 |
+ yaml.SafeLoader.add_constructor(Action.yaml_tag, Action.from_yaml)
|
|
115 |
+ yaml.SafeLoader.add_constructor(Disk.yaml_tag, Disk.from_yaml)
|
|
116 |
+ yaml.SafeLoader.add_constructor(LRU.yaml_tag, LRU.from_yaml)
|
|
117 |
+ yaml.SafeLoader.add_constructor(S3.yaml_tag, S3.from_yaml)
|
|
118 |
+ yaml.SafeLoader.add_constructor(WithCache.yaml_tag, WithCache.from_yaml)
|
|
119 |
+ yaml.SafeLoader.add_constructor(CAS.yaml_tag, CAS.from_yaml)
|
|
120 |
+ yaml.SafeLoader.add_constructor(ByteStream.yaml_tag, ByteStream.from_yaml)
|
|
121 |
+ |
|
122 |
+ return yaml
|
... | ... | @@ -27,18 +27,27 @@ import grpc |
27 | 27 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
28 | 28 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2_grpc
|
29 | 29 |
|
30 |
-from .._exceptions import NotFoundError
|
|
30 |
+from .._exceptions import InvalidArgumentError, NotFoundError
|
|
31 | 31 |
|
32 | 32 |
|
33 | 33 |
class ActionCacheService(remote_execution_pb2_grpc.ActionCacheServicer):
|
34 | 34 |
|
35 |
- def __init__(self, action_cache):
|
|
36 |
- self._action_cache = action_cache
|
|
35 |
+ def __init__(self, server, instances):
|
|
36 |
+ self._instances = instances
|
|
37 |
+ |
|
37 | 38 |
self.logger = logging.getLogger(__name__)
|
38 | 39 |
|
40 |
+ remote_execution_pb2_grpc.add_ActionCacheServicer_to_server(self, server)
|
|
41 |
+ |
|
39 | 42 |
def GetActionResult(self, request, context):
|
40 | 43 |
try:
|
41 |
- return self._action_cache.get_action_result(request.action_digest)
|
|
44 |
+ instance = self._get_instance(request.instance_name)
|
|
45 |
+ return instance.get_action_result(request.action_digest)
|
|
46 |
+ |
|
47 |
+ except InvalidArgumentError as e:
|
|
48 |
+ self.logger.error(e)
|
|
49 |
+ context.set_details(str(e))
|
|
50 |
+ context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
|
42 | 51 |
|
43 | 52 |
except NotFoundError as e:
|
44 | 53 |
self.logger.error(e)
|
... | ... | @@ -48,11 +57,24 @@ class ActionCacheService(remote_execution_pb2_grpc.ActionCacheServicer): |
48 | 57 |
|
49 | 58 |
def UpdateActionResult(self, request, context):
|
50 | 59 |
try:
|
51 |
- self._action_cache.update_action_result(request.action_digest, request.action_result)
|
|
60 |
+ instance = self._get_instance(request.instance_name)
|
|
61 |
+ instance.update_action_result(request.action_digest, request.action_result)
|
|
52 | 62 |
return request.action_result
|
53 | 63 |
|
64 |
+ except InvalidArgumentError as e:
|
|
65 |
+ self.logger.error(e)
|
|
66 |
+ context.set_details(str(e))
|
|
67 |
+ context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
|
68 |
+ |
|
54 | 69 |
except NotImplementedError as e:
|
55 | 70 |
self.logger.error(e)
|
56 | 71 |
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
57 | 72 |
|
58 | 73 |
return remote_execution_pb2.ActionResult()
|
74 |
+ |
|
75 |
+ def _get_instance(self, instance_name):
|
|
76 |
+ try:
|
|
77 |
+ return self._instances[instance_name]
|
|
78 |
+ |
|
79 |
+ except KeyError:
|
|
80 |
+ raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
|
... | ... | @@ -33,10 +33,12 @@ from .._exceptions import InvalidArgumentError, OutofSyncError |
33 | 33 |
|
34 | 34 |
class BotsService(bots_pb2_grpc.BotsServicer):
|
35 | 35 |
|
36 |
- def __init__(self, instances):
|
|
36 |
+ def __init__(self, server, instances):
|
|
37 | 37 |
self._instances = instances
|
38 | 38 |
self.logger = logging.getLogger(__name__)
|
39 | 39 |
|
40 |
+ bots_pb2_grpc.add_BotsServicer_to_server(self, server)
|
|
41 |
+ |
|
40 | 42 |
def CreateBotSession(self, request, context):
|
41 | 43 |
try:
|
42 | 44 |
parent = request.parent
|
1 |
-# Copyright (C) 2018 Bloomberg LP
|
|
2 |
-#
|
|
3 |
-# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
-# you may not use this file except in compliance with the License.
|
|
5 |
-# You may obtain a copy of the License at
|
|
6 |
-#
|
|
7 |
-# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
-#
|
|
9 |
-# Unless required by applicable law or agreed to in writing, software
|
|
10 |
-# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
-# See the License for the specific language governing permissions and
|
|
13 |
-# limitations under the License.
|
|
14 |
- |
|
15 |
- |
|
16 |
-"""
|
|
17 |
-BuildGridServer
|
|
18 |
-==============
|
|
19 |
- |
|
20 |
-Creates the user a local server BuildGrid server.
|
|
21 |
-"""
|
|
22 |
- |
|
23 |
-from concurrent import futures
|
|
24 |
- |
|
25 |
-import grpc
|
|
26 |
- |
|
27 |
-from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
|
|
28 |
-from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2_grpc
|
|
29 |
-from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2_grpc
|
|
30 |
-from buildgrid._protos.google.longrunning import operations_pb2_grpc
|
|
31 |
- |
|
32 |
-from .instance import BuildGridInstance
|
|
33 |
-from .cas.service import ByteStreamService, ContentAddressableStorageService
|
|
34 |
-from .actioncache.service import ActionCacheService
|
|
35 |
-from .execution.service import ExecutionService
|
|
36 |
-from .operations.service import OperationsService
|
|
37 |
-from .bots.service import BotsService
|
|
38 |
- |
|
39 |
- |
|
40 |
-class BuildGridServer:
|
|
41 |
- |
|
42 |
- def __init__(self, port=50051, credentials=None, instances=None,
|
|
43 |
- max_workers=10, action_cache=None, cas_storage=None):
|
|
44 |
- address = '[::]:{0}'.format(port)
|
|
45 |
- |
|
46 |
- self._server = grpc.server(futures.ThreadPoolExecutor(max_workers))
|
|
47 |
- |
|
48 |
- if credentials is not None:
|
|
49 |
- self._server.add_secure_port(address, credentials)
|
|
50 |
- else:
|
|
51 |
- self._server.add_insecure_port(address)
|
|
52 |
- |
|
53 |
- if cas_storage is not None:
|
|
54 |
- cas_service = ContentAddressableStorageService(cas_storage)
|
|
55 |
- remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(cas_service,
|
|
56 |
- self._server)
|
|
57 |
- bytestream_pb2_grpc.add_ByteStreamServicer_to_server(ByteStreamService(cas_storage),
|
|
58 |
- self._server)
|
|
59 |
- if action_cache is not None:
|
|
60 |
- action_cache_service = ActionCacheService(action_cache)
|
|
61 |
- remote_execution_pb2_grpc.add_ActionCacheServicer_to_server(action_cache_service,
|
|
62 |
- self._server)
|
|
63 |
- |
|
64 |
- buildgrid_instances = {}
|
|
65 |
- if not instances:
|
|
66 |
- buildgrid_instances["main"] = BuildGridInstance(action_cache, cas_storage)
|
|
67 |
- else:
|
|
68 |
- for name in instances:
|
|
69 |
- buildgrid_instances[name] = BuildGridInstance(action_cache, cas_storage)
|
|
70 |
- |
|
71 |
- bots_pb2_grpc.add_BotsServicer_to_server(BotsService(buildgrid_instances),
|
|
72 |
- self._server)
|
|
73 |
- remote_execution_pb2_grpc.add_ExecutionServicer_to_server(ExecutionService(buildgrid_instances),
|
|
74 |
- self._server)
|
|
75 |
- operations_pb2_grpc.add_OperationsServicer_to_server(OperationsService(buildgrid_instances),
|
|
76 |
- self._server)
|
|
77 |
- |
|
78 |
- def start(self):
|
|
79 |
- self._server.start()
|
|
80 |
- |
|
81 |
- def stop(self):
|
|
82 |
- self._server.stop(0)
|
... | ... | @@ -27,18 +27,20 @@ import logging |
27 | 27 |
import grpc
|
28 | 28 |
|
29 | 29 |
from buildgrid._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
|
30 |
-from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as re_pb2
|
|
31 |
-from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2_grpc as re_pb2_grpc
|
|
30 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
31 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2_grpc
|
|
32 | 32 |
|
33 | 33 |
from .._exceptions import InvalidArgumentError, NotFoundError, OutOfRangeError
|
34 | 34 |
|
35 | 35 |
|
36 |
-class ContentAddressableStorageService(re_pb2_grpc.ContentAddressableStorageServicer):
|
|
36 |
+class ContentAddressableStorageService(remote_execution_pb2_grpc.ContentAddressableStorageServicer):
|
|
37 | 37 |
|
38 |
- def __init__(self, instances):
|
|
38 |
+ def __init__(self, server, instances):
|
|
39 | 39 |
self.logger = logging.getLogger(__name__)
|
40 | 40 |
self._instances = instances
|
41 | 41 |
|
42 |
+ remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(self, server)
|
|
43 |
+ |
|
42 | 44 |
def FindMissingBlobs(self, request, context):
|
43 | 45 |
try:
|
44 | 46 |
instance = self._get_instance(request.instance_name)
|
... | ... | @@ -49,7 +51,7 @@ class ContentAddressableStorageService(re_pb2_grpc.ContentAddressableStorageServ |
49 | 51 |
context.set_details(str(e))
|
50 | 52 |
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
51 | 53 |
|
52 |
- return re_pb2.FindMissingBlobsResponse()
|
|
54 |
+ return remote_execution_pb2.FindMissingBlobsResponse()
|
|
53 | 55 |
|
54 | 56 |
def BatchUpdateBlobs(self, request, context):
|
55 | 57 |
try:
|
... | ... | @@ -61,7 +63,7 @@ class ContentAddressableStorageService(re_pb2_grpc.ContentAddressableStorageServ |
61 | 63 |
context.set_details(str(e))
|
62 | 64 |
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
63 | 65 |
|
64 |
- return re_pb2.BatchReadBlobsResponse()
|
|
66 |
+ return remote_execution_pb2.BatchReadBlobsResponse()
|
|
65 | 67 |
|
66 | 68 |
def _get_instance(self, instance_name):
|
67 | 69 |
try:
|
... | ... | @@ -73,10 +75,12 @@ class ContentAddressableStorageService(re_pb2_grpc.ContentAddressableStorageServ |
73 | 75 |
|
74 | 76 |
class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
|
75 | 77 |
|
76 |
- def __init__(self, instances):
|
|
78 |
+ def __init__(self, server, instances):
|
|
77 | 79 |
self.logger = logging.getLogger(__name__)
|
78 | 80 |
self._instances = instances
|
79 | 81 |
|
82 |
+ bytestream_pb2_grpc.add_ByteStreamServicer_to_server(self, server)
|
|
83 |
+ |
|
80 | 84 |
def Read(self, request, context):
|
81 | 85 |
try:
|
82 | 86 |
path = request.resource_name.split("/")
|
... | ... | @@ -14,31 +14,36 @@ |
14 | 14 |
|
15 | 15 |
|
16 | 16 |
"""
|
17 |
-BuildGrid Instance
|
|
17 |
+Execution Controller
|
|
18 | 18 |
==================
|
19 | 19 |
|
20 |
-An instance of the BuildGrid server.
|
|
20 |
+An instance of the Execution controller.
|
|
21 | 21 |
|
22 |
-Contains scheduler, execution instance and an interface to the bots.
|
|
22 |
+All this stuff you need to make the execution service work.
|
|
23 |
+ |
|
24 |
+Contains scheduler, execution instance, an interface to the bots
|
|
25 |
+and an operations instance.
|
|
23 | 26 |
"""
|
24 | 27 |
|
25 | 28 |
|
26 | 29 |
import logging
|
27 | 30 |
|
28 |
-from .execution.instance import ExecutionInstance
|
|
29 | 31 |
from .scheduler import Scheduler
|
30 | 32 |
from .bots.instance import BotsInterface
|
33 |
+from .execution.instance import ExecutionInstance
|
|
34 |
+from .operations.instance import OperationsInstance
|
|
31 | 35 |
|
32 | 36 |
|
33 |
-class BuildGridInstance(ExecutionInstance, BotsInterface):
|
|
37 |
+class ExecutionController:
|
|
34 | 38 |
|
35 |
- def __init__(self, action_cache=None, cas_storage=None):
|
|
39 |
+ def __init__(self, action_cache=None, storage=None):
|
|
36 | 40 |
scheduler = Scheduler(action_cache)
|
37 | 41 |
|
38 | 42 |
self.logger = logging.getLogger(__name__)
|
39 | 43 |
|
40 |
- ExecutionInstance.__init__(self, scheduler, cas_storage)
|
|
41 |
- BotsInterface.__init__(self, scheduler)
|
|
44 |
+ self._execution_instance = ExecutionInstance(scheduler, storage)
|
|
45 |
+ self._bots_interface = BotsInterface(scheduler)
|
|
46 |
+ self._operations_instance = OperationsInstance(scheduler)
|
|
42 | 47 |
|
43 | 48 |
def stream_operation_updates(self, message_queue, operation_name):
|
44 | 49 |
operation = message_queue.get()
|
... | ... | @@ -50,3 +55,15 @@ class BuildGridInstance(ExecutionInstance, BotsInterface): |
50 | 55 |
def cancel_operation(self, name):
|
51 | 56 |
# TODO: Cancel leases
|
52 | 57 |
raise NotImplementedError("Cancelled operations not supported")
|
58 |
+ |
|
59 |
+ @property
|
|
60 |
+ def execution_instance(self):
|
|
61 |
+ return self._execution_instance
|
|
62 |
+ |
|
63 |
+ @property
|
|
64 |
+ def bots_interface(self):
|
|
65 |
+ return self._bots_interface
|
|
66 |
+ |
|
67 |
+ @property
|
|
68 |
+ def operations_instance(self):
|
|
69 |
+ return self._operations_instance
|
... | ... | @@ -24,6 +24,7 @@ import logging |
24 | 24 |
from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Action
|
25 | 25 |
|
26 | 26 |
from ..job import Job
|
27 |
+from .._exceptions import InvalidArgumentError
|
|
27 | 28 |
|
28 | 29 |
|
29 | 30 |
class ExecutionInstance:
|
... | ... | @@ -51,3 +52,24 @@ class ExecutionInstance: |
51 | 52 |
self._scheduler.append_job(job, skip_cache_lookup)
|
52 | 53 |
|
53 | 54 |
return job.get_operation()
|
55 |
+ |
|
56 |
+ def register_message_client(self, name, queue):
|
|
57 |
+ try:
|
|
58 |
+ self._scheduler.register_client(name, queue)
|
|
59 |
+ |
|
60 |
+ except KeyError:
|
|
61 |
+ raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
|
62 |
+ |
|
63 |
+ def unregister_message_client(self, name, queue):
|
|
64 |
+ try:
|
|
65 |
+ self._scheduler.unregister_client(name, queue)
|
|
66 |
+ |
|
67 |
+ except KeyError:
|
|
68 |
+ raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
|
69 |
+ |
|
70 |
+ def stream_operation_updates(self, message_queue, operation_name):
|
|
71 |
+ operation = message_queue.get()
|
|
72 |
+ while not operation.done:
|
|
73 |
+ yield operation
|
|
74 |
+ operation = message_queue.get()
|
|
75 |
+ yield operation
|
... | ... | @@ -35,10 +35,12 @@ from .._exceptions import InvalidArgumentError |
35 | 35 |
|
36 | 36 |
class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
|
37 | 37 |
|
38 |
- def __init__(self, instances):
|
|
38 |
+ def __init__(self, server, instances):
|
|
39 | 39 |
self.logger = logging.getLogger(__name__)
|
40 | 40 |
self._instances = instances
|
41 | 41 |
|
42 |
+ remote_execution_pb2_grpc.add_ExecutionServicer_to_server(self, server)
|
|
43 |
+ |
|
42 | 44 |
def Execute(self, request, context):
|
43 | 45 |
try:
|
44 | 46 |
message_queue = queue.Queue()
|
... | ... | @@ -64,3 +64,14 @@ class OperationsInstance: |
64 | 64 |
|
65 | 65 |
except KeyError:
|
66 | 66 |
raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
67 |
+ |
|
68 |
+ def stream_operation_updates(self, message_queue, operation_name):
|
|
69 |
+ operation = message_queue.get()
|
|
70 |
+ while not operation.done:
|
|
71 |
+ yield operation
|
|
72 |
+ operation = message_queue.get()
|
|
73 |
+ yield operation
|
|
74 |
+ |
|
75 |
+ def cancel_operation(self, name):
|
|
76 |
+ # TODO: Cancel leases
|
|
77 |
+ raise NotImplementedError("Cancelled operations not supported")
|
... | ... | @@ -32,10 +32,12 @@ from .._exceptions import InvalidArgumentError |
32 | 32 |
|
33 | 33 |
class OperationsService(operations_pb2_grpc.OperationsServicer):
|
34 | 34 |
|
35 |
- def __init__(self, instances):
|
|
35 |
+ def __init__(self, server, instances):
|
|
36 | 36 |
self._instances = instances
|
37 | 37 |
self.logger = logging.getLogger(__name__)
|
38 | 38 |
|
39 |
+ operations_pb2_grpc.add_OperationsServicer_to_server(self, server)
|
|
40 |
+ |
|
39 | 41 |
def GetOperation(self, request, context):
|
40 | 42 |
try:
|
41 | 43 |
name = request.name
|
... | ... | @@ -20,34 +20,70 @@ import grpc |
20 | 20 |
from buildgrid._protos.buildstream.v2 import buildstream_pb2
|
21 | 21 |
from buildgrid._protos.buildstream.v2 import buildstream_pb2_grpc
|
22 | 22 |
|
23 |
-from .._exceptions import NotFoundError
|
|
23 |
+from .._exceptions import InvalidArgumentError, NotFoundError
|
|
24 | 24 |
|
25 | 25 |
|
26 | 26 |
class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer):
|
27 | 27 |
|
28 |
- def __init__(self, reference_cache):
|
|
29 |
- self._reference_cache = reference_cache
|
|
28 |
+ def __init__(self, server, instances):
|
|
30 | 29 |
self.logger = logging.getLogger(__name__)
|
31 | 30 |
|
31 |
+ self._instances = instances
|
|
32 |
+ |
|
33 |
+ buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(self, server)
|
|
34 |
+ |
|
32 | 35 |
def GetReference(self, request, context):
|
33 | 36 |
try:
|
37 |
+ instance = self._get_instance(request.instance_name)
|
|
38 |
+ digest = instance.get_digest_reference(request.key)
|
|
34 | 39 |
response = buildstream_pb2.GetReferenceResponse()
|
35 |
- response.digest.CopyFrom(self._reference_cache.get_digest_reference(request.key))
|
|
40 |
+ response.digest.CopyFrom(digest)
|
|
36 | 41 |
return response
|
37 | 42 |
|
43 |
+ except InvalidArgumentError as e:
|
|
44 |
+ self.logger.error(e)
|
|
45 |
+ context.set_details(str(e))
|
|
46 |
+ context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
|
47 |
+ |
|
38 | 48 |
except NotFoundError:
|
39 | 49 |
context.set_code(grpc.StatusCode.NOT_FOUND)
|
40 | 50 |
|
51 |
+ return buildstream_pb2.GetReferenceResponse()
|
|
52 |
+ |
|
41 | 53 |
def UpdateReference(self, request, context):
|
42 | 54 |
try:
|
55 |
+ instance = self._get_instance(request.instance_name)
|
|
56 |
+ digest = request.digest
|
|
57 |
+ |
|
43 | 58 |
for key in request.keys:
|
44 |
- self._reference_cache.update_reference(key, request.digest)
|
|
59 |
+ instance.update_reference(key, digest)
|
|
45 | 60 |
|
46 |
- return buildstream_pb2.UpdateReferenceResponse()
|
|
61 |
+ except InvalidArgumentError as e:
|
|
62 |
+ self.logger.error(e)
|
|
63 |
+ context.set_details(str(e))
|
|
64 |
+ context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
|
47 | 65 |
|
48 | 66 |
except NotImplementedError:
|
49 | 67 |
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
50 | 68 |
|
69 |
+ return buildstream_pb2.UpdateReferenceResponse()
|
|
70 |
+ |
|
51 | 71 |
def Status(self, request, context):
|
52 |
- allow_updates = self._reference_cache.allow_updates
|
|
53 |
- return buildstream_pb2.StatusResponse(allow_updates=allow_updates)
|
|
72 |
+ try:
|
|
73 |
+ instance = self._get_instance(request.instance_name)
|
|
74 |
+ allow_updates = instance.allow_updates
|
|
75 |
+ return buildstream_pb2.StatusResponse(allow_updates=allow_updates)
|
|
76 |
+ |
|
77 |
+ except InvalidArgumentError as e:
|
|
78 |
+ self.logger.error(e)
|
|
79 |
+ context.set_details(str(e))
|
|
80 |
+ context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
|
81 |
+ |
|
82 |
+ return buildstream_pb2.StatusResponse()
|
|
83 |
+ |
|
84 |
+ def _get_instance(self, instance_name):
|
|
85 |
+ try:
|
|
86 |
+ return self._instances[instance_name]
|
|
87 |
+ |
|
88 |
+ except KeyError:
|
|
89 |
+ raise InvalidArgumentError("Invalid instance name: {}".format(instance_name))
|
1 |
- |
|
2 | 1 |
.. _dummy-build:
|
3 | 2 |
|
4 | 3 |
Dummy build
|
... | ... | @@ -8,7 +7,7 @@ In one terminal, start a server: |
8 | 7 |
|
9 | 8 |
.. code-block:: sh
|
10 | 9 |
|
11 |
- bgd server start --allow-insecure
|
|
10 |
+ bgd server start buildgrid/_app/settings/default.yml
|
|
12 | 11 |
|
13 | 12 |
In another terminal, send a request for work:
|
14 | 13 |
|
1 |
- |
|
2 | 1 |
.. _simple-build:
|
3 | 2 |
|
4 | 3 |
Simple build
|
... | ... | @@ -27,7 +26,7 @@ Now start a BuildGrid server, passing it a directory it can write a CAS to: |
27 | 26 |
|
28 | 27 |
.. code-block:: sh
|
29 | 28 |
|
30 |
- bgd server start --allow-insecure --cas disk --cas-cache disk --cas-disk-directory /path/to/empty/directory
|
|
29 |
+ bgd server start buildgrid/_app/settings/default.yml
|
|
31 | 30 |
|
32 | 31 |
Start the following bot session:
|
33 | 32 |
|
... | ... | @@ -114,6 +114,7 @@ setup( |
114 | 114 |
'protobuf',
|
115 | 115 |
'grpcio',
|
116 | 116 |
'Click',
|
117 |
+ 'pyaml',
|
|
117 | 118 |
'boto3 < 1.8.0',
|
118 | 119 |
'botocore < 1.11.0',
|
119 | 120 |
'xdg',
|
... | ... | @@ -28,11 +28,13 @@ from buildgrid._protos.google.bytestream import bytestream_pb2 |
28 | 28 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as re_pb2
|
29 | 29 |
from buildgrid.server.cas.storage.storage_abc import StorageABC
|
30 | 30 |
from buildgrid.server.cas.instance import ByteStreamInstance, ContentAddressableStorageInstance
|
31 |
+from buildgrid.server.cas import service
|
|
31 | 32 |
from buildgrid.server.cas.service import ByteStreamService, ContentAddressableStorageService
|
32 | 33 |
from buildgrid.settings import HASH
|
33 | 34 |
|
34 | 35 |
|
35 | 36 |
context = mock.create_autospec(_Context)
|
37 |
+server = mock.create_autospec(grpc.server)
|
|
36 | 38 |
|
37 | 39 |
|
38 | 40 |
class SimpleStorage(StorageABC):
|
... | ... | @@ -73,11 +75,12 @@ instances = ["", "test_inst"] |
73 | 75 |
|
74 | 76 |
@pytest.mark.parametrize("data_to_read", test_strings)
|
75 | 77 |
@pytest.mark.parametrize("instance", instances)
|
76 |
-def test_bytestream_read(data_to_read, instance):
|
|
78 |
+@mock.patch.object(service, 'bytestream_pb2_grpc', autospec=True)
|
|
79 |
+def test_bytestream_read(mocked, data_to_read, instance):
|
|
77 | 80 |
storage = SimpleStorage([b"abc", b"defg", data_to_read])
|
78 | 81 |
|
79 | 82 |
bs_instance = ByteStreamInstance(storage)
|
80 |
- servicer = ByteStreamService({instance: bs_instance})
|
|
83 |
+ servicer = ByteStreamService(server, {instance: bs_instance})
|
|
81 | 84 |
|
82 | 85 |
request = bytestream_pb2.ReadRequest()
|
83 | 86 |
if instance != "":
|
... | ... | @@ -91,12 +94,13 @@ def test_bytestream_read(data_to_read, instance): |
91 | 94 |
|
92 | 95 |
|
93 | 96 |
@pytest.mark.parametrize("instance", instances)
|
94 |
-def test_bytestream_read_many(instance):
|
|
97 |
+@mock.patch.object(service, 'bytestream_pb2_grpc', autospec=True)
|
|
98 |
+def test_bytestream_read_many(mocked, instance):
|
|
95 | 99 |
data_to_read = b"testing" * 10000
|
96 | 100 |
|
97 | 101 |
storage = SimpleStorage([b"abc", b"defg", data_to_read])
|
98 | 102 |
bs_instance = ByteStreamInstance(storage)
|
99 |
- servicer = ByteStreamService({instance: bs_instance})
|
|
103 |
+ servicer = ByteStreamService(server, {instance: bs_instance})
|
|
100 | 104 |
|
101 | 105 |
request = bytestream_pb2.ReadRequest()
|
102 | 106 |
if instance != "":
|
... | ... | @@ -111,10 +115,11 @@ def test_bytestream_read_many(instance): |
111 | 115 |
|
112 | 116 |
@pytest.mark.parametrize("instance", instances)
|
113 | 117 |
@pytest.mark.parametrize("extra_data", ["", "/", "/extra/data"])
|
114 |
-def test_bytestream_write(instance, extra_data):
|
|
118 |
+@mock.patch.object(service, 'bytestream_pb2_grpc', autospec=True)
|
|
119 |
+def test_bytestream_write(mocked, instance, extra_data):
|
|
115 | 120 |
storage = SimpleStorage()
|
116 | 121 |
bs_instance = ByteStreamInstance(storage)
|
117 |
- servicer = ByteStreamService({instance: bs_instance})
|
|
122 |
+ servicer = ByteStreamService(server, {instance: bs_instance})
|
|
118 | 123 |
|
119 | 124 |
resource_name = ""
|
120 | 125 |
if instance != "":
|
... | ... | @@ -134,10 +139,11 @@ def test_bytestream_write(instance, extra_data): |
134 | 139 |
assert storage.data[(hash_, 6)] == b'abcdef'
|
135 | 140 |
|
136 | 141 |
|
137 |
-def test_bytestream_write_rejects_wrong_hash():
|
|
142 |
+@mock.patch.object(service, 'bytestream_pb2_grpc', autospec=True)
|
|
143 |
+def test_bytestream_write_rejects_wrong_hash(mocked):
|
|
138 | 144 |
storage = SimpleStorage()
|
139 | 145 |
bs_instance = ByteStreamInstance(storage)
|
140 |
- servicer = ByteStreamService({"": bs_instance})
|
|
146 |
+ servicer = ByteStreamService(server, {"": bs_instance})
|
|
141 | 147 |
|
142 | 148 |
data = b'some data'
|
143 | 149 |
wrong_hash = HASH(b'incorrect').hexdigest()
|
... | ... | @@ -153,10 +159,11 @@ def test_bytestream_write_rejects_wrong_hash(): |
153 | 159 |
|
154 | 160 |
|
155 | 161 |
@pytest.mark.parametrize("instance", instances)
|
156 |
-def test_cas_find_missing_blobs(instance):
|
|
162 |
+@mock.patch.object(service, 'remote_execution_pb2_grpc', autospec=True)
|
|
163 |
+def test_cas_find_missing_blobs(mocked, instance):
|
|
157 | 164 |
storage = SimpleStorage([b'abc', b'def'])
|
158 | 165 |
cas_instance = ContentAddressableStorageInstance(storage)
|
159 |
- servicer = ContentAddressableStorageService({instance: cas_instance})
|
|
166 |
+ servicer = ContentAddressableStorageService(server, {instance: cas_instance})
|
|
160 | 167 |
digests = [
|
161 | 168 |
re_pb2.Digest(hash=HASH(b'def').hexdigest(), size_bytes=3),
|
162 | 169 |
re_pb2.Digest(hash=HASH(b'ghij').hexdigest(), size_bytes=4)
|
... | ... | @@ -168,10 +175,11 @@ def test_cas_find_missing_blobs(instance): |
168 | 175 |
|
169 | 176 |
|
170 | 177 |
@pytest.mark.parametrize("instance", instances)
|
171 |
-def test_cas_batch_update_blobs(instance):
|
|
178 |
+@mock.patch.object(service, 'remote_execution_pb2_grpc', autospec=True)
|
|
179 |
+def test_cas_batch_update_blobs(mocked, instance):
|
|
172 | 180 |
storage = SimpleStorage()
|
173 | 181 |
cas_instance = ContentAddressableStorageInstance(storage)
|
174 |
- servicer = ContentAddressableStorageService({instance: cas_instance})
|
|
182 |
+ servicer = ContentAddressableStorageService(server, {instance: cas_instance})
|
|
175 | 183 |
|
176 | 184 |
update_requests = [
|
177 | 185 |
re_pb2.BatchUpdateBlobsRequest.Request(
|
... | ... | @@ -22,6 +22,7 @@ import tempfile |
22 | 22 |
from unittest import mock
|
23 | 23 |
|
24 | 24 |
import boto3
|
25 |
+import grpc
|
|
25 | 26 |
from grpc._server import _Context
|
26 | 27 |
import pytest
|
27 | 28 |
from moto import mock_s3
|
... | ... | @@ -38,6 +39,7 @@ from buildgrid.settings import HASH |
38 | 39 |
|
39 | 40 |
|
40 | 41 |
context = mock.create_autospec(_Context)
|
42 |
+server = mock.create_autospec(grpc.server)
|
|
41 | 43 |
|
42 | 44 |
abc = b"abc"
|
43 | 45 |
abc_digest = Digest(hash=HASH(abc).hexdigest(), size_bytes=3)
|
... | ... | @@ -66,8 +68,10 @@ class MockStubServer: |
66 | 68 |
def __init__(self):
|
67 | 69 |
instances = {"": MockCASStorage(), "dna": MockCASStorage()}
|
68 | 70 |
self._requests = []
|
69 |
- self._bs_service = service.ByteStreamService(instances)
|
|
70 |
- self._cas_service = service.ContentAddressableStorageService(instances)
|
|
71 |
+ with mock.patch.object(service, 'bytestream_pb2_grpc'):
|
|
72 |
+ self._bs_service = service.ByteStreamService(server, instances)
|
|
73 |
+ with mock.patch.object(service, 'remote_execution_pb2_grpc'):
|
|
74 |
+ self._cas_service = service.ContentAddressableStorageService(server, instances)
|
|
71 | 75 |
|
72 | 76 |
def Read(self, request):
|
73 | 77 |
yield from self._bs_service.Read(request, context)
|
... | ... | @@ -26,10 +26,14 @@ import pytest |
26 | 26 |
|
27 | 27 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
28 | 28 |
from buildgrid.server.cas.storage import lru_memory_cache
|
29 |
+from buildgrid.server.actioncache import service
|
|
29 | 30 |
from buildgrid.server.actioncache.storage import ActionCache
|
30 | 31 |
from buildgrid.server.actioncache.service import ActionCacheService
|
31 | 32 |
|
32 | 33 |
|
34 |
+server = mock.create_autospec(grpc.server)
|
|
35 |
+ |
|
36 |
+ |
|
33 | 37 |
# Can mock this
|
34 | 38 |
@pytest.fixture
|
35 | 39 |
def context():
|
... | ... | @@ -42,36 +46,41 @@ def cas(): |
42 | 46 |
|
43 | 47 |
|
44 | 48 |
@pytest.fixture
|
45 |
-def cache(cas):
|
|
46 |
- yield ActionCache(cas, 50)
|
|
49 |
+def cache_instances(cas):
|
|
50 |
+ yield {"": ActionCache(cas, 50)}
|
|
47 | 51 |
|
48 | 52 |
|
49 |
-def test_simple_action_result(cache, context):
|
|
50 |
- service = ActionCacheService(cache)
|
|
53 |
+def test_simple_action_result(cache_instances, context):
|
|
54 |
+ with mock.patch.object(service, 'remote_execution_pb2_grpc'):
|
|
55 |
+ ac_service = ActionCacheService(server, cache_instances)
|
|
56 |
+ |
|
57 |
+ print(cache_instances)
|
|
51 | 58 |
action_digest = remote_execution_pb2.Digest(hash='sample', size_bytes=4)
|
52 | 59 |
|
53 | 60 |
# Check that before adding the ActionResult, attempting to fetch it fails
|
54 |
- request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
|
|
55 |
- service.GetActionResult(request, context)
|
|
61 |
+ request = remote_execution_pb2.GetActionResultRequest(instance_name="",
|
|
62 |
+ action_digest=action_digest)
|
|
63 |
+ ac_service.GetActionResult(request, context)
|
|
56 | 64 |
context.set_code.assert_called_once_with(grpc.StatusCode.NOT_FOUND)
|
57 | 65 |
|
58 | 66 |
# Add an ActionResult to the cache
|
59 | 67 |
action_result = remote_execution_pb2.ActionResult(stdout_raw=b'example output')
|
60 | 68 |
request = remote_execution_pb2.UpdateActionResultRequest(action_digest=action_digest,
|
61 | 69 |
action_result=action_result)
|
62 |
- service.UpdateActionResult(request, context)
|
|
70 |
+ ac_service.UpdateActionResult(request, context)
|
|
63 | 71 |
|
64 | 72 |
# Check that fetching it now works
|
65 | 73 |
request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
|
66 |
- fetched_result = service.GetActionResult(request, context)
|
|
74 |
+ fetched_result = ac_service.GetActionResult(request, context)
|
|
67 | 75 |
assert fetched_result.stdout_raw == action_result.stdout_raw
|
68 | 76 |
|
69 | 77 |
|
70 |
-def test_disabled_update_action_result(cache, context):
|
|
78 |
+def test_disabled_update_action_result(context):
|
|
71 | 79 |
disabled_push = ActionCache(cas, 50, False)
|
72 |
- service = ActionCacheService(disabled_push)
|
|
80 |
+ with mock.patch.object(service, 'remote_execution_pb2_grpc'):
|
|
81 |
+ ac_service = ActionCacheService(server, {"": disabled_push})
|
|
73 | 82 |
|
74 |
- request = remote_execution_pb2.UpdateActionResultRequest()
|
|
75 |
- service.UpdateActionResult(request, context)
|
|
83 |
+ request = remote_execution_pb2.UpdateActionResultRequest(instance_name='')
|
|
84 |
+ ac_service.UpdateActionResult(request, context)
|
|
76 | 85 |
|
77 | 86 |
context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
|
... | ... | @@ -27,25 +27,21 @@ import pytest |
27 | 27 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
28 | 28 |
from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2
|
29 | 29 |
from buildgrid.server import job
|
30 |
-from buildgrid.server.instance import BuildGridInstance
|
|
30 |
+from buildgrid.server.controller import ExecutionController
|
|
31 | 31 |
from buildgrid.server.job import LeaseState
|
32 |
-from buildgrid.server.bots.instance import BotsInterface
|
|
32 |
+from buildgrid.server.bots import service
|
|
33 | 33 |
from buildgrid.server.bots.service import BotsService
|
34 | 34 |
|
35 | 35 |
|
36 |
+server = mock.create_autospec(grpc.server)
|
|
37 |
+ |
|
38 |
+ |
|
36 | 39 |
# GRPC context
|
37 | 40 |
@pytest.fixture
|
38 | 41 |
def context():
|
39 | 42 |
yield mock.MagicMock(spec=_Context)
|
40 | 43 |
|
41 | 44 |
|
42 |
-@pytest.fixture
|
|
43 |
-def action_job():
|
|
44 |
- action_digest = remote_execution_pb2.Digest()
|
|
45 |
- j = job.Job(action_digest, None)
|
|
46 |
- yield j
|
|
47 |
- |
|
48 |
- |
|
49 | 45 |
@pytest.fixture
|
50 | 46 |
def bot_session():
|
51 | 47 |
bot = bots_pb2.BotSession()
|
... | ... | @@ -54,20 +50,16 @@ def bot_session(): |
54 | 50 |
|
55 | 51 |
|
56 | 52 |
@pytest.fixture
|
57 |
-def buildgrid():
|
|
58 |
- yield BuildGridInstance()
|
|
59 |
- |
|
60 |
- |
|
61 |
-@pytest.fixture
|
|
62 |
-def bots(schedule):
|
|
63 |
- yield BotsInterface(schedule)
|
|
53 |
+def controller():
|
|
54 |
+ yield ExecutionController()
|
|
64 | 55 |
|
65 | 56 |
|
66 | 57 |
# Instance to test
|
67 | 58 |
@pytest.fixture
|
68 |
-def instance(buildgrid):
|
|
69 |
- instances = {"": buildgrid}
|
|
70 |
- yield BotsService(instances)
|
|
59 |
+def instance(controller):
|
|
60 |
+ instances = {"": controller.bots_interface}
|
|
61 |
+ with mock.patch.object(service, 'bots_pb2_grpc'):
|
|
62 |
+ yield BotsService(server, instances)
|
|
71 | 63 |
|
72 | 64 |
|
73 | 65 |
def test_create_bot_session(bot_session, context, instance):
|
... | ... | @@ -127,12 +119,11 @@ def test_update_bot_session_bot_id_fail(bot_session, context, instance): |
127 | 119 |
|
128 | 120 |
@pytest.mark.parametrize("number_of_jobs", [0, 1, 3, 500])
|
129 | 121 |
def test_number_of_leases(number_of_jobs, bot_session, context, instance):
|
130 |
- request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
|
|
131 |
- # Inject work
|
|
122 |
+ |
|
132 | 123 |
for _ in range(0, number_of_jobs):
|
133 |
- action_digest = remote_execution_pb2.Digest()
|
|
134 |
- instance._instances[""].execute(action_digest, True)
|
|
124 |
+ _inject_work(instance._instances[""]._scheduler)
|
|
135 | 125 |
|
126 |
+ request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
|
|
136 | 127 |
response = instance.CreateBotSession(request, context)
|
137 | 128 |
|
138 | 129 |
assert len(response.leases) == number_of_jobs
|
... | ... | @@ -141,9 +132,9 @@ def test_number_of_leases(number_of_jobs, bot_session, context, instance): |
141 | 132 |
def test_update_leases_with_work(bot_session, context, instance):
|
142 | 133 |
request = bots_pb2.CreateBotSessionRequest(parent='',
|
143 | 134 |
bot_session=bot_session)
|
144 |
- # Inject work
|
|
135 |
+ |
|
145 | 136 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
146 |
- instance._instances[""].execute(action_digest, True)
|
|
137 |
+ _inject_work(instance._instances[""]._scheduler, action_digest)
|
|
147 | 138 |
|
148 | 139 |
response = instance.CreateBotSession(request, context)
|
149 | 140 |
|
... | ... | @@ -165,7 +156,7 @@ def test_update_leases_work_complete(bot_session, context, instance): |
165 | 156 |
|
166 | 157 |
# Inject work
|
167 | 158 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
168 |
- instance._instances[""].execute(action_digest, True)
|
|
159 |
+ _inject_work(instance._instances[""]._scheduler, action_digest)
|
|
169 | 160 |
|
170 | 161 |
request = bots_pb2.UpdateBotSessionRequest(name=response.name,
|
171 | 162 |
bot_session=response)
|
... | ... | @@ -193,7 +184,7 @@ def test_work_rejected_by_bot(bot_session, context, instance): |
193 | 184 |
bot_session=bot_session)
|
194 | 185 |
# Inject work
|
195 | 186 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
196 |
- instance._instances[""].execute(action_digest, True)
|
|
187 |
+ _inject_work(instance._instances[""]._scheduler, action_digest)
|
|
197 | 188 |
|
198 | 189 |
# Simulated the severed binding between client and server
|
199 | 190 |
response = copy.deepcopy(instance.CreateBotSession(request, context))
|
... | ... | @@ -215,7 +206,7 @@ def test_work_out_of_sync_from_pending(state, bot_session, context, instance): |
215 | 206 |
bot_session=bot_session)
|
216 | 207 |
# Inject work
|
217 | 208 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
218 |
- instance._instances[""].execute(action_digest, True)
|
|
209 |
+ _inject_work(instance._instances[""]._scheduler, action_digest)
|
|
219 | 210 |
|
220 | 211 |
# Simulated the severed binding between client and server
|
221 | 212 |
response = copy.deepcopy(instance.CreateBotSession(request, context))
|
... | ... | @@ -236,7 +227,7 @@ def test_work_out_of_sync_from_active(state, bot_session, context, instance): |
236 | 227 |
bot_session=bot_session)
|
237 | 228 |
# Inject work
|
238 | 229 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
239 |
- instance._instances[""].execute(action_digest, True)
|
|
230 |
+ _inject_work(instance._instances[""]._scheduler, action_digest)
|
|
240 | 231 |
|
241 | 232 |
# Simulated the severed binding between client and server
|
242 | 233 |
response = copy.deepcopy(instance.CreateBotSession(request, context))
|
... | ... | @@ -263,7 +254,7 @@ def test_work_active_to_active(bot_session, context, instance): |
263 | 254 |
bot_session=bot_session)
|
264 | 255 |
# Inject work
|
265 | 256 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
266 |
- instance._instances[""].execute(action_digest, True)
|
|
257 |
+ _inject_work(instance._instances[""]._scheduler, action_digest)
|
|
267 | 258 |
|
268 | 259 |
# Simulated the severed binding between client and server
|
269 | 260 |
response = copy.deepcopy(instance.CreateBotSession(request, context))
|
... | ... | @@ -283,3 +274,10 @@ def test_post_bot_event_temp(context, instance): |
283 | 274 |
instance.PostBotEventTemp(request, context)
|
284 | 275 |
|
285 | 276 |
context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
|
277 |
+ |
|
278 |
+ |
|
279 |
+def _inject_work(scheduler, action_digest=None):
|
|
280 |
+ if not action_digest:
|
|
281 |
+ action_digest = remote_execution_pb2.Digest()
|
|
282 |
+ j = job.Job(action_digest, False)
|
|
283 |
+ scheduler.append_job(j, True)
|
... | ... | @@ -20,21 +20,25 @@ |
20 | 20 |
import uuid
|
21 | 21 |
from unittest import mock
|
22 | 22 |
|
23 |
+from google.protobuf import any_pb2
|
|
23 | 24 |
import grpc
|
24 | 25 |
from grpc._server import _Context
|
25 | 26 |
import pytest
|
26 |
-from google.protobuf import any_pb2
|
|
27 | 27 |
|
28 | 28 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
29 | 29 |
from buildgrid._protos.google.longrunning import operations_pb2
|
30 | 30 |
|
31 | 31 |
from buildgrid.server import job
|
32 |
-from buildgrid.server.instance import BuildGridInstance
|
|
32 |
+from buildgrid.server.controller import ExecutionController
|
|
33 | 33 |
from buildgrid.server.cas.storage import lru_memory_cache
|
34 | 34 |
from buildgrid.server.actioncache.storage import ActionCache
|
35 |
+from buildgrid.server.execution import service
|
|
35 | 36 |
from buildgrid.server.execution.service import ExecutionService
|
36 | 37 |
|
37 | 38 |
|
39 |
+server = mock.create_autospec(grpc.server)
|
|
40 |
+ |
|
41 |
+ |
|
38 | 42 |
@pytest.fixture
|
39 | 43 |
def context():
|
40 | 44 |
cxt = mock.MagicMock(spec=_Context)
|
... | ... | @@ -42,21 +46,21 @@ def context(): |
42 | 46 |
|
43 | 47 |
|
44 | 48 |
@pytest.fixture(params=["action-cache", "no-action-cache"])
|
45 |
-def buildgrid(request):
|
|
49 |
+def controller(request):
|
|
46 | 50 |
if request.param == "action-cache":
|
47 | 51 |
storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
|
48 | 52 |
cache = ActionCache(storage, 50)
|
49 |
- |
|
50 |
- return BuildGridInstance(action_cache=cache,
|
|
51 |
- cas_storage=storage)
|
|
52 |
- return BuildGridInstance()
|
|
53 |
+ yield ExecutionController(cache, storage)
|
|
54 |
+ else:
|
|
55 |
+ yield ExecutionController()
|
|
53 | 56 |
|
54 | 57 |
|
55 | 58 |
# Instance to test
|
56 | 59 |
@pytest.fixture
|
57 |
-def instance(buildgrid):
|
|
58 |
- instances = {"": buildgrid}
|
|
59 |
- yield ExecutionService(instances)
|
|
60 |
+def instance(controller):
|
|
61 |
+ instances = {"": controller.execution_instance}
|
|
62 |
+ with mock.patch.object(service, 'remote_execution_pb2_grpc'):
|
|
63 |
+ yield ExecutionService(server, instances)
|
|
60 | 64 |
|
61 | 65 |
|
62 | 66 |
@pytest.mark.parametrize("skip_cache_lookup", [True, False])
|
... | ... | @@ -86,7 +90,7 @@ def test_wrong_execute_instance(instance, context): |
86 | 90 |
context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
87 | 91 |
|
88 | 92 |
|
89 |
-def test_wait_execution(instance, buildgrid, context):
|
|
93 |
+def test_wait_execution(instance, controller, context):
|
|
90 | 94 |
action_digest = remote_execution_pb2.Digest()
|
91 | 95 |
action_digest.hash = 'zhora'
|
92 | 96 |
|
... | ... | @@ -95,7 +99,7 @@ def test_wait_execution(instance, buildgrid, context): |
95 | 99 |
|
96 | 100 |
request = remote_execution_pb2.WaitExecutionRequest(name="{}/{}".format('', j.name))
|
97 | 101 |
|
98 |
- buildgrid._scheduler.jobs[j.name] = j
|
|
102 |
+ controller.execution_instance._scheduler.jobs[j.name] = j
|
|
99 | 103 |
|
100 | 104 |
action_result_any = any_pb2.Any()
|
101 | 105 |
action_result = remote_execution_pb2.ActionResult()
|
... | ... | @@ -115,7 +119,7 @@ def test_wait_execution(instance, buildgrid, context): |
115 | 119 |
assert result.done is True
|
116 | 120 |
|
117 | 121 |
|
118 |
-def test_wrong_instance_wait_execution(instance, buildgrid, context):
|
|
122 |
+def test_wrong_instance_wait_execution(instance, context):
|
|
119 | 123 |
request = remote_execution_pb2.WaitExecutionRequest(name="blade")
|
120 | 124 |
next(instance.WaitExecution(request, context))
|
121 | 125 |
|
... | ... | @@ -19,21 +19,22 @@ |
19 | 19 |
|
20 | 20 |
from unittest import mock
|
21 | 21 |
|
22 |
+from google.protobuf import any_pb2
|
|
22 | 23 |
import grpc
|
23 | 24 |
from grpc._server import _Context
|
24 | 25 |
import pytest
|
25 | 26 |
|
26 |
-from google.protobuf import any_pb2
|
|
27 |
- |
|
28 | 27 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
29 | 28 |
from buildgrid._protos.google.longrunning import operations_pb2
|
30 | 29 |
|
31 |
-from buildgrid.server.instance import BuildGridInstance
|
|
30 |
+from buildgrid.server.controller import ExecutionController
|
|
32 | 31 |
from buildgrid.server._exceptions import InvalidArgumentError
|
33 | 32 |
|
33 |
+from buildgrid.server.operations import service
|
|
34 | 34 |
from buildgrid.server.operations.service import OperationsService
|
35 | 35 |
|
36 | 36 |
|
37 |
+server = mock.create_autospec(grpc.server)
|
|
37 | 38 |
instance_name = "blade"
|
38 | 39 |
|
39 | 40 |
|
... | ... | @@ -55,21 +56,22 @@ def execute_request(): |
55 | 56 |
|
56 | 57 |
|
57 | 58 |
@pytest.fixture
|
58 |
-def buildgrid():
|
|
59 |
- yield BuildGridInstance()
|
|
59 |
+def controller():
|
|
60 |
+ yield ExecutionController()
|
|
60 | 61 |
|
61 | 62 |
|
62 | 63 |
# Instance to test
|
63 | 64 |
@pytest.fixture
|
64 |
-def instance(buildgrid):
|
|
65 |
- instances = {instance_name: buildgrid}
|
|
66 |
- yield OperationsService(instances)
|
|
65 |
+def instance(controller):
|
|
66 |
+ instances = {instance_name: controller.operations_instance}
|
|
67 |
+ with mock.patch.object(service, 'operations_pb2_grpc'):
|
|
68 |
+ yield OperationsService(server, instances)
|
|
67 | 69 |
|
68 | 70 |
|
69 | 71 |
# Queue an execution, get operation corresponding to that request
|
70 |
-def test_get_operation(instance, buildgrid, execute_request, context):
|
|
71 |
- response_execute = buildgrid.execute(execute_request.action_digest,
|
|
72 |
- execute_request.skip_cache_lookup)
|
|
72 |
+def test_get_operation(instance, controller, execute_request, context):
|
|
73 |
+ response_execute = controller.execution_instance.execute(execute_request.action_digest,
|
|
74 |
+ execute_request.skip_cache_lookup)
|
|
73 | 75 |
|
74 | 76 |
request = operations_pb2.GetOperationRequest()
|
75 | 77 |
|
... | ... | @@ -94,9 +96,9 @@ def test_get_operation_instance_fail(instance, context): |
94 | 96 |
context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
95 | 97 |
|
96 | 98 |
|
97 |
-def test_list_operations(instance, buildgrid, execute_request, context):
|
|
98 |
- response_execute = buildgrid.execute(execute_request.action_digest,
|
|
99 |
- execute_request.skip_cache_lookup)
|
|
99 |
+def test_list_operations(instance, controller, execute_request, context):
|
|
100 |
+ response_execute = controller.execution_instance.execute(execute_request.action_digest,
|
|
101 |
+ execute_request.skip_cache_lookup)
|
|
100 | 102 |
|
101 | 103 |
request = operations_pb2.ListOperationsRequest(name=instance_name)
|
102 | 104 |
response = instance.ListOperations(request, context)
|
... | ... | @@ -104,9 +106,9 @@ def test_list_operations(instance, buildgrid, execute_request, context): |
104 | 106 |
assert response.operations[0].name.split('/')[-1] == response_execute.name
|
105 | 107 |
|
106 | 108 |
|
107 |
-def test_list_operations_instance_fail(instance, buildgrid, execute_request, context):
|
|
108 |
- buildgrid.execute(execute_request.action_digest,
|
|
109 |
- execute_request.skip_cache_lookup)
|
|
109 |
+def test_list_operations_instance_fail(instance, controller, execute_request, context):
|
|
110 |
+ controller.execution_instance.execute(execute_request.action_digest,
|
|
111 |
+ execute_request.skip_cache_lookup)
|
|
110 | 112 |
|
111 | 113 |
request = operations_pb2.ListOperationsRequest()
|
112 | 114 |
instance.ListOperations(request, context)
|
... | ... | @@ -114,16 +116,16 @@ def test_list_operations_instance_fail(instance, buildgrid, execute_request, con |
114 | 116 |
context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
115 | 117 |
|
116 | 118 |
|
117 |
-def test_list_operations_with_result(instance, buildgrid, execute_request, context):
|
|
118 |
- response_execute = buildgrid.execute(execute_request.action_digest,
|
|
119 |
- execute_request.skip_cache_lookup)
|
|
119 |
+def test_list_operations_with_result(instance, controller, execute_request, context):
|
|
120 |
+ response_execute = controller.execution_instance.execute(execute_request.action_digest,
|
|
121 |
+ execute_request.skip_cache_lookup)
|
|
120 | 122 |
|
121 | 123 |
action_result = remote_execution_pb2.ActionResult()
|
122 | 124 |
output_file = remote_execution_pb2.OutputFile(path='unicorn')
|
123 | 125 |
action_result.output_files.extend([output_file])
|
124 | 126 |
|
125 |
- buildgrid._scheduler.job_complete(response_execute.name,
|
|
126 |
- _pack_any(action_result))
|
|
127 |
+ controller.operations_instance._scheduler.job_complete(response_execute.name,
|
|
128 |
+ _pack_any(action_result))
|
|
127 | 129 |
|
128 | 130 |
request = operations_pb2.ListOperationsRequest(name=instance_name)
|
129 | 131 |
response = instance.ListOperations(request, context)
|
... | ... | @@ -144,9 +146,9 @@ def test_list_operations_empty(instance, context): |
144 | 146 |
|
145 | 147 |
|
146 | 148 |
# Send execution off, delete, try to find operation should fail
|
147 |
-def test_delete_operation(instance, buildgrid, execute_request, context):
|
|
148 |
- response_execute = buildgrid.execute(execute_request.action_digest,
|
|
149 |
- execute_request.skip_cache_lookup)
|
|
149 |
+def test_delete_operation(instance, controller, execute_request, context):
|
|
150 |
+ response_execute = controller.execution_instance.execute(execute_request.action_digest,
|
|
151 |
+ execute_request.skip_cache_lookup)
|
|
150 | 152 |
request = operations_pb2.DeleteOperationRequest()
|
151 | 153 |
request.name = "{}/{}".format(instance_name, response_execute.name)
|
152 | 154 |
instance.DeleteOperation(request, context)
|
... | ... | @@ -155,7 +157,7 @@ def test_delete_operation(instance, buildgrid, execute_request, context): |
155 | 157 |
request.name = "{}/{}".format(instance_name, response_execute.name)
|
156 | 158 |
|
157 | 159 |
with pytest.raises(InvalidArgumentError):
|
158 |
- buildgrid.get_operation(response_execute.name)
|
|
160 |
+ controller.operations_instance.get_operation(response_execute.name)
|
|
159 | 161 |
|
160 | 162 |
|
161 | 163 |
def test_delete_operation_fail(instance, context):
|
... | ... | @@ -25,10 +25,15 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_p |
25 | 25 |
from buildgrid._protos.buildstream.v2 import buildstream_pb2
|
26 | 26 |
|
27 | 27 |
from buildgrid.server.cas.storage import lru_memory_cache
|
28 |
+from buildgrid.server.referencestorage import service
|
|
28 | 29 |
from buildgrid.server.referencestorage.service import ReferenceStorageService
|
29 | 30 |
from buildgrid.server.referencestorage.storage import ReferenceCache
|
30 | 31 |
|
31 | 32 |
|
33 |
+server = mock.create_autospec(grpc.server)
|
|
34 |
+instance_name = ''
|
|
35 |
+ |
|
36 |
+ |
|
32 | 37 |
# Can mock this
|
33 | 38 |
@pytest.fixture
|
34 | 39 |
def context():
|
... | ... | @@ -45,41 +50,49 @@ def cache(cas): |
45 | 50 |
yield ReferenceCache(cas, 50)
|
46 | 51 |
|
47 | 52 |
|
48 |
-def test_simple_result(cache, context):
|
|
53 |
+@pytest.fixture
|
|
54 |
+def instance(cache):
|
|
55 |
+ instances = {instance_name: cache}
|
|
56 |
+ with mock.patch.object(service, 'buildstream_pb2_grpc'):
|
|
57 |
+ yield ReferenceStorageService(server, instances)
|
|
58 |
+ |
|
59 |
+ |
|
60 |
+def test_simple_result(instance, context):
|
|
49 | 61 |
keys = ["rick", "roy", "rach"]
|
50 |
- service = ReferenceStorageService(cache)
|
|
51 | 62 |
|
52 | 63 |
# Check that before adding the ReferenceResult, attempting to fetch it fails
|
53 | 64 |
request = buildstream_pb2.GetReferenceRequest(key=keys[0])
|
54 |
- service.GetReference(request, context)
|
|
65 |
+ instance.GetReference(request, context)
|
|
55 | 66 |
context.set_code.assert_called_once_with(grpc.StatusCode.NOT_FOUND)
|
56 | 67 |
|
57 | 68 |
# Add a ReferenceResult to the cache
|
58 | 69 |
reference_result = remote_execution_pb2.Digest(hash='deckard')
|
59 | 70 |
request = buildstream_pb2.UpdateReferenceRequest(keys=keys,
|
60 | 71 |
digest=reference_result)
|
61 |
- service.UpdateReference(request, context)
|
|
72 |
+ instance.UpdateReference(request, context)
|
|
62 | 73 |
|
63 | 74 |
# Check that fetching it now works
|
64 | 75 |
for key in keys:
|
65 | 76 |
request = buildstream_pb2.GetReferenceRequest(key=key)
|
66 |
- fetched_result = service.GetReference(request, context)
|
|
77 |
+ fetched_result = instance.GetReference(request, context)
|
|
67 | 78 |
assert fetched_result.digest == reference_result
|
68 | 79 |
|
69 | 80 |
|
70 |
-def test_disabled_update_result(cache, context):
|
|
81 |
+def test_disabled_update_result(context):
|
|
71 | 82 |
disabled_push = ReferenceCache(cas, 50, False)
|
72 | 83 |
keys = ["rick", "roy", "rach"]
|
73 |
- service = ReferenceStorageService(disabled_push)
|
|
84 |
+ |
|
85 |
+ with mock.patch.object(service, 'buildstream_pb2_grpc'):
|
|
86 |
+ instance = ReferenceStorageService(server, {'': disabled_push})
|
|
74 | 87 |
|
75 | 88 |
# Add an ReferenceResult to the cache
|
76 | 89 |
reference_result = remote_execution_pb2.Digest(hash='deckard')
|
77 | 90 |
request = buildstream_pb2.UpdateReferenceRequest(keys=keys,
|
78 | 91 |
digest=reference_result)
|
79 |
- service.UpdateReference(request, context)
|
|
92 |
+ instance.UpdateReference(request, context)
|
|
80 | 93 |
|
81 | 94 |
request = buildstream_pb2.UpdateReferenceRequest()
|
82 |
- service.UpdateReference(request, context)
|
|
95 |
+ instance.UpdateReference(request, context)
|
|
83 | 96 |
|
84 | 97 |
context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
|
85 | 98 |
|
... | ... | @@ -87,9 +100,10 @@ def test_disabled_update_result(cache, context): |
87 | 100 |
@pytest.mark.parametrize("allow_updates", [True, False])
|
88 | 101 |
def test_status(allow_updates, context):
|
89 | 102 |
cache = ReferenceCache(cas, 5, allow_updates)
|
90 |
- service = ReferenceStorageService(cache)
|
|
103 |
+ with mock.patch.object(service, 'buildstream_pb2_grpc'):
|
|
104 |
+ instance = ReferenceStorageService(server, {'': cache})
|
|
91 | 105 |
|
92 | 106 |
request = buildstream_pb2.StatusRequest()
|
93 |
- response = service.Status(request, context)
|
|
107 |
+ response = instance.Status(request, context)
|
|
94 | 108 |
|
95 | 109 |
assert response.allow_updates == allow_updates
|