finn pushed to branch finn/81-precon-fail at BuildGrid / buildgrid
Commits:
- a7f2ea44 by finnball at 2018-09-12T15:51:38Z
- 142d7ef2 by finnball at 2018-09-12T15:51:42Z
- 2e426480 by finnball at 2018-09-12T15:51:42Z
- 8564f37c by finnball at 2018-09-12T15:51:42Z
- eb959e27 by finnball at 2018-09-12T15:55:02Z
- ef7d602b by finnball at 2018-09-12T16:03:50Z
- 217a046c by finnball at 2018-09-12T16:03:50Z
- 7aa90b2d by finnball at 2018-09-12T16:03:50Z
- 01387c5a by finnball at 2018-09-12T16:04:01Z
18 changed files:
- .gitlab-ci.yml
- buildgrid/_app/commands/cmd_cas.py
- buildgrid/_app/commands/cmd_execute.py
- buildgrid/_app/commands/cmd_server.py
- + buildgrid/_app/settings/__init__.py
- + buildgrid/_app/settings/cas.yml
- buildgrid/_app/settings/default.yml
- buildgrid/_app/settings/parser.py
- + buildgrid/_app/settings/remote-storage.yml
- buildgrid/server/_exceptions.py
- buildgrid/server/cas/service.py
- buildgrid/server/cas/storage/remote.py
- buildgrid/server/execution/instance.py
- buildgrid/server/execution/service.py
- docs/source/using_dummy_build.rst
- tests/cas/test_storage.py
- tests/integration/execution_service.py
- tests/integration/operations_service.py
Changes:
.gitlab-ci.yml
@@ -33,6 +33,7 @@ before_script:
   - ${BGD} server start buildgrid/_app/settings/default.yml &
   - sleep 1 # Allow server to boot
   - ${BGD} bot dummy &
+  - ${BGD} cas upload-dummy
   - ${BGD} execute request-dummy --wait-for-completion
buildgrid/_app/commands/cmd_cas.py
@@ -65,6 +65,23 @@ def cli(context, remote, instance_name, client_key, client_cert, server_cert):
     context.logger.debug("Starting for remote {}".format(context.remote))


+@cli.command('upload-dummy', short_help="Upload a dummy action. Should be used with `execute dummy-request`")
+@pass_context
+def upload_dummy(context):
+    context.logger.info("Uploading dummy action...")
+    action = remote_execution_pb2.Action(do_not_cache=True)
+    action_digest = create_digest(action.SerializeToString())
+
+    request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=context.instance_name)
+    request.requests.add(digest=action_digest,
+                         data=action.SerializeToString())
+
+    stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
+    response = stub.BatchUpdateBlobs(request)
+
+    context.logger.info(response)
+
+
 @cli.command('upload-files', short_help="Upload files to the CAS server.")
 @click.argument('files', nargs=-1, type=click.File('rb'), required=True)
 @pass_context
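Both the new `cas upload-dummy` command and the reworked `execute request-dummy` derive the action digest from the same serialized `Action(do_not_cache=True)` message, so the execution request can find the uploaded blob in CAS. A minimal sketch of that digest computation, assuming `buildgrid.utils.create_digest` follows the standard REAPI convention (SHA-256 hex hash plus size in bytes); `create_digest_sketch` is an illustrative stand-in, not the real helper:

    import hashlib

    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2


    def create_digest_sketch(data):
        # Assumed behaviour of buildgrid.utils.create_digest: a REAPI Digest pairs
        # the SHA-256 hex hash of the serialized message with its length in bytes.
        return remote_execution_pb2.Digest(hash=hashlib.sha256(data).hexdigest(),
                                           size_bytes=len(data))


    action = remote_execution_pb2.Action(do_not_cache=True)
    action_digest = create_digest_sketch(action.SerializeToString())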
buildgrid/_app/commands/cmd_execute.py
@@ -76,9 +76,11 @@ def cli(context, remote, instance_name, client_key, client_cert, server_cert):
               help="Stream updates until jobs are completed.")
 @pass_context
 def request_dummy(context, number, wait_for_completion):
-    action_digest = remote_execution_pb2.Digest()

     context.logger.info("Sending execution request...")
+    action = remote_execution_pb2.Action(do_not_cache=True)
+    action_digest = create_digest(action.SerializeToString())
+
     stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)

     request = remote_execution_pb2.ExecuteRequest(instance_name=context.instance_name,
@@ -90,9 +92,18 @@ def request_dummy(context, number, wait_for_completion):
         responses.append(stub.Execute(request))

     for response in responses:
+
         if wait_for_completion:
+            result = None
             for stream in response:
-                context.logger.info(stream)
+                result = stream
+                context.logger.info(result)
+
+            if not result.done:
+                click.echo("Result did not return True." +
+                           "Was the action uploaded to CAS?", err=True)
+                sys.exit(-1)
+
         else:
             context.logger.info(next(response))

buildgrid/_app/commands/cmd_server.py
@@ -49,41 +49,48 @@ def start(context, config):
     with open(config) as f:
         settings = parser.get_parser().safe_load(f)

-    server_settings = settings['server']
-    insecure_mode = server_settings['insecure-mode']
-
-    credentials = None
-    if not insecure_mode:
-        server_key = server_settings['tls-server-key']
-        server_cert = server_settings['tls-server-cert']
-        client_certs = server_settings['tls-client-certs']
-        credentials = context.load_server_credentials(server_key, server_cert, client_certs)
-
-        if not credentials:
-            click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
-                       "Set `insecure-mode: false` in order to deactivate TLS encryption.\n", err=True)
-            sys.exit(-1)
-
-    instances = settings['instances']
-
-    execution_controllers = _instance_maker(instances, ExecutionController)
-
-    execution_instances = {}
-    bots_interfaces = {}
-    operations_instances = {}
-
-    # TODO: map properly in parser
-    for k, v in execution_controllers.items():
-        execution_instances[k] = v.execution_instance
-        bots_interfaces[k] = v.bots_interface
-        operations_instances[k] = v.operations_instance
-
-    reference_caches = _instance_maker(instances, ReferenceCache)
-    action_caches = _instance_maker(instances, ActionCache)
-    cas = _instance_maker(instances, ContentAddressableStorageInstance)
-    bytestreams = _instance_maker(instances, ByteStreamInstance)
+    try:
+        server_settings = settings['server']
+        insecure_mode = server_settings['insecure-mode']
+
+        credentials = None
+        if not insecure_mode:
+            credential_settings = server_settings['credentials']
+            server_key = credential_settings['tls-server-key']
+            server_cert = credential_settings['tls-server-cert']
+            client_certs = credential_settings['tls-client-certs']
+            credentials = context.load_server_credentials(server_key, server_cert, client_certs)
+
+            if not credentials:
+                click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
+                           "Set `insecure-mode: false` in order to deactivate TLS encryption.\n", err=True)
+                sys.exit(-1)
+
+        port = server_settings['port']
+        instances = settings['instances']
+
+        execution_controllers = _instance_maker(instances, ExecutionController)
+
+        execution_instances = {}
+        bots_interfaces = {}
+        operations_instances = {}
+
+        # TODO: map properly in parser
+        # Issue 82
+        for k, v in execution_controllers.items():
+            execution_instances[k] = v.execution_instance
+            bots_interfaces[k] = v.bots_interface
+            operations_instances[k] = v.operations_instance
+
+        reference_caches = _instance_maker(instances, ReferenceCache)
+        action_caches = _instance_maker(instances, ActionCache)
+        cas = _instance_maker(instances, ContentAddressableStorageInstance)
+        bytestreams = _instance_maker(instances, ByteStreamInstance)
+
+    except KeyError as e:
+        click.echo("ERROR: Could not parse config: {}.\n".format(str(e)), err=True)
+        sys.exit(-1)

-    port = server_settings['port']
     server = BuildGridServer(port=port,
                              credentials=credentials,
                              execution_instances=execution_instances,
buildgrid/_app/settings/cas.yml (new file)
+server:
+  port: 50052
+  insecure-mode: true
+  credentials:
+    tls-server-key: null
+    tls-server-cert: null
+    tls-client-certs: null
+
+description: |
+  Just a CAS.
+
+instances:
+  - name: main
+    description: |
+      The main server
+
+    storages:
+      - !disk-storage &main-storage
+        path: ~/cas/
+
+    services:
+      - !cas
+        storage: *main-storage
+
+      - !bytestream
+        storage: *main-storage
buildgrid/_app/settings/default.yml
 server:
   port: 50051
-  tls-server-key: null
-  tls-server-cert: null
-  tls-client-certs: null
   insecure-mode: true
+  credentials:
+    tls-server-key: null
+    tls-server-cert: null
+    tls-client-certs: null

 description: |
   A single default instance
buildgrid/_app/settings/parser.py
@@ -14,7 +14,11 @@


 import os
+import sys
+from urllib.parse import urlparse

+import click
+import grpc
 import yaml

 from buildgrid.server.controller import ExecutionController
@@ -22,9 +26,12 @@ from buildgrid.server.actioncache.storage import ActionCache
 from buildgrid.server.cas.instance import ByteStreamInstance, ContentAddressableStorageInstance
 from buildgrid.server.cas.storage.disk import DiskStorage
 from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
+from buildgrid.server.cas.storage.remote import RemoteStorage
 from buildgrid.server.cas.storage.s3 import S3Storage
 from buildgrid.server.cas.storage.with_cache import WithCacheStorage

+from ..cli import Context
+

 class YamlFactory(yaml.YAMLObject):
     @classmethod
@@ -58,6 +65,47 @@ class S3(YamlFactory):
         return S3Storage(bucket, endpoint_url=endpoint)


+class Remote(YamlFactory):
+
+    yaml_tag = u'!remote-storage'
+
+    def __new__(cls, url, instance_name, credentials=None):
+        # TODO: Context could be passed into the parser.
+        # Also find way to get instance_name from parent
+        # Issue 82
+        context = Context()
+
+        url = urlparse(url)
+        remote = '{}:{}'.format(url.hostname, url.port or 50051)
+
+        channel = None
+        if url.scheme == 'http':
+            channel = grpc.insecure_channel(remote)
+
+        else:
+            if not credentials:
+                click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
+                           "Set remote url scheme to `http` in order to deactivate" +
+                           "TLS encryption.\n", err=True)
+                sys.exit(-1)
+
+            client_key = credentials['tls-client-key']
+            client_cert = credentials['tls-client-cert']
+            server_cert = credentials['tls-server-cert']
+            credentials = context.load_client_credentials(client_key,
+                                                          client_cert,
+                                                          server_cert)
+            if not credentials:
+                click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
+                           "Set remote url scheme to `http` in order to deactivate" +
+                           "TLS encryption.\n", err=True)
+                sys.exit(-1)
+
+            channel = grpc.secure_channel(remote, credentials)
+
+        return RemoteStorage(channel, instance_name)
+
+
 class WithCache(YamlFactory):

     yaml_tag = u'!with-cache-storage'
@@ -118,6 +166,7 @@ def get_parser():
     yaml.SafeLoader.add_constructor(Disk.yaml_tag, Disk.from_yaml)
     yaml.SafeLoader.add_constructor(LRU.yaml_tag, LRU.from_yaml)
     yaml.SafeLoader.add_constructor(S3.yaml_tag, S3.from_yaml)
+    yaml.SafeLoader.add_constructor(Remote.yaml_tag, Remote.from_yaml)
     yaml.SafeLoader.add_constructor(WithCache.yaml_tag, WithCache.from_yaml)
     yaml.SafeLoader.add_constructor(CAS.yaml_tag, CAS.from_yaml)
     yaml.SafeLoader.add_constructor(ByteStream.yaml_tag, ByteStream.from_yaml)
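The new `!remote-storage` tag follows the existing YamlFactory pattern: a `yaml.YAMLObject` subclass whose `from_yaml` constructor is registered on the SafeLoader, and whose `__new__` returns the fully built storage object rather than an instance of the factory class. A self-contained sketch of that pattern using a hypothetical `Example` tag (illustration only, not BuildGrid code):

    import yaml


    class Example(yaml.YAMLObject):
        """Hypothetical factory, mirroring the YamlFactory pattern in parser.py."""

        yaml_tag = u'!example'

        def __new__(cls, greeting):
            # Like Remote.__new__, return a plain value instead of an Example instance.
            return greeting.upper()

        @classmethod
        def from_yaml(cls, loader, node):
            values = loader.construct_mapping(node, deep=True)
            return cls(**values)


    yaml.SafeLoader.add_constructor(Example.yaml_tag, Example.from_yaml)

    print(yaml.safe_load("message: !example {greeting: hello}"))
    # {'message': 'HELLO'}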
buildgrid/_app/settings/remote-storage.yml (new file)
+server:
+  port: 50051
+  insecure-mode: true
+  credentials:
+    tls-server-key: null
+    tls-server-cert: null
+    tls-client-certs: null
+
+
+description: |
+  A single default instance with remote storage.
+
+instances:
+  - name: main
+    description: |
+      The main server
+
+    storages:
+      - !remote-storage &main-storage
+        url: "http://localhost:50052"
+        instance_name: main
+        credentials:
+          tls-client-key: null
+          tls-client-cert: null
+          tls-server-cert: null
+
+    services:
+      - !action-cache &main-action
+        storage: *main-storage
+        max_cached_refs: 256
+        allow_updates: true
+
+      - !execution
+        storage: *main-storage
+        action_cache: *main-action
+
+      - !cas
+        storage: *main-storage
+
+      - !bytestream
+        storage: *main-storage
46 | 46 |
|
47 | 47 |
def __init__(self, message, detail=None, reason=None):
|
48 | 48 |
super().__init__(message, detail=detail, domain=ErrorDomain.SERVER, reason=reason)
|
49 |
+ |
|
50 |
+ |
|
51 |
+class FailedPrecondition(BgdError):
|
|
52 |
+ """ One or more errors occurred in setting up the action requested, such as a missing input
|
|
53 |
+ or command or no worker being available. The client may be able to fix the errors and retry.
|
|
54 |
+ """
|
|
55 |
+ |
|
56 |
+ def __init__(self, message, detail=None, reason=None):
|
|
57 |
+ super().__init__(message, detail=detail, domain=ErrorDomain.SERVER, reason=reason)
|
buildgrid/server/cas/service.py
@@ -89,15 +89,15 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
             # TODO: Decide on default instance name
             if path[0] == "blobs":
                 if len(path) < 3 or not path[2].isdigit():
-                    raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                    raise InvalidArgumentError("Invalid resource name: {}".format(request.resource_name))
                 instance_name = ""

             elif path[1] == "blobs":
                 if len(path) < 4 or not path[3].isdigit():
-                    raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                    raise InvalidArgumentError("Invalid resource name: {}".format(request.resource_name))

             else:
-                raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                raise InvalidArgumentError("Invalid resource name: {}".format(request.resource_name))

             instance = self._get_instance(instance_name)
             yield from instance.read(path,
@@ -134,15 +134,15 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
             # TODO: Sort out no instance name
             if path[0] == "uploads":
                 if len(path) < 5 or path[2] != "blobs" or not path[4].isdigit():
-                    raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                    raise InvalidArgumentError("Invalid resource name: {}".format(first_request.resource_name))
                 instance_name = ""

             elif path[1] == "uploads":
                 if len(path) < 6 or path[3] != "blobs" or not path[5].isdigit():
-                    raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                    raise InvalidArgumentError("Invalid resource name: {}".format(first_request.resource_name))

             else:
-                raise InvalidArgumentError("Invalid resource name: {}".format(context.resource_name))
+                raise InvalidArgumentError("Invalid resource name: {}".format(first_request.resource_name))

             instance = self._get_instance(instance_name)
             return instance.write(requests)
buildgrid/server/cas/storage/remote.py
@@ -23,6 +23,8 @@ Forwwards storage requests to a remote storage.
 import io
 import logging

+import grpc
+
 from buildgrid.utils import gen_fetch_blob, gen_write_request_blob
 from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
@@ -32,7 +34,7 @@ from .storage_abc import StorageABC

 class RemoteStorage(StorageABC):

-    def __init__(self, channel, instance_name=""):
+    def __init__(self, channel, instance_name):
         self.logger = logging.getLogger(__name__)
         self._instance_name = instance_name
         self._stub_bs = bytestream_pb2_grpc.ByteStreamStub(channel)
@@ -44,18 +46,29 @@ class RemoteStorage(StorageABC):
         return False

     def get_blob(self, digest):
-        fetched_data = io.BytesIO()
-        length = 0
-        for data in gen_fetch_blob(self._stub_bs, digest, self._instance_name):
-            length += fetched_data.write(data)
-
-        if length:
-            assert digest.size_bytes == length
-            fetched_data.seek(0)
-            return fetched_data
-
-        else:
-            return None
+        try:
+            fetched_data = io.BytesIO()
+            length = 0
+
+            for data in gen_fetch_blob(self._stub_bs, digest, self._instance_name):
+                length += fetched_data.write(data)
+
+            if length:
+                assert digest.size_bytes == length
+                fetched_data.seek(0)
+                return fetched_data
+
+            else:
+                return None
+
+        except grpc.RpcError as e:
+            if e.code() == grpc.StatusCode.NOT_FOUND:
+                pass
+            else:
+                self.logger.error(e.details())
+                raise
+
+        return None

     def begin_write(self, digest):
         return io.BytesIO(digest.SerializeToString())
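The grpc.RpcError handled above originates inside `gen_fetch_blob`, which streams the blob over the ByteStream `Read` RPC. A hypothetical equivalent of that helper, assuming the standard REAPI resource-name layout `{instance_name}/blobs/{hash}/{size}` (the real helper lives in buildgrid.utils and may differ in detail):

    from buildgrid._protos.google.bytestream import bytestream_pb2


    def fetch_blob_chunks(stub_bs, digest, instance_name=""):
        # Hypothetical stand-in for buildgrid.utils.gen_fetch_blob: yield the
        # blob's data chunks from the remote ByteStream service. A missing blob
        # surfaces as a grpc.RpcError with StatusCode.NOT_FOUND while iterating.
        resource_name = '/'.join([instance_name, 'blobs', digest.hash, str(digest.size_bytes)]).lstrip('/')
        request = bytestream_pb2.ReadRequest(resource_name=resource_name)
        for response in stub_bs.Read(request):
            yield response.data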
buildgrid/server/execution/instance.py
@@ -24,12 +24,12 @@ import logging
 from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Action

 from ..job import Job
-from .._exceptions import InvalidArgumentError
+from .._exceptions import InvalidArgumentError, FailedPrecondition


 class ExecutionInstance:

-    def __init__(self, scheduler, storage=None):
+    def __init__(self, scheduler, storage):
         self.logger = logging.getLogger(__name__)
         self._storage = storage
         self._scheduler = scheduler
@@ -40,13 +40,12 @@ class ExecutionInstance:
         this action.
         """

-        do_not_cache = False
-        if self._storage is not None:
-            action = self._storage.get_message(action_digest, Action)
-            if action is not None:
-                do_not_cache = action.do_not_cache
+        action = self._storage.get_message(action_digest, Action)

-        job = Job(action_digest, do_not_cache, message_queue)
+        if not action:
+            raise FailedPrecondition("Could not get action from storage.")
+
+        job = Job(action_digest, action.do_not_cache, message_queue)
         self.logger.info("Operation name: {}".format(job.name))

         self._scheduler.append_job(job, skip_cache_lookup)
buildgrid/server/execution/service.py
@@ -30,7 +30,7 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

 from buildgrid._protos.google.longrunning import operations_pb2

-from .._exceptions import InvalidArgumentError
+from .._exceptions import InvalidArgumentError, FailedPrecondition


 class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
@@ -61,6 +61,12 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
             yield operations_pb2.Operation()

+        except FailedPrecondition as e:
+            self.logger.error(e)
+            context.set_details(str(e))
+            context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
+            yield operations_pb2.Operation()
+
     def WaitExecution(self, request, context):
         try:
             names = request.name.split("/")
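On the client side the new error path surfaces as a terminal gRPC status rather than a Python exception: iterating the Execute stream eventually raises grpc.RpcError carrying FAILED_PRECONDITION. A hedged sketch of handling it (endpoint and instance name are illustrative only, not from this branch):

    import grpc

    from buildgrid._protos.build.bazel.remote.execution.v2 import (remote_execution_pb2,
                                                                   remote_execution_pb2_grpc)

    channel = grpc.insecure_channel('localhost:50051')
    stub = remote_execution_pb2_grpc.ExecutionStub(channel)
    request = remote_execution_pb2.ExecuteRequest(instance_name='main', skip_cache_lookup=True)

    try:
        for operation in stub.Execute(request):
            print(operation)
    except grpc.RpcError as e:
        if e.code() == grpc.StatusCode.FAILED_PRECONDITION:
            # The server could not find the Action in CAS; upload it first.
            print(e.details())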
docs/source/using_dummy_build.rst
@@ -9,7 +9,13 @@ In one terminal, start a server:

    bgd server start buildgrid/_app/settings/default.yml

-In another terminal, send a request for work:
+In another terminal, upload an action to CAS:
+
+.. code-block::sh
+
+   bgd cas upload-dummy
+
+Then send a request for work:

 .. code-block:: sh

tests/cas/test_storage.py
@@ -98,17 +98,6 @@ def instance(params):
     return {params, MockCASStorage()}


-@pytest.fixture()
-@mock.patch.object(remote, 'bytestream_pb2_grpc')
-@mock.patch.object(remote, 'remote_execution_pb2_grpc')
-def remote_storage(mock_bs_grpc, mock_re_pb2_grpc):
-    mock_server = MockStubServer()
-    storage = remote.RemoteStorage(instance)
-    storage._stub_bs = mock_server
-    storage._stub_cas = mock_server
-    yield storage
-
-
 # General tests for all storage providers


@@ -138,7 +127,7 @@ def any_storage(request):
         with mock.patch.object(remote, 'bytestream_pb2_grpc'):
             with mock.patch.object(remote, 'remote_execution_pb2_grpc'):
                 mock_server = MockStubServer()
-                storage = remote.RemoteStorage(instance)
+                storage = remote.RemoteStorage(instance, "")
                 storage._stub_bs = mock_server
                 storage._stub_cas = mock_server
                 yield storage
tests/integration/execution_service.py
@@ -28,6 +28,7 @@ import pytest
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildgrid._protos.google.longrunning import operations_pb2

+from buildgrid.utils import create_digest
 from buildgrid.server import job
 from buildgrid.server.controller import ExecutionController
 from buildgrid.server.cas.storage import lru_memory_cache
@@ -37,6 +38,8 @@ from buildgrid.server.execution.service import ExecutionService


 server = mock.create_autospec(grpc.server)
+action = remote_execution_pb2.Action(do_not_cache=True)
+action_digest = create_digest(action.SerializeToString())


 @pytest.fixture
@@ -47,12 +50,16 @@ def context():

 @pytest.fixture(params=["action-cache", "no-action-cache"])
 def controller(request):
+    storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
+    write_session = storage.begin_write(action_digest)
+    storage.commit_write(action_digest, write_session)
+
     if request.param == "action-cache":
-        storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
         cache = ActionCache(storage, 50)
         yield ExecutionController(cache, storage)
+
     else:
-        yield ExecutionController()
+        yield ExecutionController(None, storage)


 # Instance to test
@@ -65,9 +72,6 @@ def instance(controller):

 @pytest.mark.parametrize("skip_cache_lookup", [True, False])
 def test_execute(skip_cache_lookup, instance, context):
-    action_digest = remote_execution_pb2.Digest()
-    action_digest.hash = 'zhora'
-
     request = remote_execution_pb2.ExecuteRequest(instance_name='',
                                                   action_digest=action_digest,
                                                   skip_cache_lookup=skip_cache_lookup)
@@ -90,10 +94,16 @@ def test_wrong_execute_instance(instance, context):
     context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)


-def test_wait_execution(instance, controller, context):
-    action_digest = remote_execution_pb2.Digest()
-    action_digest.hash = 'zhora'
+def test_no_action_digest_in_storage(instance, context):
+    request = remote_execution_pb2.ExecuteRequest(instance_name='',
+                                                  skip_cache_lookup=True)
+    response = instance.Execute(request, context)
+
+    next(response)
+    context.set_code.assert_called_once_with(grpc.StatusCode.FAILED_PRECONDITION)

+
+def test_wait_execution(instance, controller, context):
     j = job.Job(action_digest, None)
     j._operation.done = True

24 | 24 |
from grpc._server import _Context
|
25 | 25 |
import pytest
|
26 | 26 |
|
27 |
-from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
28 |
-from buildgrid._protos.google.longrunning import operations_pb2
|
|
29 |
- |
|
27 |
+from buildgrid.utils import create_digest
|
|
30 | 28 |
from buildgrid.server.controller import ExecutionController
|
31 |
-from buildgrid.server._exceptions import InvalidArgumentError
|
|
32 |
- |
|
29 |
+from buildgrid.server.cas.storage import lru_memory_cache
|
|
33 | 30 |
from buildgrid.server.operations import service
|
34 | 31 |
from buildgrid.server.operations.service import OperationsService
|
32 |
+from buildgrid.server._exceptions import InvalidArgumentError
|
|
33 |
+ |
|
34 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
35 |
+from buildgrid._protos.google.longrunning import operations_pb2
|
|
35 | 36 |
|
36 | 37 |
|
37 | 38 |
server = mock.create_autospec(grpc.server)
|
38 | 39 |
instance_name = "blade"
|
40 |
+action = remote_execution_pb2.Action(do_not_cache=True)
|
|
41 |
+action_digest = create_digest(action.SerializeToString())
|
|
39 | 42 |
|
40 | 43 |
|
41 | 44 |
# Can mock this
|
... | ... | @@ -47,9 +50,6 @@ def context(): |
47 | 50 |
# Requests to make
|
48 | 51 |
@pytest.fixture
|
49 | 52 |
def execute_request():
|
50 |
- action_digest = remote_execution_pb2.Digest()
|
|
51 |
- action_digest.hash = 'zhora'
|
|
52 |
- |
|
53 | 53 |
yield remote_execution_pb2.ExecuteRequest(instance_name='',
|
54 | 54 |
action_digest=action_digest,
|
55 | 55 |
skip_cache_lookup=True)
|
... | ... | @@ -57,7 +57,11 @@ def execute_request(): |
57 | 57 |
|
58 | 58 |
@pytest.fixture
|
59 | 59 |
def controller():
|
60 |
- yield ExecutionController()
|
|
60 |
+ storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
|
|
61 |
+ write_session = storage.begin_write(action_digest)
|
|
62 |
+ storage.commit_write(action_digest, write_session)
|
|
63 |
+ |
|
64 |
+ yield ExecutionController(None, storage)
|
|
61 | 65 |
|
62 | 66 |
|
63 | 67 |
# Instance to test
|