Martin Blanchard pushed to branch mablanch/63-tls-encryption at BuildGrid / buildgrid
Commits:
- 1489472d by finn at 2018-08-24T10:05:46Z
- 5738650f by finn at 2018-08-24T10:05:46Z
- 5dacede6 by Martin Blanchard at 2018-08-24T14:54:50Z
- e81c6bf3 by Martin Blanchard at 2018-08-24T14:54:51Z
- 301933de by Martin Blanchard at 2018-08-24T14:54:51Z
19 changed files:
- .gitlab-ci.yml
- buildgrid/_app/bots/temp_directory.py
- buildgrid/_app/cli.py
- buildgrid/_app/commands/cmd_bot.py
- buildgrid/_app/commands/cmd_cas.py
- buildgrid/_app/commands/cmd_execute.py
- buildgrid/_app/commands/cmd_server.py
- + buildgrid/server/buildgrid_instance.py
- buildgrid/server/build_grid_server.py → buildgrid/server/buildgrid_server.py
- buildgrid/server/execution/execution_instance.py
- buildgrid/server/execution/execution_service.py
- buildgrid/server/execution/operations_service.py
- buildgrid/server/worker/bots_interface.py
- buildgrid/server/worker/bots_service.py
- docs/source/using_dummy_build.rst
- docs/source/using_simple_build.rst
- tests/integration/bots_service.py
- tests/integration/execution_service.py
- tests/integration/operations_service.py
Changes:
... | ... | @@ -28,10 +28,10 @@ before_script: |
28 | 28 |
.run-dummy-job-template: &dummy-job
|
29 | 29 |
stage: test
|
30 | 30 |
script:
|
31 |
- - ${BGD} server start &
|
|
31 |
+ - ${BGD} server start --allow-insecure &
|
|
32 | 32 |
- sleep 1 # Allow server to boot
|
33 |
- - ${BGD} bot --host=0.0.0.0 dummy &
|
|
34 |
- - ${BGD} execute --host=0.0.0.0 request-dummy --wait-for-completion
|
|
33 |
+ - ${BGD} bot dummy &
|
|
34 |
+ - ${BGD} execute request-dummy --wait-for-completion
|
|
35 | 35 |
|
36 | 36 |
tests-debian-stretch:
|
37 | 37 |
<<: *linux-tests
|
... | ... | @@ -29,7 +29,7 @@ def work_temp_directory(context, lease): |
29 | 29 |
then uploads results back to CAS
|
30 | 30 |
"""
|
31 | 31 |
|
32 |
- instance_name = context.instance_name
|
|
32 |
+ parent = context.parent
|
|
33 | 33 |
stub_bytestream = bytestream_pb2_grpc.ByteStreamStub(context.channel)
|
34 | 34 |
|
35 | 35 |
action_digest = remote_execution_pb2.Digest()
|
... | ... | @@ -37,12 +37,12 @@ def work_temp_directory(context, lease): |
37 | 37 |
|
38 | 38 |
action = remote_execution_pb2.Action()
|
39 | 39 |
|
40 |
- action = parse_to_pb2_from_fetch(action, stub_bytestream, action_digest, instance_name)
|
|
40 |
+ action = parse_to_pb2_from_fetch(action, stub_bytestream, action_digest, parent)
|
|
41 | 41 |
|
42 | 42 |
with tempfile.TemporaryDirectory() as temp_dir:
|
43 | 43 |
|
44 | 44 |
command = remote_execution_pb2.Command()
|
45 |
- command = parse_to_pb2_from_fetch(command, stub_bytestream, action.command_digest, instance_name)
|
|
45 |
+ command = parse_to_pb2_from_fetch(command, stub_bytestream, action.command_digest, parent)
|
|
46 | 46 |
|
47 | 47 |
arguments = "cd {} &&".format(temp_dir)
|
48 | 48 |
|
... | ... | @@ -51,7 +51,7 @@ def work_temp_directory(context, lease): |
51 | 51 |
|
52 | 52 |
context.logger.info(arguments)
|
53 | 53 |
|
54 |
- write_fetch_directory(temp_dir, stub_bytestream, action.input_root_digest, instance_name)
|
|
54 |
+ write_fetch_directory(temp_dir, stub_bytestream, action.input_root_digest, parent)
|
|
55 | 55 |
|
56 | 56 |
proc = subprocess.Popen(arguments,
|
57 | 57 |
shell=True,
|
... | ... | @@ -75,7 +75,7 @@ def work_temp_directory(context, lease): |
75 | 75 |
requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
|
76 | 76 |
digest=digest, data=chunk))
|
77 | 77 |
|
78 |
- request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
|
|
78 |
+ request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=parent,
|
|
79 | 79 |
requests=requests)
|
80 | 80 |
|
81 | 81 |
stub_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
|
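For reference, the worker now forwards its --parent value as the CAS instance_name. A minimal sketch of the upload request it ends up sending (the digest and payload below are placeholder values, not taken from the patch):

.. code-block:: python

    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    # 'main' mirrors the new default parent; hash, size and data are dummies.
    blob = remote_execution_pb2.BatchUpdateBlobsRequest.Request(
        digest=remote_execution_pb2.Digest(hash='deadbeef', size_bytes=4),
        data=b'blob')
    request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name='main',
                                                           requests=[blob])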
... | ... | @@ -25,6 +25,9 @@ import os |
25 | 25 |
import logging
|
26 | 26 |
|
27 | 27 |
import click
|
28 |
+import grpc
|
|
29 |
+ |
|
30 |
+from buildgrid.utils import read_file
|
|
28 | 31 |
|
29 | 32 |
from . import _logging
|
30 | 33 |
|
... | ... | @@ -35,7 +38,114 @@ class Context: |
35 | 38 |
|
36 | 39 |
def __init__(self):
|
37 | 40 |
self.verbose = False
|
38 |
- self.home = os.getcwd()
|
|
41 |
+ |
|
42 |
+ self.user_home = os.getcwd()
|
|
43 |
+ |
|
44 |
+ self.user_cache_home = os.environ.get('XDG_CACHE_HOME')
|
|
45 |
+ if not self.user_cache_home:
|
|
46 |
+ self.user_cache_home = os.path.expanduser('~/.cache')
|
|
47 |
+ self.cache_home = os.path.join(self.user_cache_home, 'buildgrid')
|
|
48 |
+ |
|
49 |
+ self.user_config_home = os.environ.get('XDG_CONFIG_HOME')
|
|
50 |
+ if not self.user_config_home:
|
|
51 |
+ self.user_config_home = os.path.expanduser('~/.config')
|
|
52 |
+ self.config_home = os.path.join(self.user_config_home, 'buildgrid')
|
|
53 |
+ |
|
54 |
+ self.user_data_home = os.environ.get('XDG_DATA_HOME')
|
|
55 |
+ if not self.user_data_home:
|
|
56 |
+ self.user_data_home = os.path.expanduser('~/.local/share')
|
|
57 |
+ self.data_home = os.path.join(self.user_data_home, 'buildgrid')
|
|
58 |
+ |
|
59 |
+ def load_client_credentials(self, client_key=None, client_cert=None,
|
|
60 |
+ server_cert=None, use_default_client_keys=False):
|
|
61 |
+ """Looks-up and loads TLS client gRPC credentials.
|
|
62 |
+ |
|
63 |
+ Args:
|
|
64 |
+ client_key(str): private client key file path.
|
|
65 |
+ client_cert(str): public client certificate file path.
|
|
66 |
+ server_cert(str): public server certificate file path.
|
|
67 |
+ use_default_client_keys(bool, optional): whether or not to try
|
|
68 |
+ loading client keys from default location. Defaults to False.
|
|
69 |
+ |
|
70 |
+ Returns:
|
|
71 |
+ :obj:`ChannelCredentials`: The credentials to use for a
|
|
72 |
+ TLS-encrypted gRPC client channel.
|
|
73 |
+ """
|
|
74 |
+ if not client_key or not os.path.exists(client_key):
|
|
75 |
+ if use_default_client_keys:
|
|
76 |
+ client_key = os.path.join(self.config_home, 'client.key')
|
|
77 |
+ else:
|
|
78 |
+ client_key = None
|
|
79 |
+ |
|
80 |
+ if not client_cert or not os.path.exists(client_cert):
|
|
81 |
+ if use_default_client_keys:
|
|
82 |
+ client_cert = os.path.join(self.config_home, 'client.crt')
|
|
83 |
+ else:
|
|
84 |
+ client_cert = None
|
|
85 |
+ |
|
86 |
+ if not server_cert or not os.path.exists(server_cert):
|
|
87 |
+ server_cert = os.path.join(self.config_home, 'server.crt')
|
|
88 |
+ if not os.path.exists(server_cert):
|
|
89 |
+ return None
|
|
90 |
+ |
|
91 |
+ server_cert_pem = read_file(server_cert)
|
|
92 |
+ if client_key and os.path.exists(client_key):
|
|
93 |
+ client_key_pem = read_file(client_key)
|
|
94 |
+ else:
|
|
95 |
+ client_key_pem = None
|
|
96 |
+ if client_key_pem and client_cert and os.path.exists(client_cert):
|
|
97 |
+ client_cert_pem = read_file(client_cert)
|
|
98 |
+ else:
|
|
99 |
+ client_cert_pem = None
|
|
100 |
+ |
|
101 |
+ return grpc.ssl_channel_credentials(root_certificates=server_cert_pem,
|
|
102 |
+ private_key=client_key_pem,
|
|
103 |
+ certificate_chain=client_cert_pem)
|
|
104 |
+ |
|
105 |
+ def load_server_credentials(self, server_key=None, server_cert=None,
|
|
106 |
+ client_certs=None, use_default_client_certs=False):
|
|
107 |
+ """Looks-up and loads TLS server gRPC credentials.
|
|
108 |
+ |
|
109 |
+ All private and public keys are expected to be PEM-encoded.
|
|
110 |
+ |
|
111 |
+ Args:
|
|
112 |
+ server_key(str): private server key file path.
|
|
113 |
+ server_cert(str): public server certificate file path.
|
|
114 |
+ client_certs(str): public client certificates file path.
|
|
115 |
+ use_default_client_certs(bool, optional): whether or not to try
|
|
116 |
+ loading public client certificates from default location.
|
|
117 |
+ Defaults to False.
|
|
118 |
+ |
|
119 |
+ Returns:
|
|
120 |
+ :obj:`ServerCredentials`: The credentials to use for a
|
|
121 |
+ TLS-encrypted gRPC server channel.
|
|
122 |
+ """
|
|
123 |
+ if not server_key or not os.path.exists(server_key):
|
|
124 |
+ server_key = os.path.join(self.config_home, 'server.key')
|
|
125 |
+ if not os.path.exists(server_key):
|
|
126 |
+ return None
|
|
127 |
+ |
|
128 |
+ if not server_cert or not os.path.exists(server_cert):
|
|
129 |
+ server_cert = os.path.join(self.config_home, 'server.crt')
|
|
130 |
+ if not os.path.exists(server_cert):
|
|
131 |
+ return None
|
|
132 |
+ |
|
133 |
+ if not client_certs or not os.path.exists(client_certs):
|
|
134 |
+ if use_default_client_certs:
|
|
135 |
+ client_certs = os.path.join(self.config_home, 'client.crt')
|
|
136 |
+ else:
|
|
137 |
+ client_certs = None
|
|
138 |
+ |
|
139 |
+ server_key_pem = read_file(server_key)
|
|
140 |
+ server_cert_pem = read_file(server_cert)
|
|
141 |
+ if client_certs and os.path.exists(client_certs):
|
|
142 |
+ client_certs_pem = read_file(client_certs)
|
|
143 |
+ else:
|
|
144 |
+ client_certs_pem = None
|
|
145 |
+ |
|
146 |
+ return grpc.ssl_server_credentials([(server_key_pem, server_cert_pem)],
|
|
147 |
+ root_certificates=client_certs_pem,
|
|
148 |
+ require_client_auth=bool(client_certs))
|
|
39 | 149 |
|
40 | 150 |
|
41 | 151 |
pass_context = click.make_pass_decorator(Context, ensure=True)
|
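The bot, cas and execute commands below all turn their --remote URL into a gRPC channel with the same pattern. A condensed sketch of that flow, assuming a Context instance as defined above (the helper function itself is illustrative, not part of the patch):

.. code-block:: python

    from urllib.parse import urlparse

    import grpc


    def make_channel(context, remote, client_key=None, client_cert=None, server_cert=None):
        """Open an insecure channel for http:// remotes, a TLS-encrypted one otherwise."""
        url = urlparse(remote)
        target = '{}:{}'.format(url.hostname, url.port or 50051)
        if url.scheme == 'http':
            return grpc.insecure_channel(target)
        credentials = context.load_client_credentials(client_key, client_cert, server_cert)
        if credentials is None:
            raise RuntimeError("No TLS keys given and none found in " + context.config_home)
        return grpc.secure_channel(target, credentials)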
... | ... | @@ -21,8 +21,9 @@ Create a bot interface and request work |
21 | 21 |
"""
|
22 | 22 |
|
23 | 23 |
import logging
|
24 |
- |
|
25 | 24 |
from pathlib import Path, PurePath
|
25 |
+import sys
|
|
26 |
+from urllib.parse import urlparse
|
|
26 | 27 |
|
27 | 28 |
import click
|
28 | 29 |
import grpc
|
... | ... | @@ -35,20 +36,38 @@ from ..cli import pass_context |
35 | 36 |
|
36 | 37 |
|
37 | 38 |
@click.group(name='bot', short_help="Create and register bot clients.")
|
38 |
-@click.option('--parent', type=click.STRING, default='bgd_test', show_default=True,
|
|
39 |
+@click.option('--remote', type=click.STRING, default='http://localhost:50051', show_default=True,
|
|
40 |
+ help="Remote execution server's URL (port defaults to 50051 if not specified).")
|
|
41 |
+@click.option('--client-key', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
42 |
+ help="Private client key for TLS (PEM-encoded)")
|
|
43 |
+@click.option('--client-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
44 |
+ help="Public client certificate for TLS (PEM-encoded)")
|
|
45 |
+@click.option('--server-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
46 |
+ help="Public server certificate for TLS (PEM-encoded)")
|
|
47 |
+@click.option('--parent', type=click.STRING, default='main', show_default=True,
|
|
39 | 48 |
help="Targeted farm resource.")
|
40 |
-@click.option('--port', type=click.INT, default='50051', show_default=True,
|
|
41 |
- help="Remote server's port number.")
|
|
42 |
-@click.option('--host', type=click.STRING, default='localhost', show_default=True,
|
|
43 |
- help="Renote server's hostname.")
|
|
44 | 49 |
@pass_context
|
45 |
-def cli(context, host, port, parent):
|
|
46 |
- channel = grpc.insecure_channel('{}:{}'.format(host, port))
|
|
47 |
- interface = bot_interface.BotInterface(channel)
|
|
50 |
+def cli(context, remote, parent, client_key, client_cert, server_cert):
|
|
51 |
+ url = urlparse(remote)
|
|
52 |
+ |
|
53 |
+ context.remote = '{}:{}'.format(url.hostname, url.port or 50051)
|
|
54 |
+ context.parent = parent
|
|
55 |
+ |
|
56 |
+ if url.scheme == 'http':
|
|
57 |
+ context.channel = grpc.insecure_channel(context.remote)
|
|
58 |
+ else:
|
|
59 |
+ credentials = context.load_client_credentials(client_key, client_cert, server_cert)
|
|
60 |
+ if not credentials:
|
|
61 |
+ click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
|
62 |
+ "Use --allow-insecure in order to deactivate TLS encryption.\n", err=True)
|
|
63 |
+ sys.exit(-1)
|
|
64 |
+ |
|
65 |
+ context.channel = grpc.secure_channel(context.remote, credentials)
|
|
48 | 66 |
|
49 | 67 |
context.logger = logging.getLogger(__name__)
|
50 |
- context.logger.info("Starting on port {}".format(port))
|
|
51 |
- context.channel = channel
|
|
68 |
+ context.logger.debug("Starting for remote {}".format(context.remote))
|
|
69 |
+ |
|
70 |
+ interface = bot_interface.BotInterface(context.channel)
|
|
52 | 71 |
|
53 | 72 |
worker = Worker()
|
54 | 73 |
worker.add_device(Device())
|
... | ... | @@ -75,14 +94,11 @@ def run_dummy(context): |
75 | 94 |
|
76 | 95 |
|
77 | 96 |
@cli.command('temp-directory', short_help="Runs commands in temp directory and uploads results.")
|
78 |
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
|
|
79 |
- help="Targeted farm instance name.")
|
|
80 | 97 |
@pass_context
|
81 |
-def run_temp_directory(context, instance_name):
|
|
98 |
+def run_temp_directory(context):
|
|
82 | 99 |
""" Downloads files and command from CAS and runs
|
83 | 100 |
in a temp directory, uploading result back to CAS
|
84 | 101 |
"""
|
85 |
- context.instance_name = instance_name
|
|
86 | 102 |
try:
|
87 | 103 |
b = bot.Bot(context.bot_session)
|
88 | 104 |
b.session(temp_directory.work_temp_directory,
|
... | ... | @@ -21,6 +21,9 @@ Request work to be executed and monitor status of jobs. |
21 | 21 |
"""
|
22 | 22 |
|
23 | 23 |
import logging
|
24 |
+import sys
|
|
25 |
+from urllib.parse import urlparse
|
|
26 |
+ |
|
24 | 27 |
import click
|
25 | 28 |
import grpc
|
26 | 29 |
|
... | ... | @@ -31,25 +34,42 @@ from ..cli import pass_context |
31 | 34 |
|
32 | 35 |
|
33 | 36 |
@click.group(name='cas', short_help="Interact with the CAS server.")
|
34 |
-@click.option('--port', type=click.INT, default='50051', show_default=True,
|
|
35 |
- help="Remote server's port number.")
|
|
36 |
-@click.option('--host', type=click.STRING, default='localhost', show_default=True,
|
|
37 |
- help="Remote server's hostname.")
|
|
37 |
+@click.option('--remote', type=click.STRING, default='http://localhost:50051', show_default=True,
|
|
38 |
+ help="Remote execution server's URL (port defaults to 50051 if no specified).")
|
|
39 |
+@click.option('--client-key', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
40 |
+ help="Private client key for TLS (PEM-encoded)")
|
|
41 |
+@click.option('--client-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
42 |
+ help="Public client certificate for TLS (PEM-encoded)")
|
|
43 |
+@click.option('--server-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
44 |
+ help="Public server certificate for TLS (PEM-encoded)")
|
|
45 |
+@click.option('--instance-name', type=click.STRING, default='main', show_default=True,
|
|
46 |
+ help="Targeted farm instance name.")
|
|
38 | 47 |
@pass_context
|
39 |
-def cli(context, host, port):
|
|
40 |
- context.logger = logging.getLogger(__name__)
|
|
41 |
- context.logger.info("Starting on port {}".format(port))
|
|
48 |
+def cli(context, remote, instance_name, client_key, client_cert, server_cert):
|
|
49 |
+ url = urlparse(remote)
|
|
42 | 50 |
|
43 |
- context.channel = grpc.insecure_channel('{}:{}'.format(host, port))
|
|
44 |
- context.port = port
|
|
51 |
+ context.remote = '{}:{}'.format(url.hostname, url.port or 50051)
|
|
52 |
+ context.instance_name = instance_name
|
|
53 |
+ |
|
54 |
+ if url.scheme == 'http':
|
|
55 |
+ context.channel = grpc.insecure_channel(context.remote)
|
|
56 |
+ else:
|
|
57 |
+ credentials = context.load_client_credentials(client_key, client_cert, server_cert)
|
|
58 |
+ if not credentials:
|
|
59 |
+ click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
|
60 |
+ "Use --allow-insecure in order to deactivate TLS encryption.\n", err=True)
|
|
61 |
+ sys.exit(-1)
|
|
62 |
+ |
|
63 |
+ context.channel = grpc.secure_channel(context.remote, credentials)
|
|
64 |
+ |
|
65 |
+ context.logger = logging.getLogger(__name__)
|
|
66 |
+ context.logger.debug("Starting for remote {}".format(context.remote))
|
|
45 | 67 |
|
46 | 68 |
|
47 | 69 |
@cli.command('upload-files', short_help="Upload files to the CAS server.")
|
48 |
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
|
|
49 |
- help="Targeted farm instance name.")
|
|
50 | 70 |
@click.argument('files', nargs=-1, type=click.File('rb'), required=True)
|
51 | 71 |
@pass_context
|
52 |
-def upload_files(context, files, instance_name):
|
|
72 |
+def upload_files(context, files):
|
|
53 | 73 |
stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
|
54 | 74 |
|
55 | 75 |
requests = []
|
... | ... | @@ -58,7 +78,7 @@ def upload_files(context, files, instance_name): |
58 | 78 |
requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
|
59 | 79 |
digest=create_digest(chunk), data=chunk))
|
60 | 80 |
|
61 |
- request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
|
|
81 |
+ request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=context.instance_name,
|
|
62 | 82 |
requests=requests)
|
63 | 83 |
|
64 | 84 |
context.logger.info("Sending: {}".format(request))
|
... | ... | @@ -67,11 +87,9 @@ def upload_files(context, files, instance_name): |
67 | 87 |
|
68 | 88 |
|
69 | 89 |
@cli.command('upload-dir', short_help="Upload a directory to the CAS server.")
|
70 |
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
|
|
71 |
- help="Targeted farm instance name.")
|
|
72 | 90 |
@click.argument('directory', nargs=1, type=click.Path(), required=True)
|
73 | 91 |
@pass_context
|
74 |
-def upload_dir(context, directory, instance_name):
|
|
92 |
+def upload_dir(context, directory):
|
|
75 | 93 |
context.logger.info("Uploading directory to cas")
|
76 | 94 |
stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
|
77 | 95 |
|
... | ... | @@ -81,7 +99,7 @@ def upload_dir(context, directory, instance_name): |
81 | 99 |
requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
|
82 | 100 |
digest=file_digest, data=chunk))
|
83 | 101 |
|
84 |
- request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
|
|
102 |
+ request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=context.instance_name,
|
|
85 | 103 |
requests=requests)
|
86 | 104 |
|
87 | 105 |
context.logger.info("Request:\n{}".format(request))
|
... | ... | @@ -22,8 +22,11 @@ Request work to be executed and monitor status of jobs. |
22 | 22 |
|
23 | 23 |
import errno
|
24 | 24 |
import logging
|
25 |
-import stat
|
|
26 | 25 |
import os
|
26 |
+import stat
|
|
27 |
+import sys
|
|
28 |
+from urllib.parse import urlparse
|
|
29 |
+ |
|
27 | 30 |
import click
|
28 | 31 |
import grpc
|
29 | 32 |
|
... | ... | @@ -36,34 +39,51 @@ from ..cli import pass_context |
36 | 39 |
|
37 | 40 |
|
38 | 41 |
@click.group(name='execute', short_help="Execute simple operations.")
|
39 |
-@click.option('--port', type=click.INT, default='50051', show_default=True,
|
|
40 |
- help="Remote server's port number.")
|
|
41 |
-@click.option('--host', type=click.STRING, default='localhost', show_default=True,
|
|
42 |
- help="Remote server's hostname.")
|
|
42 |
+@click.option('--remote', type=click.STRING, default='http://localhost:50051', show_default=True,
|
|
43 |
+ help="Remote execution server's URL (port defaults to 50051 if no specified).")
|
|
44 |
+@click.option('--client-key', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
45 |
+ help="Private client key for TLS (PEM-encoded)")
|
|
46 |
+@click.option('--client-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
47 |
+ help="Public client certificate for TLS (PEM-encoded)")
|
|
48 |
+@click.option('--server-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
49 |
+ help="Public server certificate for TLS (PEM-encoded)")
|
|
50 |
+@click.option('--instance-name', type=click.STRING, default='main', show_default=True,
|
|
51 |
+ help="Targeted farm instance name.")
|
|
43 | 52 |
@pass_context
|
44 |
-def cli(context, host, port):
|
|
45 |
- context.logger = logging.getLogger(__name__)
|
|
46 |
- context.logger.info("Starting on port {}".format(port))
|
|
53 |
+def cli(context, remote, instance_name, client_key, client_cert, server_cert):
|
|
54 |
+ url = urlparse(remote)
|
|
55 |
+ |
|
56 |
+ context.remote = '{}:{}'.format(url.hostname, url.port or 50051)
|
|
57 |
+ context.instance_name = instance_name
|
|
47 | 58 |
|
48 |
- context.channel = grpc.insecure_channel('{}:{}'.format(host, port))
|
|
49 |
- context.port = port
|
|
59 |
+ if url.scheme == 'http':
|
|
60 |
+ context.channel = grpc.insecure_channel(context.remote)
|
|
61 |
+ else:
|
|
62 |
+ credentials = context.load_client_credentials(client_key, client_cert, server_cert)
|
|
63 |
+ if not credentials:
|
|
64 |
+ click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
|
65 |
+ "Use --allow-insecure in order to deactivate TLS encryption.\n", err=True)
|
|
66 |
+ sys.exit(-1)
|
|
67 |
+ |
|
68 |
+ context.channel = grpc.secure_channel(context.remote, credentials)
|
|
69 |
+ |
|
70 |
+ context.logger = logging.getLogger(__name__)
|
|
71 |
+ context.logger.debug("Starting for remote {}".format(context.remote))
|
|
50 | 72 |
|
51 | 73 |
|
52 | 74 |
@cli.command('request-dummy', short_help="Send a dummy action.")
|
53 |
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
|
|
54 |
- help="Targeted farm instance name.")
|
|
55 | 75 |
@click.option('--number', type=click.INT, default=1, show_default=True,
|
56 | 76 |
help="Number of request to send.")
|
57 | 77 |
@click.option('--wait-for-completion', is_flag=True,
|
58 | 78 |
help="Stream updates until jobs are completed.")
|
59 | 79 |
@pass_context
|
60 |
-def request_dummy(context, number, instance_name, wait_for_completion):
|
|
80 |
+def request_dummy(context, number, wait_for_completion):
|
|
61 | 81 |
action_digest = remote_execution_pb2.Digest()
|
62 | 82 |
|
63 | 83 |
context.logger.info("Sending execution request...")
|
64 | 84 |
stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
|
65 | 85 |
|
66 |
- request = remote_execution_pb2.ExecuteRequest(instance_name=instance_name,
|
|
86 |
+ request = remote_execution_pb2.ExecuteRequest(instance_name=context.instance_name,
|
|
67 | 87 |
action_digest=action_digest,
|
68 | 88 |
skip_cache_lookup=True)
|
69 | 89 |
|
... | ... | @@ -98,7 +118,7 @@ def list_operations(context): |
98 | 118 |
context.logger.info("Getting list of operations")
|
99 | 119 |
stub = operations_pb2_grpc.OperationsStub(context.channel)
|
100 | 120 |
|
101 |
- request = operations_pb2.ListOperationsRequest()
|
|
121 |
+ request = operations_pb2.ListOperationsRequest(name=context.instance_name)
|
|
102 | 122 |
|
103 | 123 |
response = stub.ListOperations(request)
|
104 | 124 |
|
... | ... | @@ -115,7 +135,8 @@ def list_operations(context): |
115 | 135 |
@pass_context
|
116 | 136 |
def wait_execution(context, operation_name):
|
117 | 137 |
stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
|
118 |
- request = remote_execution_pb2.WaitExecutionRequest(name=operation_name)
|
|
138 |
+ request = remote_execution_pb2.WaitExecutionRequest(instance_name=context.instance_name,
|
|
139 |
+ name=operation_name)
|
|
119 | 140 |
|
120 | 141 |
response = stub.WaitExecution(request)
|
121 | 142 |
|
... | ... | @@ -124,8 +145,6 @@ def wait_execution(context, operation_name): |
124 | 145 |
|
125 | 146 |
|
126 | 147 |
@cli.command('command', short_help="Send a command to be executed.")
|
127 |
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
|
|
128 |
- help="Targeted farm instance name.")
|
|
129 | 148 |
@click.option('--output-file', nargs=2, type=(click.STRING, click.BOOL), multiple=True,
|
130 | 149 |
help="Tuple of expected output file and is-executeable flag.")
|
131 | 150 |
@click.option('--output-directory', default='testing', show_default=True,
|
... | ... | @@ -133,7 +152,7 @@ def wait_execution(context, operation_name): |
133 | 152 |
@click.argument('input-root', nargs=1, type=click.Path(), required=True)
|
134 | 153 |
@click.argument('commands', nargs=-1, type=click.STRING, required=True)
|
135 | 154 |
@pass_context
|
136 |
-def command(context, input_root, commands, output_file, output_directory, instance_name):
|
|
155 |
+def command(context, input_root, commands, output_file, output_directory):
|
|
137 | 156 |
stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
|
138 | 157 |
|
139 | 158 |
execute_command = remote_execution_pb2.Command()
|
... | ... | @@ -170,11 +189,11 @@ def command(context, input_root, commands, output_file, output_directory, instan |
170 | 189 |
requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
|
171 | 190 |
digest=action_digest, data=action.SerializeToString()))
|
172 | 191 |
|
173 |
- request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
|
|
192 |
+ request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=context.instance_name,
|
|
174 | 193 |
requests=requests)
|
175 | 194 |
remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel).BatchUpdateBlobs(request)
|
176 | 195 |
|
177 |
- request = remote_execution_pb2.ExecuteRequest(instance_name=instance_name,
|
|
196 |
+ request = remote_execution_pb2.ExecuteRequest(instance_name=context.instance_name,
|
|
178 | 197 |
action_digest=action_digest,
|
179 | 198 |
skip_cache_lookup=True)
|
180 | 199 |
response = stub.Execute(request)
|
... | ... | @@ -201,7 +220,7 @@ def command(context, input_root, commands, output_file, output_directory, instan |
201 | 220 |
raise
|
202 | 221 |
|
203 | 222 |
with open(path, 'wb+') as f:
|
204 |
- write_fetch_blob(f, stub, output_file_response.digest, instance_name)
|
|
223 |
+ write_fetch_blob(f, stub, output_file_response.digest, context.instance_name)
|
|
205 | 224 |
|
206 | 225 |
if output_file_response.path in output_executeables:
|
207 | 226 |
st = os.stat(path)
|
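Operations are now namespaced per instance, so a client passes the instance name both when listing operations and when waiting on a specific one. A short client-side sketch using the stubs shown above ('main' and the operation id are placeholder values):

.. code-block:: python

    from buildgrid._protos.build.bazel.remote.execution.v2 import (remote_execution_pb2,
                                                                   remote_execution_pb2_grpc)
    from buildgrid._protos.google.longrunning import operations_pb2, operations_pb2_grpc


    def list_and_wait(channel, instance_name='main', operation_id='1234'):
        # Operations are listed per instance name...
        operations_stub = operations_pb2_grpc.OperationsStub(channel)
        listing = operations_stub.ListOperations(
            operations_pb2.ListOperationsRequest(name=instance_name))

        # ...and a single operation is awaited with the instance name attached.
        execution_stub = remote_execution_pb2_grpc.ExecutionStub(channel)
        updates = execution_stub.WaitExecution(
            remote_execution_pb2.WaitExecutionRequest(instance_name=instance_name,
                                                      name=operation_id))
        return listing, updates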
... | ... | @@ -22,10 +22,11 @@ Create a BuildGrid server. |
22 | 22 |
|
23 | 23 |
import asyncio
|
24 | 24 |
import logging
|
25 |
+import sys
|
|
25 | 26 |
|
26 | 27 |
import click
|
27 | 28 |
|
28 |
-from buildgrid.server import build_grid_server
|
|
29 |
+from buildgrid.server import buildgrid_server
|
|
29 | 30 |
from buildgrid.server.cas.storage.disk import DiskStorage
|
30 | 31 |
from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
|
31 | 32 |
from buildgrid.server.cas.storage.s3 import S3Storage
|
... | ... | @@ -41,17 +42,25 @@ _SIZE_PREFIXES = {'k': 2 ** 10, 'm': 2 ** 20, 'g': 2 ** 30, 't': 2 ** 40} |
41 | 42 |
@pass_context
|
42 | 43 |
def cli(context):
|
43 | 44 |
context.logger = logging.getLogger(__name__)
|
44 |
- context.logger.info("BuildGrid server booting up")
|
|
45 | 45 |
|
46 | 46 |
|
47 | 47 |
@cli.command('start', short_help="Setup a new server instance.")
|
48 |
+@click.argument('instances', nargs=-1, type=click.STRING)
|
|
48 | 49 |
@click.option('--port', type=click.INT, default='50051', show_default=True,
|
49 | 50 |
help="The port number to be listened.")
|
50 |
-@click.option('--max-cached-actions', type=click.INT, default=50, show_default=True,
|
|
51 |
- help="Maximum number of actions to keep in the ActionCache.")
|
|
51 |
+@click.option('--server-key', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
52 |
+ help="Private server key for TLS (PEM-encoded)")
|
|
53 |
+@click.option('--server-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
54 |
+ help="Public server certificate for TLS (PEM-encoded)")
|
|
55 |
+@click.option('--client-certs', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
56 |
+ help="Public client certificates for TLS (PEM-encoded)")
|
|
57 |
+@click.option('--allow-insecure', type=click.BOOL, is_flag=True,
|
|
58 |
+ help="Whether or not to allow unencrypted connections.")
|
|
52 | 59 |
@click.option('--allow-update-action-result/--forbid-update-action-result',
|
53 | 60 |
'allow_uar', default=True, show_default=True,
|
54 | 61 |
help="Whether or not to allow clients to manually edit the action cache.")
|
62 |
+@click.option('--max-cached-actions', type=click.INT, default=50, show_default=True,
|
|
63 |
+ help="Maximum number of actions to keep in the ActionCache.")
|
|
55 | 64 |
@click.option('--cas', type=click.Choice(('lru', 's3', 'disk', 'with-cache')),
|
56 | 65 |
help="The CAS storage type to use.")
|
57 | 66 |
@click.option('--cas-cache', type=click.Choice(('lru', 's3', 'disk')),
|
... | ... | @@ -67,7 +76,21 @@ def cli(context): |
67 | 76 |
@click.option('--cas-disk-directory', type=click.Path(file_okay=False, dir_okay=True, writable=True),
|
68 | 77 |
help="For --cas=disk, the folder to store CAS blobs in.")
|
69 | 78 |
@pass_context
|
70 |
-def start(context, port, max_cached_actions, allow_uar, cas, **cas_args):
|
|
79 |
+def start(context, port, allow_insecure, server_key, server_cert, client_certs,
|
|
80 |
+ instances, max_cached_actions, allow_uar, cas, **cas_args):
|
|
81 |
+ """Setups a new server instance."""
|
|
82 |
+ credentials = None
|
|
83 |
+ if not allow_insecure:
|
|
84 |
+ credentials = context.load_server_credentials(server_key, server_cert, client_certs)
|
|
85 |
+ if not credentials and not allow_insecure:
|
|
86 |
+ click.echo("ERROR: no TLS keys were specified and no defaults could be found.\n" +
|
|
87 |
+ "Use --allow-insecure in order to deactivate TLS encryption.\n", err=True)
|
|
88 |
+ sys.exit(-1)
|
|
89 |
+ |
|
90 |
+ context.credentials = credentials
|
|
91 |
+ context.port = port
|
|
92 |
+ |
|
93 |
+ context.logger.info("BuildGrid server booting up")
|
|
71 | 94 |
context.logger.info("Starting on port {}".format(port))
|
72 | 95 |
|
73 | 96 |
cas_storage = _make_cas_storage(context, cas, cas_args)
|
... | ... | @@ -79,9 +102,14 @@ def start(context, port, max_cached_actions, allow_uar, cas, **cas_args): |
79 | 102 |
else:
|
80 | 103 |
action_cache = ActionCache(cas_storage, max_cached_actions, allow_uar)
|
81 | 104 |
|
82 |
- server = build_grid_server.BuildGridServer(port,
|
|
83 |
- cas_storage=cas_storage,
|
|
84 |
- action_cache=action_cache)
|
|
105 |
+ if instances is None:
|
|
106 |
+ instances = ['main']
|
|
107 |
+ |
|
108 |
+ server = buildgrid_server.BuildGridServer(port=context.port,
|
|
109 |
+ credentials=context.credentials,
|
|
110 |
+ instances=instances,
|
|
111 |
+ cas_storage=cas_storage,
|
|
112 |
+ action_cache=action_cache)
|
|
85 | 113 |
loop = asyncio.get_event_loop()
|
86 | 114 |
try:
|
87 | 115 |
server.start()
|
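On the server side the net effect is that the port is only opened unencrypted when --allow-insecure is given; otherwise the credentials returned by Context.load_server_credentials() are bound to a secure port. A rough sketch, using only the gRPC calls that appear in this diff:

.. code-block:: python

    from concurrent import futures

    import grpc


    def bind_port(port, credentials=None, max_workers=10):
        """Return a grpc.Server listening on the port, TLS-enabled when credentials are given."""
        server = grpc.server(futures.ThreadPoolExecutor(max_workers))
        address = '[::]:{}'.format(port)
        if credentials is not None:
            # credentials would come from grpc.ssl_server_credentials(), as in
            # Context.load_server_credentials() above.
            server.add_secure_port(address, credentials)
        else:
            server.add_insecure_port(address)
        return server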
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+"""
|
|
17 |
+BuildGrid Instance
|
|
18 |
+==================
|
|
19 |
+ |
|
20 |
+An instance of the BuildGrid server.
|
|
21 |
+ |
|
22 |
+Contains a scheduler, an execution instance and an interface to the bots.
|
|
23 |
+"""
|
|
24 |
+ |
|
25 |
+ |
|
26 |
+import logging
|
|
27 |
+ |
|
28 |
+from .execution.execution_instance import ExecutionInstance
|
|
29 |
+from .scheduler import Scheduler
|
|
30 |
+from .worker.bots_interface import BotsInterface
|
|
31 |
+ |
|
32 |
+ |
|
33 |
+class BuildGridInstance(ExecutionInstance, BotsInterface):
|
|
34 |
+ |
|
35 |
+ def __init__(self, action_cache=None, cas_storage=None):
|
|
36 |
+ scheduler = Scheduler(action_cache)
|
|
37 |
+ |
|
38 |
+ self.logger = logging.getLogger(__name__)
|
|
39 |
+ |
|
40 |
+ ExecutionInstance.__init__(self, scheduler, cas_storage)
|
|
41 |
+ BotsInterface.__init__(self, scheduler)
|
|
42 |
+ |
|
43 |
+ def stream_operation_updates(self, message_queue, operation_name):
|
|
44 |
+ operation = message_queue.get()
|
|
45 |
+ while not operation.done:
|
|
46 |
+ yield operation
|
|
47 |
+ operation = message_queue.get()
|
|
48 |
+ yield operation
|
|
49 |
+ |
|
50 |
+ def cancel_operation(self, name):
|
|
51 |
+ # TODO: Cancel leases
|
|
52 |
+ raise NotImplementedError("Cancelled operations not supported")
|
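Each configured instance name gets its own BuildGridInstance (one scheduler plus execution and bots endpoints); the renamed BuildGridServer below keys its gRPC services on that mapping. A minimal construction sketch ('main' is the new default name, 'dev' is hypothetical):

.. code-block:: python

    from buildgrid.server.buildgrid_instance import BuildGridInstance

    # One independent scheduler / execution / bots state per instance name.
    instances = {name: BuildGridInstance(action_cache=None, cas_storage=None)
                 for name in ('main', 'dev')}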
... | ... | @@ -29,34 +29,27 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_p |
29 | 29 |
from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2_grpc
|
30 | 30 |
from buildgrid._protos.google.longrunning import operations_pb2_grpc
|
31 | 31 |
|
32 |
+from .buildgrid_instance import BuildGridInstance
|
|
32 | 33 |
from .cas.bytestream_service import ByteStreamService
|
33 | 34 |
from .cas.content_addressable_storage_service import ContentAddressableStorageService
|
34 | 35 |
from .execution.action_cache_service import ActionCacheService
|
35 | 36 |
from .execution.execution_service import ExecutionService
|
36 | 37 |
from .execution.operations_service import OperationsService
|
37 |
-from .execution.execution_instance import ExecutionInstance
|
|
38 |
-from .scheduler import Scheduler
|
|
39 | 38 |
from .worker.bots_service import BotsService
|
40 |
-from .worker.bots_interface import BotsInterface
|
|
41 | 39 |
|
42 | 40 |
|
43 | 41 |
class BuildGridServer:
|
44 | 42 |
|
45 |
- def __init__(self, port='50051', max_workers=10, cas_storage=None, action_cache=None):
|
|
46 |
- port = '[::]:{0}'.format(port)
|
|
47 |
- scheduler = Scheduler(action_cache)
|
|
48 |
- bots_interface = BotsInterface(scheduler)
|
|
49 |
- execution_instance = ExecutionInstance(scheduler, cas_storage)
|
|
43 |
+ def __init__(self, port=50051, credentials=None, instances=None,
|
|
44 |
+ max_workers=10, action_cache=None, cas_storage=None):
|
|
45 |
+ address = '[::]:{0}'.format(port)
|
|
50 | 46 |
|
51 | 47 |
self._server = grpc.server(futures.ThreadPoolExecutor(max_workers))
|
52 |
- self._server.add_insecure_port(port)
|
|
53 | 48 |
|
54 |
- bots_pb2_grpc.add_BotsServicer_to_server(BotsService(bots_interface),
|
|
55 |
- self._server)
|
|
56 |
- remote_execution_pb2_grpc.add_ExecutionServicer_to_server(ExecutionService(execution_instance),
|
|
57 |
- self._server)
|
|
58 |
- operations_pb2_grpc.add_OperationsServicer_to_server(OperationsService(execution_instance),
|
|
59 |
- self._server)
|
|
49 |
+ if credentials is not None:
|
|
50 |
+ self._server.add_secure_port(address, credentials)
|
|
51 |
+ else:
|
|
52 |
+ self._server.add_insecure_port(address)
|
|
60 | 53 |
|
61 | 54 |
if cas_storage is not None:
|
62 | 55 |
cas_service = ContentAddressableStorageService(cas_storage)
|
... | ... | @@ -69,6 +62,20 @@ class BuildGridServer: |
69 | 62 |
remote_execution_pb2_grpc.add_ActionCacheServicer_to_server(action_cache_service,
|
70 | 63 |
self._server)
|
71 | 64 |
|
65 |
+ buildgrid_instances = {}
|
|
66 |
+ if not instances:
|
|
67 |
+ buildgrid_instances["main"] = BuildGridInstance(action_cache, cas_storage)
|
|
68 |
+ else:
|
|
69 |
+ for name in instances:
|
|
70 |
+ buildgrid_instances[name] = BuildGridInstance(action_cache, cas_storage)
|
|
71 |
+ |
|
72 |
+ bots_pb2_grpc.add_BotsServicer_to_server(BotsService(buildgrid_instances),
|
|
73 |
+ self._server)
|
|
74 |
+ remote_execution_pb2_grpc.add_ExecutionServicer_to_server(ExecutionService(buildgrid_instances),
|
|
75 |
+ self._server)
|
|
76 |
+ operations_pb2_grpc.add_OperationsServicer_to_server(OperationsService(buildgrid_instances),
|
|
77 |
+ self._server)
|
|
78 |
+ |
|
72 | 79 |
def start(self):
|
73 | 80 |
self._server.start()
|
74 | 81 |
|
... | ... | @@ -56,12 +56,14 @@ class ExecutionInstance: |
56 | 56 |
|
57 | 57 |
def get_operation(self, name):
|
58 | 58 |
operation = self._scheduler.jobs.get(name)
|
59 |
+ |
|
59 | 60 |
if operation is None:
|
60 | 61 |
raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
62 |
+ |
|
61 | 63 |
else:
|
62 | 64 |
return operation.get_operation()
|
63 | 65 |
|
64 |
- def list_operations(self, name, list_filter, page_size, page_token):
|
|
66 |
+ def list_operations(self, list_filter, page_size, page_token):
|
|
65 | 67 |
# TODO: Pages
|
66 | 68 |
# Spec says number of pages and length of a page are optional
|
67 | 69 |
return self._scheduler.get_operations()
|
... | ... | @@ -72,10 +74,6 @@ class ExecutionInstance: |
72 | 74 |
except KeyError:
|
73 | 75 |
raise InvalidArgumentError("Operation name does not exist: {}".format(name))
|
74 | 76 |
|
75 |
- def cancel_operation(self, name):
|
|
76 |
- # TODO: Cancel leases
|
|
77 |
- raise NotImplementedError("Cancelled operations not supported")
|
|
78 |
- |
|
79 | 77 |
def register_message_client(self, name, queue):
|
80 | 78 |
try:
|
81 | 79 |
self._scheduler.register_client(name, queue)
|
... | ... | @@ -35,23 +35,23 @@ from .._exceptions import InvalidArgumentError |
35 | 35 |
|
36 | 36 |
class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
|
37 | 37 |
|
38 |
- def __init__(self, instance):
|
|
38 |
+ def __init__(self, instances):
|
|
39 | 39 |
self.logger = logging.getLogger(__name__)
|
40 |
- self._instance = instance
|
|
40 |
+ self._instances = instances
|
|
41 | 41 |
|
42 | 42 |
def Execute(self, request, context):
|
43 |
- # Ignore request.instance_name for now
|
|
44 |
- # Have only one instance
|
|
45 | 43 |
try:
|
46 | 44 |
message_queue = queue.Queue()
|
47 |
- operation = self._instance.execute(request.action_digest,
|
|
48 |
- request.skip_cache_lookup,
|
|
49 |
- message_queue)
|
|
45 |
+ instance = self._get_instance(request.instance_name)
|
|
46 |
+ operation = instance.execute(request.action_digest,
|
|
47 |
+ request.skip_cache_lookup,
|
|
48 |
+ message_queue)
|
|
50 | 49 |
|
51 |
- context.add_callback(partial(self._remove_client, operation.name, message_queue))
|
|
50 |
+ context.add_callback(partial(instance.unregister_message_client,
|
|
51 |
+ operation.name, message_queue))
|
|
52 | 52 |
|
53 |
- yield from self._stream_operation_updates(message_queue,
|
|
54 |
- operation.name)
|
|
53 |
+ yield from instance.stream_operation_updates(message_queue,
|
|
54 |
+ operation.name)
|
|
55 | 55 |
|
56 | 56 |
except InvalidArgumentError as e:
|
57 | 57 |
self.logger.error(e)
|
... | ... | @@ -59,23 +59,25 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer): |
59 | 59 |
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
60 | 60 |
yield operations_pb2.Operation()
|
61 | 61 |
|
62 |
- except NotImplementedError as e:
|
|
63 |
- self.logger.error(e)
|
|
64 |
- context.set_details(str(e))
|
|
65 |
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
|
66 |
- yield operations_pb2.Operation()
|
|
67 |
- |
|
68 | 62 |
def WaitExecution(self, request, context):
|
69 | 63 |
try:
|
64 |
+ names = request.name.split("/")
|
|
65 |
+ |
|
66 |
+ # Operation name should be in format:
|
|
67 |
+ # {instance/name}/{operation_id}
|
|
68 |
+ instance_name = ''.join(names[0:-1])
|
|
69 |
+ |
|
70 | 70 |
message_queue = queue.Queue()
|
71 |
- operation_name = request.name
|
|
71 |
+ operation_name = names[-1]
|
|
72 |
+ instance = self._get_instance(instance_name)
|
|
72 | 73 |
|
73 |
- self._instance.register_message_client(operation_name, message_queue)
|
|
74 |
+ instance.register_message_client(operation_name, message_queue)
|
|
74 | 75 |
|
75 |
- context.add_callback(partial(self._remove_client, operation_name, message_queue))
|
|
76 |
+ context.add_callback(partial(instance.unregister_message_client,
|
|
77 |
+ operation_name, message_queue))
|
|
76 | 78 |
|
77 |
- yield from self._stream_operation_updates(message_queue,
|
|
78 |
- operation_name)
|
|
79 |
+ yield from instance.stream_operation_updates(message_queue,
|
|
80 |
+ operation_name)
|
|
79 | 81 |
|
80 | 82 |
except InvalidArgumentError as e:
|
81 | 83 |
self.logger.error(e)
|
... | ... | @@ -83,12 +85,9 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer): |
83 | 85 |
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
84 | 86 |
yield operations_pb2.Operation()
|
85 | 87 |
|
86 |
- def _remove_client(self, operation_name, message_queue):
|
|
87 |
- self._instance.unregister_message_client(operation_name, message_queue)
|
|
88 |
+ def _get_instance(self, name):
|
|
89 |
+ try:
|
|
90 |
+ return self._instances[name]
|
|
88 | 91 |
|
89 |
- def _stream_operation_updates(self, message_queue, operation_name):
|
|
90 |
- operation = message_queue.get()
|
|
91 |
- while not operation.done:
|
|
92 |
- yield operation
|
|
93 |
- operation = message_queue.get()
|
|
94 |
- yield operation
|
|
92 |
+ except KeyError:
|
|
93 |
+ raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
|
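Both services resolve instances from fully-qualified resource names of the form {instance_name}/{operation_id}. The splitting convention used throughout the patch, pulled out into a stand-alone helper for illustration:

.. code-block:: python

    def split_resource_name(name):
        """Split an 'instance_name/operation_id' style name the way the services above do."""
        names = name.split("/")
        # Everything but the last component is the instance name; an
        # unqualified name therefore maps to the '' (default) instance.
        return ''.join(names[0:-1]), names[-1]


    assert split_resource_name("main/1234-abcd") == ("main", "1234-abcd")
    assert split_resource_name("1234-abcd") == ("", "1234-abcd")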
... | ... | @@ -23,6 +23,8 @@ import logging |
23 | 23 |
|
24 | 24 |
import grpc
|
25 | 25 |
|
26 |
+from google.protobuf.empty_pb2 import Empty
|
|
27 |
+ |
|
26 | 28 |
from buildgrid._protos.google.longrunning import operations_pb2_grpc, operations_pb2
|
27 | 29 |
|
28 | 30 |
from .._exceptions import InvalidArgumentError
|
... | ... | @@ -30,42 +32,102 @@ from .._exceptions import InvalidArgumentError |
30 | 32 |
|
31 | 33 |
class OperationsService(operations_pb2_grpc.OperationsServicer):
|
32 | 34 |
|
33 |
- def __init__(self, instance):
|
|
34 |
- self._instance = instance
|
|
35 |
+ def __init__(self, instances):
|
|
36 |
+ self._instances = instances
|
|
35 | 37 |
self.logger = logging.getLogger(__name__)
|
36 | 38 |
|
37 | 39 |
def GetOperation(self, request, context):
|
38 | 40 |
try:
|
39 |
- return self._instance.get_operation(request.name)
|
|
41 |
+ name = request.name
|
|
42 |
+ operation_name = self._get_operation_name(name)
|
|
43 |
+ |
|
44 |
+ instance = self._get_instance(name)
|
|
45 |
+ |
|
46 |
+ operation = instance.get_operation(operation_name)
|
|
47 |
+ operation.name = name
|
|
48 |
+ return operation
|
|
40 | 49 |
|
41 | 50 |
except InvalidArgumentError as e:
|
42 | 51 |
self.logger.error(e)
|
43 | 52 |
context.set_details(str(e))
|
44 | 53 |
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
45 |
- return operations_pb2.Operation()
|
|
54 |
+ |
|
55 |
+ return operations_pb2.Operation()
|
|
46 | 56 |
|
47 | 57 |
def ListOperations(self, request, context):
|
48 |
- return self._instance.list_operations(request.name,
|
|
49 |
- request.filter,
|
|
58 |
+ try:
|
|
59 |
+ # Name should be the collection name
|
|
60 |
+ # Or in this case, the instance_name
|
|
61 |
+ name = request.name
|
|
62 |
+ instance = self._get_instance(name)
|
|
63 |
+ |
|
64 |
+ result = instance.list_operations(request.filter,
|
|
50 | 65 |
request.page_size,
|
51 | 66 |
request.page_token)
|
52 | 67 |
|
68 |
+ for operation in result.operations:
|
|
69 |
+ operation.name = "{}/{}".format(name, operation.name)
|
|
70 |
+ |
|
71 |
+ return result
|
|
72 |
+ |
|
73 |
+ except InvalidArgumentError as e:
|
|
74 |
+ self.logger.error(e)
|
|
75 |
+ context.set_details(str(e))
|
|
76 |
+ context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
|
77 |
+ |
|
78 |
+ return operations_pb2.ListOperationsResponse()
|
|
79 |
+ |
|
53 | 80 |
def DeleteOperation(self, request, context):
|
54 | 81 |
try:
|
55 |
- return self._instance.delete_operation(request.name)
|
|
82 |
+ name = request.name
|
|
83 |
+ operation_name = self._get_operation_name(name)
|
|
84 |
+ |
|
85 |
+ instance = self._get_instance(name)
|
|
86 |
+ |
|
87 |
+ instance.delete_operation(operation_name)
|
|
56 | 88 |
|
57 | 89 |
except InvalidArgumentError as e:
|
58 | 90 |
self.logger.error(e)
|
59 | 91 |
context.set_details(str(e))
|
60 | 92 |
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
61 |
- return operations_pb2.Operation()
|
|
93 |
+ |
|
94 |
+ return Empty()
|
|
62 | 95 |
|
63 | 96 |
def CancelOperation(self, request, context):
|
64 | 97 |
try:
|
65 |
- return self._instance.cancel_operation(request.name)
|
|
98 |
+ name = request.name
|
|
99 |
+ operation_name = self._get_operation_name(name)
|
|
100 |
+ |
|
101 |
+ instance = self._get_instance(name)
|
|
102 |
+ |
|
103 |
+ instance.cancel_operation(operation_name)
|
|
66 | 104 |
|
67 | 105 |
except NotImplementedError as e:
|
68 | 106 |
self.logger.error(e)
|
69 | 107 |
context.set_details(str(e))
|
70 | 108 |
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
71 |
- return operations_pb2.Operation()
|
|
109 |
+ |
|
110 |
+ except InvalidArgumentError as e:
|
|
111 |
+ self.logger.error(e)
|
|
112 |
+ context.set_details(str(e))
|
|
113 |
+ context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
|
|
114 |
+ |
|
115 |
+ return Empty()
|
|
116 |
+ |
|
117 |
+ def _get_operation_name(self, name):
|
|
118 |
+ return name.split("/")[-1]
|
|
119 |
+ |
|
120 |
+ def _get_instance(self, name):
|
|
121 |
+ try:
|
|
122 |
+ names = name.split("/")
|
|
123 |
+ |
|
124 |
+ # Operation name should be in format:
|
|
125 |
+ # {instance/name}/{operation_id}
|
|
126 |
+ instance_name = ''.join(names[0:-1])
|
|
127 |
+ if not instance_name:
|
|
128 |
+ return self._instances[name]
|
|
129 |
+ |
|
130 |
+ return self._instances[instance_name]
|
|
131 |
+ |
|
132 |
+ except KeyError:
|
|
133 |
+ raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
|
... | ... | @@ -54,7 +54,8 @@ class BotsInterface: |
54 | 54 |
pass
|
55 | 55 |
|
56 | 56 |
# Bot session name, selected by the server
|
57 |
- name = str(uuid.uuid4())
|
|
57 |
+ name = "{}/{}".format(parent, str(uuid.uuid4()))
|
|
58 |
+ |
|
58 | 59 |
bot_session.name = name
|
59 | 60 |
|
60 | 61 |
self._bot_ids[name] = bot_id
|
... | ... | @@ -33,14 +33,17 @@ from .._exceptions import InvalidArgumentError, OutofSyncError |
33 | 33 |
|
34 | 34 |
class BotsService(bots_pb2_grpc.BotsServicer):
|
35 | 35 |
|
36 |
- def __init__(self, instance):
|
|
37 |
- self._instance = instance
|
|
36 |
+ def __init__(self, instances):
|
|
37 |
+ self._instances = instances
|
|
38 | 38 |
self.logger = logging.getLogger(__name__)
|
39 | 39 |
|
40 | 40 |
def CreateBotSession(self, request, context):
|
41 | 41 |
try:
|
42 |
- return self._instance.create_bot_session(request.parent,
|
|
43 |
- request.bot_session)
|
|
42 |
+ parent = request.parent
|
|
43 |
+ instance = self._get_instance(request.parent)
|
|
44 |
+ return instance.create_bot_session(parent,
|
|
45 |
+ request.bot_session)
|
|
46 |
+ |
|
44 | 47 |
except InvalidArgumentError as e:
|
45 | 48 |
self.logger.error(e)
|
46 | 49 |
context.set_details(str(e))
|
... | ... | @@ -50,8 +53,15 @@ class BotsService(bots_pb2_grpc.BotsServicer): |
50 | 53 |
|
51 | 54 |
def UpdateBotSession(self, request, context):
|
52 | 55 |
try:
|
53 |
- return self._instance.update_bot_session(request.name,
|
|
54 |
- request.bot_session)
|
|
56 |
+ names = request.name.split("/")
|
|
57 |
+ # Bot session name should be in format:
|
|
58 |
+ # {instance/name}/{uuid}
|
|
59 |
+ instance_name = ''.join(names[0:-1])
|
|
60 |
+ |
|
61 |
+ instance = self._get_instance(instance_name)
|
|
62 |
+ return instance.update_bot_session(request.name,
|
|
63 |
+ request.bot_session)
|
|
64 |
+ |
|
55 | 65 |
except InvalidArgumentError as e:
|
56 | 66 |
self.logger.error(e)
|
57 | 67 |
context.set_details(str(e))
|
... | ... | @@ -72,3 +82,10 @@ class BotsService(bots_pb2_grpc.BotsServicer): |
72 | 82 |
def PostBotEventTemp(self, request, context):
|
73 | 83 |
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
74 | 84 |
return Empty()
|
85 |
+ |
|
86 |
+ def _get_instance(self, name):
|
|
87 |
+ try:
|
|
88 |
+ return self._instances[name]
|
|
89 |
+ |
|
90 |
+ except KeyError:
|
|
91 |
+ raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
|
... | ... | @@ -8,7 +8,7 @@ In one terminal, start a server: |
8 | 8 |
|
9 | 9 |
.. code-block:: sh
|
10 | 10 |
|
11 |
- bgd server start
|
|
11 |
+ bgd server start --allow-insecure
|
|
12 | 12 |
|
13 | 13 |
In another terminal, send a request for work:
|
14 | 14 |
|
... | ... | @@ -27,7 +27,7 @@ Now start a BuildGrid server, passing it a directory it can write a CAS to: |
27 | 27 |
|
28 | 28 |
.. code-block:: sh
|
29 | 29 |
|
30 |
- bgd server start --cas disk --cas-cache disk --cas-disk-directory /path/to/empty/directory
|
|
30 |
+ bgd server start --allow-insecure --cas disk --cas-cache disk --cas-disk-directory /path/to/empty/directory
|
|
31 | 31 |
|
32 | 32 |
Start the following bot session:
|
33 | 33 |
|
... | ... | @@ -18,7 +18,6 @@ |
18 | 18 |
# pylint: disable=redefined-outer-name
|
19 | 19 |
|
20 | 20 |
import copy
|
21 |
-import uuid
|
|
22 | 21 |
from unittest import mock
|
23 | 22 |
|
24 | 23 |
import grpc
|
... | ... | @@ -27,7 +26,7 @@ import pytest |
27 | 26 |
|
28 | 27 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
29 | 28 |
from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2
|
30 |
-from buildgrid.server import scheduler, job
|
|
29 |
+from buildgrid.server import job, buildgrid_instance
|
|
31 | 30 |
from buildgrid.server.job import LeaseState
|
32 | 31 |
from buildgrid.server.worker import bots_interface, bots_service
|
33 | 32 |
|
... | ... | @@ -53,8 +52,8 @@ def bot_session(): |
53 | 52 |
|
54 | 53 |
|
55 | 54 |
@pytest.fixture
|
56 |
-def schedule():
|
|
57 |
- yield scheduler.Scheduler()
|
|
55 |
+def buildgrid():
|
|
56 |
+ yield buildgrid_instance.BuildGridInstance()
|
|
58 | 57 |
|
59 | 58 |
|
60 | 59 |
@pytest.fixture
|
... | ... | @@ -64,19 +63,17 @@ def bots(schedule): |
64 | 63 |
|
65 | 64 |
# Instance to test
|
66 | 65 |
@pytest.fixture
|
67 |
-def instance(bots):
|
|
68 |
- yield bots_service.BotsService(bots)
|
|
66 |
+def instance(buildgrid):
|
|
67 |
+ instances = {"": buildgrid}
|
|
68 |
+ yield bots_service.BotsService(instances)
|
|
69 | 69 |
|
70 | 70 |
|
71 | 71 |
def test_create_bot_session(bot_session, context, instance):
|
72 |
- parent = 'rach'
|
|
73 |
- request = bots_pb2.CreateBotSessionRequest(parent=parent,
|
|
74 |
- bot_session=bot_session)
|
|
72 |
+ request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
|
|
75 | 73 |
|
76 | 74 |
response = instance.CreateBotSession(request, context)
|
77 | 75 |
|
78 | 76 |
assert isinstance(response, bots_pb2.BotSession)
|
79 |
- assert uuid.UUID(response.name, version=4)
|
|
80 | 77 |
assert bot_session.bot_id == response.bot_id
|
81 | 78 |
|
82 | 79 |
|
... | ... | @@ -92,8 +89,7 @@ def test_create_bot_session_bot_id_fail(context, instance): |
92 | 89 |
|
93 | 90 |
|
94 | 91 |
def test_update_bot_session(bot_session, context, instance):
|
95 |
- request = bots_pb2.CreateBotSessionRequest(parent='',
|
|
96 |
- bot_session=bot_session)
|
|
92 |
+ request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
|
|
97 | 93 |
bot = instance.CreateBotSession(request, context)
|
98 | 94 |
|
99 | 95 |
request = bots_pb2.UpdateBotSessionRequest(name=bot.name,
|
... | ... | @@ -106,8 +102,7 @@ def test_update_bot_session(bot_session, context, instance): |
106 | 102 |
|
107 | 103 |
|
108 | 104 |
def test_update_bot_session_zombie(bot_session, context, instance):
|
109 |
- request = bots_pb2.CreateBotSessionRequest(parent='',
|
|
110 |
- bot_session=bot_session)
|
|
105 |
+ request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
|
|
111 | 106 |
bot = instance.CreateBotSession(request, context)
|
112 | 107 |
# Update server with incorrect UUID by rotating it
|
113 | 108 |
bot.name = bot.name[len(bot.name): 0]
|
... | ... | @@ -121,8 +116,7 @@ def test_update_bot_session_zombie(bot_session, context, instance): |
121 | 116 |
|
122 | 117 |
|
123 | 118 |
def test_update_bot_session_bot_id_fail(bot_session, context, instance):
|
124 |
- request = bots_pb2.UpdateBotSessionRequest(name='ana',
|
|
125 |
- bot_session=bot_session)
|
|
119 |
+ request = bots_pb2.UpdateBotSessionRequest(bot_session=bot_session)
|
|
126 | 120 |
|
127 | 121 |
instance.UpdateBotSession(request, context)
|
128 | 122 |
|
... | ... | @@ -131,17 +125,15 @@ def test_update_bot_session_bot_id_fail(bot_session, context, instance): |
131 | 125 |
|
132 | 126 |
@pytest.mark.parametrize("number_of_jobs", [0, 1, 3, 500])
|
133 | 127 |
def test_number_of_leases(number_of_jobs, bot_session, context, instance):
|
134 |
- request = bots_pb2.CreateBotSessionRequest(parent='',
|
|
135 |
- bot_session=bot_session)
|
|
128 |
+ request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
|
|
136 | 129 |
# Inject work
|
137 | 130 |
for _ in range(0, number_of_jobs):
|
138 | 131 |
action_digest = remote_execution_pb2.Digest()
|
139 |
- instance._instance._scheduler.append_job(job.Job(action_digest))
|
|
132 |
+ instance._instances[""].execute(action_digest, True)
|
|
140 | 133 |
|
141 | 134 |
response = instance.CreateBotSession(request, context)
|
142 | 135 |
|
143 | 136 |
assert len(response.leases) == number_of_jobs
|
144 |
- assert isinstance(response, bots_pb2.BotSession)
|
|
145 | 137 |
|
146 | 138 |
|
147 | 139 |
def test_update_leases_with_work(bot_session, context, instance):
|
... | ... | @@ -149,7 +141,7 @@ def test_update_leases_with_work(bot_session, context, instance): |
149 | 141 |
bot_session=bot_session)
|
150 | 142 |
# Inject work
|
151 | 143 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
152 |
- instance._instance._scheduler.append_job(job.Job(action_digest))
|
|
144 |
+ instance._instances[""].execute(action_digest, True)
|
|
153 | 145 |
|
154 | 146 |
response = instance.CreateBotSession(request, context)
|
155 | 147 |
|
... | ... | @@ -159,7 +151,6 @@ def test_update_leases_with_work(bot_session, context, instance): |
159 | 151 |
|
160 | 152 |
assert isinstance(response, bots_pb2.BotSession)
|
161 | 153 |
assert response.leases[0].state == LeaseState.PENDING.value
|
162 |
- assert uuid.UUID(response.leases[0].id, version=4)
|
|
163 | 154 |
assert response_action == action_digest
|
164 | 155 |
|
165 | 156 |
|
... | ... | @@ -172,7 +163,7 @@ def test_update_leases_work_complete(bot_session, context, instance): |
172 | 163 |
|
173 | 164 |
# Inject work
|
174 | 165 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
175 |
- instance._instance._scheduler.append_job(job.Job(action_digest))
|
|
166 |
+ instance._instances[""].execute(action_digest, True)
|
|
176 | 167 |
|
177 | 168 |
request = bots_pb2.UpdateBotSessionRequest(name=response.name,
|
178 | 169 |
bot_session=response)
|
... | ... | @@ -200,7 +191,7 @@ def test_work_rejected_by_bot(bot_session, context, instance): |
200 | 191 |
bot_session=bot_session)
|
201 | 192 |
# Inject work
|
202 | 193 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
203 |
- instance._instance._scheduler.append_job(job.Job(action_digest))
|
|
194 |
+ instance._instances[""].execute(action_digest, True)
|
|
204 | 195 |
|
205 | 196 |
# Simulated the severed binding between client and server
|
206 | 197 |
response = copy.deepcopy(instance.CreateBotSession(request, context))
|
... | ... | @@ -222,7 +213,8 @@ def test_work_out_of_sync_from_pending(state, bot_session, context, instance): |
222 | 213 |
bot_session=bot_session)
|
223 | 214 |
# Inject work
|
224 | 215 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
225 |
- instance._instance._scheduler.append_job(job.Job(action_digest))
|
|
216 |
+ instance._instances[""].execute(action_digest, True)
|
|
217 |
+ |
|
226 | 218 |
# Simulated the severed binding between client and server
|
227 | 219 |
response = copy.deepcopy(instance.CreateBotSession(request, context))
|
228 | 220 |
|
... | ... | @@ -242,7 +234,8 @@ def test_work_out_of_sync_from_active(state, bot_session, context, instance): |
242 | 234 |
bot_session=bot_session)
|
243 | 235 |
# Inject work
|
244 | 236 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
245 |
- instance._instance._scheduler.append_job(job.Job(action_digest))
|
|
237 |
+ instance._instances[""].execute(action_digest, True)
|
|
238 |
+ |
|
246 | 239 |
# Simulated the severed binding between client and server
|
247 | 240 |
response = copy.deepcopy(instance.CreateBotSession(request, context))
|
248 | 241 |
|
... | ... | @@ -268,7 +261,8 @@ def test_work_active_to_active(bot_session, context, instance): |
268 | 261 |
bot_session=bot_session)
|
269 | 262 |
# Inject work
|
270 | 263 |
action_digest = remote_execution_pb2.Digest(hash='gaff')
|
271 |
- instance._instance._scheduler.append_job(job.Job(action_digest))
|
|
264 |
+ instance._instances[""].execute(action_digest, True)
|
|
265 |
+ |
|
272 | 266 |
# Simulated the severed binding between client and server
|
273 | 267 |
response = copy.deepcopy(instance.CreateBotSession(request, context))
|
274 | 268 |
|
... | ... | @@ -20,15 +20,17 @@ |
20 | 20 |
import uuid
|
21 | 21 |
from unittest import mock
|
22 | 22 |
|
23 |
+import grpc
|
|
23 | 24 |
from grpc._server import _Context
|
24 | 25 |
import pytest
|
26 |
+from google.protobuf import any_pb2
|
|
25 | 27 |
|
26 | 28 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
27 | 29 |
from buildgrid._protos.google.longrunning import operations_pb2
|
28 | 30 |
|
29 |
-from buildgrid.server import scheduler, job
|
|
31 |
+from buildgrid.server import job, buildgrid_instance
|
|
30 | 32 |
from buildgrid.server.cas.storage import lru_memory_cache
|
31 |
-from buildgrid.server.execution import action_cache, execution_instance, execution_service
|
|
33 |
+from buildgrid.server.execution import action_cache, execution_service
|
|
32 | 34 |
|
33 | 35 |
|
34 | 36 |
@pytest.fixture
|
... | ... | @@ -38,19 +40,21 @@ def context(): |
38 | 40 |
|
39 | 41 |
|
40 | 42 |
@pytest.fixture(params=["action-cache", "no-action-cache"])
|
41 |
-def execution(request):
|
|
43 |
+def buildgrid(request):
|
|
42 | 44 |
if request.param == "action-cache":
|
43 | 45 |
storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
|
44 | 46 |
cache = action_cache.ActionCache(storage, 50)
|
45 |
- schedule = scheduler.Scheduler(cache)
|
|
46 |
- return execution_instance.ExecutionInstance(schedule, storage)
|
|
47 |
- return execution_instance.ExecutionInstance(scheduler.Scheduler())
|
|
47 |
+ |
|
48 |
+ return buildgrid_instance.BuildGridInstance(action_cache=cache,
|
|
49 |
+ cas_storage=storage)
|
|
50 |
+ return buildgrid_instance.BuildGridInstance()
|
|
48 | 51 |
|
49 | 52 |
|
50 | 53 |
# Instance to test
|
51 | 54 |
@pytest.fixture
|
52 |
-def instance(execution):
|
|
53 |
- yield execution_service.ExecutionService(execution)
|
|
55 |
+def instance(buildgrid):
|
|
56 |
+ instances = {"": buildgrid}
|
|
57 |
+ yield execution_service.ExecutionService(instances)
|
|
54 | 58 |
|
55 | 59 |
|
56 | 60 |
@pytest.mark.parametrize("skip_cache_lookup", [True, False])
|
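For reference, the new fixtures boil down to building a BuildGridInstance (optionally backed by an in-memory CAS and action cache) and handing the service a mapping from instance names to instances, with "" as the unnamed default. A condensed sketch of that wiring, drawn from the hunk above:

    from buildgrid.server import buildgrid_instance
    from buildgrid.server.cas.storage import lru_memory_cache
    from buildgrid.server.execution import action_cache, execution_service

    # Cached variant; buildgrid_instance.BuildGridInstance() with no arguments
    # gives the cache-less flavour exercised by the "no-action-cache" parameter.
    storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
    cache = action_cache.ActionCache(storage, 50)
    grid = buildgrid_instance.BuildGridInstance(action_cache=cache,
                                                cas_storage=storage)

    # Services now receive a name-to-instance map rather than a single instance.
    service = execution_service.ExecutionService({"": grid})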
... | ... | @@ -72,23 +76,45 @@ def test_execute(skip_cache_lookup, instance, context): |
72 | 76 |
assert result.done is False
|
73 | 77 |
|
74 | 78 |
|
75 |
-# def test_wait_execution(instance, context):
|
|
76 |
- # TODO: Figure out why next(response) hangs on the .get()
|
|
77 |
- # method when running in pytest.
|
|
78 |
-# action_digest = remote_execution_pb2.Digest()
|
|
79 |
-# action_digest.hash = 'zhora'
|
|
79 |
+def test_wrong_execute_instance(instance, context):
|
|
80 |
+ request = remote_execution_pb2.ExecuteRequest(instance_name='blade')
|
|
81 |
+ response = instance.Execute(request, context)
|
|
82 |
+ |
|
83 |
+ next(response)
|
|
84 |
+ context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
|
85 |
+ |
|
86 |
+ |
|
87 |
+def test_wait_execution(instance, buildgrid, context):
|
|
88 |
+ action_digest = remote_execution_pb2.Digest()
|
|
89 |
+ action_digest.hash = 'zhora'
|
|
90 |
+ |
|
91 |
+ j = job.Job(action_digest, None)
|
|
92 |
+ j._operation.done = True
|
|
93 |
+ |
|
94 |
+ request = remote_execution_pb2.WaitExecutionRequest(name="{}/{}".format('', j.name))
|
|
80 | 95 |
|
81 |
-# j = job.Job(action_digest, None)
|
|
82 |
-# j._operation.done = True
|
|
96 |
+ buildgrid._scheduler.jobs[j.name] = j
|
|
83 | 97 |
|
84 |
-# request = remote_execution_pb2.WaitExecutionRequest(name=j.name)
|
|
98 |
+ action_result_any = any_pb2.Any()
|
|
99 |
+ action_result = remote_execution_pb2.ActionResult()
|
|
100 |
+ action_result_any.Pack(action_result)
|
|
85 | 101 |
|
86 |
-# instance._instance._scheduler.jobs[j.name] = j
|
|
102 |
+ j.update_execute_stage(job.ExecuteStage.COMPLETED)
|
|
103 |
+ |
|
104 |
+ response = instance.WaitExecution(request, context)
|
|
105 |
+ |
|
106 |
+ result = next(response)
|
|
107 |
+ |
|
108 |
+ assert isinstance(result, operations_pb2.Operation)
|
|
109 |
+ metadata = remote_execution_pb2.ExecuteOperationMetadata()
|
|
110 |
+ result.metadata.Unpack(metadata)
|
|
111 |
+ assert metadata.stage == job.ExecuteStage.COMPLETED.value
|
|
112 |
+ assert uuid.UUID(result.name, version=4)
|
|
113 |
+ assert result.done is True
|
|
87 | 114 |
|
88 |
-# action_result_any = any_pb2.Any()
|
|
89 |
-# action_result = remote_execution_pb2.ActionResult()
|
|
90 |
-# action_result_any.Pack(action_result)
|
|
91 | 115 |
|
92 |
-# instance._instance._scheduler._update_execute_stage(j, job.ExecuteStage.COMPLETED)
|
|
116 |
+def test_wrong_instance_wait_execution(instance, buildgrid, context):
|
|
117 |
+ request = remote_execution_pb2.WaitExecutionRequest(name="blade")
|
|
118 |
+ next(instance.WaitExecution(request, context))
|
|
93 | 119 |
|
94 |
-# response = instance.WaitExecution(request, context)
|
|
120 |
+ context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
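The new wrong-instance tests expect the services to reject unknown instance names with INVALID_ARGUMENT. The lookup itself is not part of this diff, so the following is only an assumed sketch of how such a lookup could behave; the helper name and error message are hypothetical:

    import grpc

    def _get_instance(instances, instance_name, grpc_context):
        # Resolve an instance name against the service's name-to-instance map,
        # signalling unknown names the way the tests above assert.
        try:
            return instances[instance_name]
        except KeyError:
            grpc_context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
            grpc_context.set_details("Invalid instance name: [{}]".format(instance_name))
            return None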
... | ... | @@ -28,10 +28,13 @@ from google.protobuf import any_pb2 |
28 | 28 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
29 | 29 |
from buildgrid._protos.google.longrunning import operations_pb2
|
30 | 30 |
|
31 |
-from buildgrid.server import scheduler
|
|
31 |
+from buildgrid.server import buildgrid_instance
|
|
32 | 32 |
from buildgrid.server._exceptions import InvalidArgumentError
|
33 | 33 |
|
34 |
-from buildgrid.server.execution import execution_instance, operations_service
|
|
34 |
+from buildgrid.server.execution import operations_service
|
|
35 |
+ |
|
36 |
+ |
|
37 |
+instance_name = "blade"
|
|
35 | 38 |
|
36 | 39 |
|
37 | 40 |
# Can mock this
|
... | ... | @@ -52,65 +55,80 @@ def execute_request(): |
52 | 55 |
|
53 | 56 |
|
54 | 57 |
@pytest.fixture
|
55 |
-def schedule():
|
|
56 |
- yield scheduler.Scheduler()
|
|
57 |
- |
|
58 |
- |
|
59 |
-@pytest.fixture
|
|
60 |
-def execution(schedule):
|
|
61 |
- yield execution_instance.ExecutionInstance(schedule)
|
|
58 |
+def buildgrid():
|
|
59 |
+ yield buildgrid_instance.BuildGridInstance()
|
|
62 | 60 |
|
63 | 61 |
|
64 | 62 |
# Instance to test
|
65 | 63 |
@pytest.fixture
|
66 |
-def instance(execution):
|
|
67 |
- yield operations_service.OperationsService(execution)
|
|
64 |
+def instance(buildgrid):
|
|
65 |
+ instances = {instance_name: buildgrid}
|
|
66 |
+ yield operations_service.OperationsService(instances)
|
|
68 | 67 |
|
69 | 68 |
|
70 | 69 |
# Queue an execution, get operation corresponding to that request
|
71 |
-def test_get_operation(instance, execute_request, context):
|
|
72 |
- response_execute = instance._instance.execute(execute_request.action_digest,
|
|
73 |
- execute_request.skip_cache_lookup)
|
|
70 |
+def test_get_operation(instance, buildgrid, execute_request, context):
|
|
71 |
+ response_execute = buildgrid.execute(execute_request.action_digest,
|
|
72 |
+ execute_request.skip_cache_lookup)
|
|
74 | 73 |
|
75 | 74 |
request = operations_pb2.GetOperationRequest()
|
76 | 75 |
|
77 |
- request.name = response_execute.name
|
|
76 |
+ request.name = "{}/{}".format(instance_name, response_execute.name)
|
|
78 | 77 |
|
79 | 78 |
response = instance.GetOperation(request, context)
|
80 | 79 |
assert response is response_execute
|
81 | 80 |
|
82 | 81 |
|
83 | 82 |
def test_get_operation_fail(instance, context):
|
83 |
+ request = operations_pb2.GetOperationRequest()
|
|
84 |
+ request.name = "{}/{}".format(instance_name, "runner")
|
|
85 |
+ instance.GetOperation(request, context)
|
|
86 |
+ |
|
87 |
+ context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
|
88 |
+ |
|
89 |
+ |
|
90 |
+def test_get_operation_instance_fail(instance, context):
|
|
84 | 91 |
request = operations_pb2.GetOperationRequest()
|
85 | 92 |
instance.GetOperation(request, context)
|
86 | 93 |
|
87 | 94 |
context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
88 | 95 |
|
89 | 96 |
|
90 |
-def test_list_operations(instance, execute_request, context):
|
|
91 |
- response_execute = instance._instance.execute(execute_request.action_digest,
|
|
92 |
- execute_request.skip_cache_lookup)
|
|
97 |
+def test_list_operations(instance, buildgrid, execute_request, context):
|
|
98 |
+ response_execute = buildgrid.execute(execute_request.action_digest,
|
|
99 |
+ execute_request.skip_cache_lookup)
|
|
93 | 100 |
|
94 |
- request = operations_pb2.ListOperationsRequest()
|
|
101 |
+ request = operations_pb2.ListOperationsRequest(name=instance_name)
|
|
95 | 102 |
response = instance.ListOperations(request, context)
|
96 | 103 |
|
97 |
- assert response.operations[0].name == response_execute.name
|
|
104 |
+ assert response.operations[0].name.split('/')[-1] == response_execute.name
|
|
105 |
+ |
|
98 | 106 |
|
107 |
+def test_list_operations_instance_fail(instance, buildgrid, execute_request, context):
|
|
108 |
+ buildgrid.execute(execute_request.action_digest,
|
|
109 |
+ execute_request.skip_cache_lookup)
|
|
99 | 110 |
|
100 |
-def test_list_operations_with_result(instance, execute_request, context):
|
|
101 |
- response_execute = instance._instance.execute(execute_request.action_digest,
|
|
102 |
- execute_request.skip_cache_lookup)
|
|
111 |
+ request = operations_pb2.ListOperationsRequest()
|
|
112 |
+ instance.ListOperations(request, context)
|
|
113 |
+ |
|
114 |
+ context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
|
115 |
+ |
|
116 |
+ |
|
117 |
+def test_list_operations_with_result(instance, buildgrid, execute_request, context):
|
|
118 |
+ response_execute = buildgrid.execute(execute_request.action_digest,
|
|
119 |
+ execute_request.skip_cache_lookup)
|
|
103 | 120 |
|
104 | 121 |
action_result = remote_execution_pb2.ActionResult()
|
105 | 122 |
output_file = remote_execution_pb2.OutputFile(path='unicorn')
|
106 | 123 |
action_result.output_files.extend([output_file])
|
107 | 124 |
|
108 |
- instance._instance._scheduler.job_complete(response_execute.name, _pack_any(action_result))
|
|
125 |
+ buildgrid._scheduler.job_complete(response_execute.name,
|
|
126 |
+ _pack_any(action_result))
|
|
109 | 127 |
|
110 |
- request = operations_pb2.ListOperationsRequest()
|
|
128 |
+ request = operations_pb2.ListOperationsRequest(name=instance_name)
|
|
111 | 129 |
response = instance.ListOperations(request, context)
|
112 | 130 |
|
113 |
- assert response.operations[0].name == response_execute.name
|
|
131 |
+ assert response.operations[0].name.split('/')[-1] == response_execute.name
|
|
114 | 132 |
|
115 | 133 |
execute_response = remote_execution_pb2.ExecuteResponse()
|
116 | 134 |
response.operations[0].response.Unpack(execute_response)
|
... | ... | @@ -118,7 +136,7 @@ def test_list_operations_with_result(instance, execute_request, context): |
118 | 136 |
|
119 | 137 |
|
120 | 138 |
def test_list_operations_empty(instance, context):
|
121 |
- request = operations_pb2.ListOperationsRequest()
|
|
139 |
+ request = operations_pb2.ListOperationsRequest(name=instance_name)
|
|
122 | 140 |
|
123 | 141 |
response = instance.ListOperations(request, context)
|
124 | 142 |
|
... | ... | @@ -126,21 +144,23 @@ def test_list_operations_empty(instance, context): |
126 | 144 |
|
127 | 145 |
|
128 | 146 |
# Send an execution off, delete it, then looking up the operation should fail
|
129 |
-def test_delete_operation(instance, execute_request, context):
|
|
130 |
- response_execute = instance._instance.execute(execute_request.action_digest,
|
|
131 |
- execute_request.skip_cache_lookup)
|
|
147 |
+def test_delete_operation(instance, buildgrid, execute_request, context):
|
|
148 |
+ response_execute = buildgrid.execute(execute_request.action_digest,
|
|
149 |
+ execute_request.skip_cache_lookup)
|
|
132 | 150 |
request = operations_pb2.DeleteOperationRequest()
|
133 |
- request.name = response_execute.name
|
|
151 |
+ request.name = "{}/{}".format(instance_name, response_execute.name)
|
|
134 | 152 |
instance.DeleteOperation(request, context)
|
135 | 153 |
|
136 | 154 |
request = operations_pb2.GetOperationRequest()
|
137 |
- request.name = response_execute.name
|
|
155 |
+ request.name = "{}/{}".format(instance_name, response_execute.name)
|
|
156 |
+ |
|
138 | 157 |
with pytest.raises(InvalidArgumentError):
|
139 |
- instance._instance.get_operation(response_execute.name)
|
|
158 |
+ buildgrid.get_operation(response_execute.name)
|
|
140 | 159 |
|
141 | 160 |
|
142 |
-def test_delete_operation_fail(instance, execute_request, context):
|
|
161 |
+def test_delete_operation_fail(instance, context):
|
|
143 | 162 |
request = operations_pb2.DeleteOperationRequest()
|
163 |
+ request.name = "{}/{}".format(instance_name, "runner")
|
|
144 | 164 |
instance.DeleteOperation(request, context)
|
145 | 165 |
|
146 | 166 |
context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
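Throughout these operations-service tests, operation names are now composed as "<instance_name>/<operation_name>" and the instance prefix is stripped again with name.split('/')[-1]. A small self-contained illustration of that convention (the helper names are illustrative, not part of the change):

    def _compose_operation_name(instance_name, operation_name):
        return "{}/{}".format(instance_name, operation_name)

    def _split_operation_name(resource_name):
        # Everything before the last '/' is the instance; the rest is the operation.
        instance_name, _, operation_name = resource_name.rpartition('/')
        return instance_name, operation_name

    assert _split_operation_name(_compose_operation_name("blade", "runner")) == ("blade", "runner")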
... | ... | @@ -148,11 +168,19 @@ def test_delete_operation_fail(instance, execute_request, context): |
148 | 168 |
|
149 | 169 |
def test_cancel_operation(instance, context):
|
150 | 170 |
request = operations_pb2.CancelOperationRequest()
|
171 |
+ request.name = "{}/{}".format(instance_name, "runner")
|
|
151 | 172 |
instance.CancelOperation(request, context)
|
152 | 173 |
|
153 | 174 |
context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
|
154 | 175 |
|
155 | 176 |
|
177 |
+def test_cancel_operation_instance_fail(instance, context):
|
|
178 |
+ request = operations_pb2.CancelOperationRequest()
|
|
179 |
+ instance.CancelOperation(request, context)
|
|
180 |
+ |
|
181 |
+ context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
|
182 |
+ |
|
183 |
+ |
|
156 | 184 |
def _pack_any(pack):
|
157 | 185 |
some_any = any_pb2.Any()
|
158 | 186 |
some_any.Pack(pack)
|