finn pushed to branch finn/instances at BuildGrid / buildgrid
Commits:
- 5108ed23 by finn at 2018-08-23T10:58:07Z
- 19e7f201 by finn at 2018-08-23T17:09:41Z
- 3cad2ea0 by finn at 2018-08-23T17:09:41Z
16 changed files:
- buildgrid/_app/bots/temp_directory.py
- buildgrid/_app/commands/cmd_bot.py
- buildgrid/_app/commands/cmd_cas.py
- buildgrid/_app/commands/cmd_execute.py
- buildgrid/_app/commands/cmd_server.py
- − buildgrid/server/action_cache.py
- + buildgrid/server/buildgrid_instance.py
- buildgrid/server/build_grid_server.py → buildgrid/server/buildgrid_server.py
- buildgrid/server/execution/execution_instance.py
- buildgrid/server/execution/execution_service.py
- buildgrid/server/execution/operations_service.py
- buildgrid/server/worker/bots_interface.py
- buildgrid/server/worker/bots_service.py
- tests/integration/bots_service.py
- tests/integration/execution_service.py
- tests/integration/operations_service.py
Changes:
buildgrid/_app/bots/temp_directory.py:

@@ -29,7 +29,7 @@ def work_temp_directory(context, lease):
     then uploads results back to CAS
     """
 
-    instance_name = context.instance_name
+    parent = context.parent
     stub_bytestream = bytestream_pb2_grpc.ByteStreamStub(context.channel)
 
     action_digest = remote_execution_pb2.Digest()
@@ -37,12 +37,12 @@ def work_temp_directory(context, lease):
 
     action = remote_execution_pb2.Action()
 
-    action = parse_to_pb2_from_fetch(action, stub_bytestream, action_digest, instance_name)
+    action = parse_to_pb2_from_fetch(action, stub_bytestream, action_digest, parent)
 
     with tempfile.TemporaryDirectory() as temp_dir:
 
         command = remote_execution_pb2.Command()
-        command = parse_to_pb2_from_fetch(command, stub_bytestream, action.command_digest, instance_name)
+        command = parse_to_pb2_from_fetch(command, stub_bytestream, action.command_digest, parent)
 
         arguments = "cd {} &&".format(temp_dir)
 
@@ -51,7 +51,7 @@ def work_temp_directory(context, lease):
 
         context.logger.info(arguments)
 
-        write_fetch_directory(temp_dir, stub_bytestream, action.input_root_digest, instance_name)
+        write_fetch_directory(temp_dir, stub_bytestream, action.input_root_digest, parent)
 
         proc = subprocess.Popen(arguments,
                                 shell=True,
@@ -75,7 +75,7 @@ def work_temp_directory(context, lease):
             requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
                 digest=digest, data=chunk))
 
-        request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
+        request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=parent,
                                                                requests=requests)
 
         stub_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
buildgrid/_app/commands/cmd_bot.py:

@@ -35,7 +35,7 @@ from ..cli import pass_context
 
 
 @click.group(name='bot', short_help="Create and register bot clients.")
-@click.option('--parent', type=click.STRING, default='bgd_test', show_default=True,
+@click.option('--parent', type=click.STRING, default='main', show_default=True,
               help="Targeted farm resource.")
 @click.option('--port', type=click.INT, default='50051', show_default=True,
               help="Remote server's port number.")
@@ -49,6 +49,7 @@ def cli(context, host, port, parent):
     context.logger = logging.getLogger(__name__)
     context.logger.info("Starting on port {}".format(port))
     context.channel = channel
+    context.parent = parent
 
     worker = Worker()
     worker.add_device(Device())
@@ -75,14 +76,11 @@ def run_dummy(context):
 
 
 @cli.command('temp-directory', short_help="Runs commands in temp directory and uploads results.")
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
-              help="Targeted farm instance name.")
 @pass_context
-def run_temp_directory(context, instance_name):
+def run_temp_directory(context):
     """ Downloads files and command from CAS and runs
     in a temp directory, uploading result back to CAS
     """
-    context.instance_name = instance_name
     try:
         b = bot.Bot(context.bot_session)
         b.session(temp_directory.work_temp_directory,
buildgrid/_app/commands/cmd_cas.py:

@@ -31,25 +31,26 @@ from ..cli import pass_context
 
 
 @click.group(name='cas', short_help="Interact with the CAS server.")
+@click.option('--instance-name', type=click.STRING, default='main', show_default=True,
+              help="Targeted farm instance name.")
 @click.option('--port', type=click.INT, default='50051', show_default=True,
               help="Remote server's port number.")
 @click.option('--host', type=click.STRING, default='localhost', show_default=True,
               help="Remote server's hostname.")
 @pass_context
-def cli(context, host, port):
+def cli(context, instance_name, host, port):
     context.logger = logging.getLogger(__name__)
     context.logger.info("Starting on port {}".format(port))
 
+    context.instance_name = instance_name
     context.channel = grpc.insecure_channel('{}:{}'.format(host, port))
     context.port = port
 
 
 @cli.command('upload-files', short_help="Upload files to the CAS server.")
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
-              help="Targeted farm instance name.")
 @click.argument('files', nargs=-1, type=click.File('rb'), required=True)
 @pass_context
-def upload_files(context, files, instance_name):
+def upload_files(context, files):
     stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
 
     requests = []
@@ -58,7 +59,7 @@ def upload_files(context, files, instance_name):
         requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
             digest=create_digest(chunk), data=chunk))
 
-    request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
+    request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=context.instance_name,
                                                            requests=requests)
 
     context.logger.info("Sending: {}".format(request))
@@ -67,11 +68,9 @@ def upload_files(context, files, instance_name):
 
 
 @cli.command('upload-dir', short_help="Upload a directory to the CAS server.")
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
-              help="Targeted farm instance name.")
 @click.argument('directory', nargs=1, type=click.Path(), required=True)
 @pass_context
-def upload_dir(context, directory, instance_name):
+def upload_dir(context, directory):
     context.logger.info("Uploading directory to cas")
     stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
 
@@ -81,7 +80,7 @@ def upload_dir(context, directory, instance_name):
         requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
             digest=file_digest, data=chunk))
 
-    request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
+    request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=context.instance_name,
                                                            requests=requests)
 
     context.logger.info("Request:\n{}".format(request))
buildgrid/_app/commands/cmd_execute.py:

@@ -36,34 +36,35 @@ from ..cli import pass_context
 
 
 @click.group(name='execute', short_help="Execute simple operations.")
+@click.option('--instance-name', type=click.STRING, default='main',
+              show_default=True, help="Targeted farm instance name.")
 @click.option('--port', type=click.INT, default='50051', show_default=True,
               help="Remote server's port number.")
 @click.option('--host', type=click.STRING, default='localhost', show_default=True,
               help="Remote server's hostname.")
 @pass_context
-def cli(context, host, port):
+def cli(context, instance_name, host, port):
     context.logger = logging.getLogger(__name__)
     context.logger.info("Starting on port {}".format(port))
 
+    context.instance_name = instance_name
     context.channel = grpc.insecure_channel('{}:{}'.format(host, port))
     context.port = port
 
 
 @cli.command('request-dummy', short_help="Send a dummy action.")
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
-              help="Targeted farm instance name.")
 @click.option('--number', type=click.INT, default=1, show_default=True,
               help="Number of request to send.")
 @click.option('--wait-for-completion', is_flag=True,
               help="Stream updates until jobs are completed.")
 @pass_context
-def request_dummy(context, number, instance_name, wait_for_completion):
+def request_dummy(context, number, wait_for_completion):
     action_digest = remote_execution_pb2.Digest()
 
     context.logger.info("Sending execution request...")
     stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
 
-    request = remote_execution_pb2.ExecuteRequest(instance_name=instance_name,
+    request = remote_execution_pb2.ExecuteRequest(instance_name=context.instance_name,
                                                   action_digest=action_digest,
                                                   skip_cache_lookup=True)
 
@@ -98,7 +99,7 @@ def list_operations(context):
     context.logger.info("Getting list of operations")
     stub = operations_pb2_grpc.OperationsStub(context.channel)
 
-    request = operations_pb2.ListOperationsRequest()
+    request = operations_pb2.ListOperationsRequest(name=context.instance_name)
 
     response = stub.ListOperations(request)
 
@@ -115,7 +116,8 @@ def list_operations(context):
 @pass_context
 def wait_execution(context, operation_name):
     stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
-    request = remote_execution_pb2.WaitExecutionRequest(name=operation_name)
+    request = remote_execution_pb2.WaitExecutionRequest(instance_name=context.instance_name,
+                                                        name=operation_name)
 
     response = stub.WaitExecution(request)
 
@@ -124,8 +126,6 @@ def wait_execution(context, operation_name):
 
 
 @cli.command('command', short_help="Send a command to be executed.")
-@click.option('--instance-name', type=click.STRING, default='testing', show_default=True,
-              help="Targeted farm instance name.")
 @click.option('--output-file', nargs=2, type=(click.STRING, click.BOOL), multiple=True,
               help="Tuple of expected output file and is-executeable flag.")
 @click.option('--output-directory', default='testing', show_default=True,
@@ -133,7 +133,7 @@ def wait_execution(context, operation_name):
 @click.argument('input-root', nargs=1, type=click.Path(), required=True)
 @click.argument('commands', nargs=-1, type=click.STRING, required=True)
 @pass_context
-def command(context, input_root, commands, output_file, output_directory, instance_name):
+def command(context, input_root, commands, output_file, output_directory):
     stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
 
     execute_command = remote_execution_pb2.Command()
@@ -170,11 +170,11 @@ def command(context, input_root, commands, output_file, output_directory, instan
         requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
             digest=action_digest, data=action.SerializeToString()))
 
-    request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
+    request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=context.instance_name,
                                                            requests=requests)
    remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel).BatchUpdateBlobs(request)
 
-    request = remote_execution_pb2.ExecuteRequest(instance_name=instance_name,
+    request = remote_execution_pb2.ExecuteRequest(instance_name=context.instance_name,
                                                   action_digest=action_digest,
                                                   skip_cache_lookup=True)
     response = stub.Execute(request)
@@ -201,7 +201,7 @@ def command(context, input_root, commands, output_file, output_directory, instan
             raise
 
         with open(path, 'wb+') as f:
-            write_fetch_blob(f, stub, output_file_response.digest, instance_name)
+            write_fetch_blob(f, stub, output_file_response.digest, context.instance_name)
 
         if output_file_response.path in output_executeables:
             st = os.stat(path)
buildgrid/_app/commands/cmd_server.py:

@@ -25,7 +25,7 @@ import logging
 
 import click
 
-from buildgrid.server import build_grid_server
+from buildgrid.server import buildgrid_server
 from buildgrid.server.cas.storage.disk import DiskStorage
 from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
 from buildgrid.server.cas.storage.s3 import S3Storage
@@ -45,6 +45,7 @@ def cli(context):
 
 
 @cli.command('start', short_help="Setup a new server instance.")
+@click.argument('instances', nargs=-1, type=click.STRING)
 @click.option('--port', type=click.INT, default='50051', show_default=True,
               help="The port number to be listened.")
 @click.option('--max-cached-actions', type=click.INT, default=50, show_default=True,
@@ -67,7 +68,9 @@ def cli(context):
 @click.option('--cas-disk-directory', type=click.Path(file_okay=False, dir_okay=True, writable=True),
               help="For --cas=disk, the folder to store CAS blobs in.")
 @pass_context
-def start(context, port, max_cached_actions, allow_uar, cas, **cas_args):
+def start(context, instances, port, max_cached_actions, allow_uar, cas, **cas_args):
+    """ Starts a BuildGrid server.
+    """
     context.logger.info("Starting on port {}".format(port))
 
     cas_storage = _make_cas_storage(context, cas, cas_args)
@@ -79,9 +82,13 @@ def start(context, port, max_cached_actions, allow_uar, cas, **cas_args):
     else:
         action_cache = ActionCache(cas_storage, max_cached_actions, allow_uar)
 
-    server = build_grid_server.BuildGridServer(port,
-                                               cas_storage=cas_storage,
-                                               action_cache=action_cache)
+    if instances is None:
+        instances = ['main']
+
+    server = buildgrid_server.BuildGridServer(port,
+                                              instances,
+                                              cas_storage=cas_storage,
+                                              action_cache=action_cache)
     loop = asyncio.get_event_loop()
     try:
         server.start()
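Note on the new `instances` argument: click passes a variadic argument as an empty tuple, never None, so the `if instances is None` guard above does not actually fire; it is the `if not instances` fallback inside BuildGridServer that supplies the 'main' default. A minimal, self-contained sketch of that behaviour (hypothetical command, not part of this commit):

    import click

    @click.command()
    @click.argument('instances', nargs=-1, type=click.STRING)
    def start(instances):
        # click yields () for an omitted variadic argument, not None
        assert instances is not None
        if not instances:  # the check that actually fires
            instances = ['main']
        click.echo(', '.join(instances))

    if __name__ == '__main__':
        start()  # with no arguments prints "main"; with `a b` prints "a, b"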
buildgrid/server/action_cache.py (deleted):

-# Copyright (C) 2018 Bloomberg LP
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# <http://www.apache.org/licenses/LICENSE-2.0>
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-"""
-ActionCache
-===========
-
-Implements a simple in-memory action cache.
-
-The action cache maps Action to their corresponding ActionResult. An
-ActionResult may be found in cache, for any given Action, if that action has
-already been executed.
-
-Note:
-    Action and ActionResult are referenced by their Digest and mapping is stored
-    in-memory.
-"""
-
-import collections
-
-from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as re_pb2
-
-
-class ActionCache:
-    """In-memory Action to ActionResult associative array.
-    """
-
-    def __init__(self, storage, max_cached_actions):
-        """Initialises a new ActionCache instance.
-
-        Args:
-            storage (StorageABC): storage backend instance to be used.
-            max_cached_actions (int): maximun number of entries to cache.
-        """
-        self._storage = storage
-        self._max_cached_actions = max_cached_actions
-        self._digest_map = collections.OrderedDict()
-
-    def get_action_result(self, action_digest):
-        """Retrieves the cached ActionResult for the given Action digest.
-
-        Args:
-            action_digest (Digest): digest of the Action to query.
-
-        Returns:
-            The cached ActionResult matching the given Action digest or None if
-            the nothing hass been cached yet for that Action.
-        """
-        key = (action_digest.hash, action_digest.size_bytes)
-        if key in self._digest_map:
-            action_result = self._storage.get_message(self._digest_map[key],
-                                                      re_pb2.ActionResult)
-            if action_result is not None:
-                if self._blobs_still_exist(action_result):
-                    self._digest_map.move_to_end(key)
-                    return action_result
-            del self._digest_map[key]
-        return None
-
-    def put_action_result(self, action_digest, action_result):
-        """Stores an ActionResult in cache for the given Action digest.
-
-        If the cache size limit has been reached, the oldest cache entries will
-        be dropped before insertion so that the cache size never exceeds the
-        maximum numbers of entries allowed.
-
-        Args:
-            action_digest (Digest): digest of the Action to select.
-            action_result (ActionResult): result object to store.
-        """
-        if self._max_cached_actions == 0:
-            return
-
-        while len(self._digest_map) >= self._max_cached_actions:
-            self._digest_map.popitem(last=False)
-
-        key = (action_digest.hash, action_digest.size_bytes)
-        action_result_digest = self._storage.put_message(action_result)
-        self._digest_map[key] = action_result_digest
-
-    def _blobs_still_exist(self, action_result):
-        """Checks CAS for ActionResult output blobs existance.
-
-        Args:
-            action_result (ActionResult): ActionResult to search referenced
-                output blobs for.
-
-        Returns:
-            True if all referenced blobs are present in CAS, False otherwise.
-        """
-        blobs_needed = []
-
-        for output_file in action_result.output_files:
-            blobs_needed.append(output_file.digest)
-
-        for output_directory in action_result.output_directories:
-            blobs_needed.append(output_directory.tree_digest)
-            tree = self._storage.get_message(output_directory.tree_digest,
-                                             re_pb2.Tree)
-            if tree is None:
-                return False
-            for file_node in tree.root.files:
-                blobs_needed.append(file_node.digest)
-            for child in tree.children:
-                for file_node in child.files:
-                    blobs_needed.append(file_node.digest)
-
-        if action_result.stdout_digest.hash and not action_result.stdout_raw:
-            blobs_needed.append(action_result.stdout_digest)
-        if action_result.stderr_digest.hash and not action_result.stderr_raw:
-            blobs_needed.append(action_result.stderr_digest)
-
-        missing = self._storage.missing_blobs(blobs_needed)
-        return len(missing) == 0
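This deletion reads as removing a stale duplicate rather than dropping the feature: the two-argument constructor above no longer matches the `ActionCache(cas_storage, max_cached_actions, allow_uar)` call in cmd_server.py, and the integration tests further down keep importing an ActionCache from `buildgrid.server.execution`. For reference, the surviving usage exactly as it appears in tests/integration/execution_service.py:

    from buildgrid.server.cas.storage import lru_memory_cache
    from buildgrid.server.execution import action_cache

    storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
    cache = action_cache.ActionCache(storage, 50)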
buildgrid/server/buildgrid_instance.py (new file):

+# Copyright (C) 2018 Bloomberg LP
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# <http://www.apache.org/licenses/LICENSE-2.0>
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""
+BuildGrid Instance
+==================
+
+An instance of the BuildGrid server.
+
+Contains scheduler, execution instance and an interface to the bots.
+"""
+
+
+import logging
+
+from .execution.execution_instance import ExecutionInstance
+from .scheduler import Scheduler
+from .worker.bots_interface import BotsInterface
+
+
+class BuildGridInstance(ExecutionInstance, BotsInterface):
+
+    def __init__(self, action_cache=None, cas_storage=None):
+        scheduler = Scheduler(action_cache)
+
+        self.logger = logging.getLogger(__name__)
+
+        ExecutionInstance.__init__(self, scheduler, cas_storage)
+        BotsInterface.__init__(self, scheduler)
+
+    def stream_operation_updates(self, message_queue, operation_name):
+        operation = message_queue.get()
+        while not operation.done:
+            yield operation
+            operation = message_queue.get()
+        yield operation
+
+    def cancel_operation(self, name):
+        # TODO: Cancel leases
+        raise NotImplementedError("Cancelled operations not supported")
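The explicit base-class `__init__` calls are the load-bearing detail here: both ExecutionInstance and BotsInterface receive the same Scheduler, so work queued through the execution side is immediately visible to the bots side. A stripped-down sketch of the pattern with toy stand-in classes (hypothetical names, not the real Scheduler API):

    class ExecutionView:
        def __init__(self, scheduler):
            self._scheduler = scheduler

        def execute(self, job):
            self._scheduler.append(job)


    class BotsView:
        def __init__(self, scheduler):
            self._scheduler = scheduler

        def next_lease(self):
            return self._scheduler.pop(0)


    class Instance(ExecutionView, BotsView):
        def __init__(self):
            scheduler = []  # stand-in for Scheduler(action_cache)
            # Both bases are handed the *same* scheduler object.
            ExecutionView.__init__(self, scheduler)
            BotsView.__init__(self, scheduler)


    instance = Instance()
    instance.execute('job-1')
    assert instance.next_lease() == 'job-1'  # queued work reaches the bots side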
buildgrid/server/build_grid_server.py → buildgrid/server/buildgrid_server.py:

@@ -29,35 +29,23 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_p
 from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2_grpc
 from buildgrid._protos.google.longrunning import operations_pb2_grpc
 
+from .buildgrid_instance import BuildGridInstance
 from .cas.bytestream_service import ByteStreamService
 from .cas.content_addressable_storage_service import ContentAddressableStorageService
 from .execution.action_cache_service import ActionCacheService
 from .execution.execution_service import ExecutionService
 from .execution.operations_service import OperationsService
-from .execution.execution_instance import ExecutionInstance
-from .scheduler import Scheduler
 from .worker.bots_service import BotsService
-from .worker.bots_interface import BotsInterface
 
 
 class BuildGridServer:
 
-    def __init__(self, port='50051', max_workers=10, cas_storage=None, action_cache=None):
+    def __init__(self, port='50051', instances=None, max_workers=10, action_cache=None, cas_storage=None):
         port = '[::]:{0}'.format(port)
-        scheduler = Scheduler(action_cache)
-        bots_interface = BotsInterface(scheduler)
-        execution_instance = ExecutionInstance(scheduler, cas_storage)
 
         self._server = grpc.server(futures.ThreadPoolExecutor(max_workers))
         self._server.add_insecure_port(port)
 
-        bots_pb2_grpc.add_BotsServicer_to_server(BotsService(bots_interface),
-                                                 self._server)
-        remote_execution_pb2_grpc.add_ExecutionServicer_to_server(ExecutionService(execution_instance),
-                                                                  self._server)
-        operations_pb2_grpc.add_OperationsServicer_to_server(OperationsService(execution_instance),
-                                                             self._server)
-
         if cas_storage is not None:
             cas_service = ContentAddressableStorageService(cas_storage)
             remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(cas_service,
@@ -69,6 +57,20 @@ class BuildGridServer:
             remote_execution_pb2_grpc.add_ActionCacheServicer_to_server(action_cache_service,
                                                                         self._server)
 
+        buildgrid_instances = {}
+        if not instances:
+            buildgrid_instances["main"] = BuildGridInstance(action_cache, cas_storage)
+        else:
+            for name in instances:
+                buildgrid_instances[name] = BuildGridInstance(action_cache, cas_storage)
+
+        bots_pb2_grpc.add_BotsServicer_to_server(BotsService(buildgrid_instances),
+                                                 self._server)
+        remote_execution_pb2_grpc.add_ExecutionServicer_to_server(ExecutionService(buildgrid_instances),
+                                                                  self._server)
+        operations_pb2_grpc.add_OperationsServicer_to_server(OperationsService(buildgrid_instances),
+                                                             self._server)
+
     def start(self):
         self._server.start()
 
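A hedged usage sketch for the reworked constructor: every named instance gets its own scheduler, while all of them share the single action cache and CAS backend passed in (the instance names and port below are illustrative, not from the commit):

    from buildgrid.server import buildgrid_server

    server = buildgrid_server.BuildGridServer(port='50051',
                                              instances=['main', 'dev'])
    server.start()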
buildgrid/server/execution/execution_instance.py:

@@ -56,12 +56,14 @@ class ExecutionInstance:
 
     def get_operation(self, name):
         operation = self._scheduler.jobs.get(name)
+
         if operation is None:
             raise InvalidArgumentError("Operation name does not exist: {}".format(name))
+
         else:
             return operation.get_operation()
 
-    def list_operations(self, name, list_filter, page_size, page_token):
+    def list_operations(self, list_filter, page_size, page_token):
         # TODO: Pages
         # Spec says number of pages and length of a page are optional
         return self._scheduler.get_operations()
@@ -72,10 +74,6 @@ class ExecutionInstance:
         except KeyError:
             raise InvalidArgumentError("Operation name does not exist: {}".format(name))
 
-    def cancel_operation(self, name):
-        # TODO: Cancel leases
-        raise NotImplementedError("Cancelled operations not supported")
-
     def register_message_client(self, name, queue):
         try:
             self._scheduler.register_client(name, queue)
buildgrid/server/execution/execution_service.py:

@@ -35,23 +35,23 @@ from .._exceptions import InvalidArgumentError
 
 class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
 
-    def __init__(self, instance):
+    def __init__(self, instances):
         self.logger = logging.getLogger(__name__)
-        self._instance = instance
+        self._instances = instances
 
     def Execute(self, request, context):
-        # Ignore request.instance_name for now
-        # Have only one instance
         try:
             message_queue = queue.Queue()
-            operation = self._instance.execute(request.action_digest,
-                                               request.skip_cache_lookup,
-                                               message_queue)
+            instance = self._get_instance(request.instance_name)
+            operation = instance.execute(request.action_digest,
+                                         request.skip_cache_lookup,
+                                         message_queue)
 
-            context.add_callback(partial(self._remove_client, operation.name, message_queue))
+            context.add_callback(partial(instance.unregister_message_client,
+                                         operation.name, message_queue))
 
-            yield from self._stream_operation_updates(message_queue,
-                                                      operation.name)
+            yield from instance.stream_operation_updates(message_queue,
+                                                         operation.name)
 
         except InvalidArgumentError as e:
             self.logger.error(e)
@@ -69,13 +69,15 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
         try:
             message_queue = queue.Queue()
             operation_name = request.name
+            instance = self._get_instance(request.instance_name)
 
-            self._instance.register_message_client(operation_name, message_queue)
+            instance.register_message_client(operation_name, message_queue)
 
-            context.add_callback(partial(self._remove_client, operation_name, message_queue))
+            context.add_callback(partial(instance.unregister_message_client,
+                                         operation_name, message_queue))
 
-            yield from self._stream_operation_updates(message_queue,
-                                                      operation_name)
+            yield from instance.stream_operation_updates(message_queue,
+                                                         operation_name)
 
         except InvalidArgumentError as e:
             self.logger.error(e)
@@ -83,12 +85,9 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
             yield operations_pb2.Operation()
 
-    def _remove_client(self, operation_name, message_queue):
-        self._instance.unregister_message_client(operation_name, message_queue)
+    def _get_instance(self, name):
+        try:
+            return self._instances[name]
 
-    def _stream_operation_updates(self, message_queue, operation_name):
-        operation = message_queue.get()
-        while not operation.done:
-            yield operation
-            operation = message_queue.get()
-        yield operation
+        except KeyError:
+            raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
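On the client side this means Execute and WaitExecution only succeed when `request.instance_name` matches a key the server was constructed with; anything else surfaces as INVALID_ARGUMENT via `_get_instance`. A hedged end-to-end sketch against a server exposing a 'main' instance (host, port and the empty digest are illustrative):

    import grpc

    from buildgrid._protos.build.bazel.remote.execution.v2 import (
        remote_execution_pb2, remote_execution_pb2_grpc)

    channel = grpc.insecure_channel('localhost:50051')
    stub = remote_execution_pb2_grpc.ExecutionStub(channel)

    request = remote_execution_pb2.ExecuteRequest(instance_name='main',
                                                  action_digest=remote_execution_pb2.Digest(),
                                                  skip_cache_lookup=True)

    # Execute streams Operation messages until operation.done is set.
    for operation in stub.Execute(request):
        print(operation.name, operation.done)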
buildgrid/server/execution/operations_service.py:

@@ -23,6 +23,8 @@ import logging
 
 import grpc
 
+from google.protobuf.empty_pb2 import Empty
+
 from buildgrid._protos.google.longrunning import operations_pb2_grpc, operations_pb2
 
 from .._exceptions import InvalidArgumentError
@@ -30,42 +32,102 @@ from .._exceptions import InvalidArgumentError
 
 class OperationsService(operations_pb2_grpc.OperationsServicer):
 
-    def __init__(self, instance):
-        self._instance = instance
+    def __init__(self, instances):
+        self._instances = instances
         self.logger = logging.getLogger(__name__)
 
     def GetOperation(self, request, context):
         try:
-            return self._instance.get_operation(request.name)
+            name = request.name
+            operation_name = self._get_operation_name(name)
+
+            instance = self._get_instance(name)
+
+            operation = instance.get_operation(operation_name)
+            operation.name = name
+            return operation
 
         except InvalidArgumentError as e:
             self.logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
-            return operations_pb2.Operation()
+
+        return operations_pb2.Operation()
 
     def ListOperations(self, request, context):
-        return self._instance.list_operations(request.name,
-                                              request.filter,
+        try:
+            # Name should be the collection name
+            # Or in this case, the instance_name
+            name = request.name
+            instance = self._get_instance(name)
+
+            result = instance.list_operations(request.filter,
                                               request.page_size,
                                               request.page_token)
 
+            for operation in result.operations:
+                operation.name = "{}/{}".format(name, operation.name)
+
+            return result
+
+        except InvalidArgumentError as e:
+            self.logger.error(e)
+            context.set_details(str(e))
+            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
+
+        return operations_pb2.ListOperationsResponse()
+
     def DeleteOperation(self, request, context):
         try:
-            return self._instance.delete_operation(request.name)
+            name = request.name
+            operation_name = self._get_operation_name(name)
+
+            instance = self._get_instance(name)
+
+            instance.delete_operation(operation_name)
 
         except InvalidArgumentError as e:
             self.logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
-            return operations_pb2.Operation()
+
+        return Empty()
 
     def CancelOperation(self, request, context):
         try:
-            return self._instance.cancel_operation(request.name)
+            name = request.name
+            operation_name = self._get_operation_name(name)
+
+            instance = self._get_instance(name)
+
+            instance.cancel_operation(operation_name)
 
         except NotImplementedError as e:
             self.logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-            return operations_pb2.Operation()
+
+        except InvalidArgumentError as e:
+            self.logger.error(e)
+            context.set_details(str(e))
+            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
+
+        return Empty()
+
+    def _get_operation_name(self, name):
+        return name.split("/")[-1]
+
+    def _get_instance(self, name):
+        try:
+            names = name.split("/")
+
+            # Operation name should be in format:
+            # {instance/name}/{operation_id}
+            instance_name = ''.join(names[0:-1])
+            if not instance_name:
+                return self._instances[name]
+
+            return self._instances[instance_name]
 
+        except KeyError:
+            raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
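The naming scheme introduced here is worth spelling out: operations are exposed to clients as {instance_name}/{operation_id}, `_get_operation_name` keeps the last path segment, and `_get_instance` rejoins the rest, though with `''.join` rather than `'/'.join`, so an instance name that itself contains a slash would not round-trip. Illustration with hypothetical values:

    name = 'main/1234-abcd'               # {instance_name}/{operation_id}
    names = name.split('/')

    operation_name = names[-1]            # '1234-abcd'
    instance_name = ''.join(names[0:-1])  # 'main'

    # Caveat: a nested instance name loses its separators under ''.join:
    assert ''.join('sub/grid/1234-abcd'.split('/')[0:-1]) == 'subgrid'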
buildgrid/server/worker/bots_interface.py:

@@ -54,7 +54,8 @@ class BotsInterface:
             pass
 
         # Bot session name, selected by the server
-        name = str(uuid.uuid4())
+        name = "{}/{}".format(parent, str(uuid.uuid4()))
+
         bot_session.name = name
 
         self._bot_ids[name] = bot_id
|
| ... | ... | @@ -33,14 +33,17 @@ from .._exceptions import InvalidArgumentError, OutofSyncError |
| 33 | 33 |
|
| 34 | 34 |
class BotsService(bots_pb2_grpc.BotsServicer):
|
| 35 | 35 |
|
| 36 |
- def __init__(self, instance):
|
|
| 37 |
- self._instance = instance
|
|
| 36 |
+ def __init__(self, instances):
|
|
| 37 |
+ self._instances = instances
|
|
| 38 | 38 |
self.logger = logging.getLogger(__name__)
|
| 39 | 39 |
|
| 40 | 40 |
def CreateBotSession(self, request, context):
|
| 41 | 41 |
try:
|
| 42 |
- return self._instance.create_bot_session(request.parent,
|
|
| 43 |
- request.bot_session)
|
|
| 42 |
+ parent = request.parent
|
|
| 43 |
+ instance = self._get_instance(request.parent)
|
|
| 44 |
+ return instance.create_bot_session(parent,
|
|
| 45 |
+ request.bot_session)
|
|
| 46 |
+ |
|
| 44 | 47 |
except InvalidArgumentError as e:
|
| 45 | 48 |
self.logger.error(e)
|
| 46 | 49 |
context.set_details(str(e))
|
| ... | ... | @@ -50,8 +53,15 @@ class BotsService(bots_pb2_grpc.BotsServicer): |
| 50 | 53 |
|
| 51 | 54 |
def UpdateBotSession(self, request, context):
|
| 52 | 55 |
try:
|
| 53 |
- return self._instance.update_bot_session(request.name,
|
|
| 54 |
- request.bot_session)
|
|
| 56 |
+ names = request.name.split("/")
|
|
| 57 |
+ # Operation name should be in format:
|
|
| 58 |
+ # {instance/name}/{uuid}
|
|
| 59 |
+ instance_name = ''.join(names[0:-1])
|
|
| 60 |
+ |
|
| 61 |
+ instance = self._get_instance(instance_name)
|
|
| 62 |
+ return instance.update_bot_session(request.name,
|
|
| 63 |
+ request.bot_session)
|
|
| 64 |
+ |
|
| 55 | 65 |
except InvalidArgumentError as e:
|
| 56 | 66 |
self.logger.error(e)
|
| 57 | 67 |
context.set_details(str(e))
|
| ... | ... | @@ -72,3 +82,10 @@ class BotsService(bots_pb2_grpc.BotsServicer): |
| 72 | 82 |
def PostBotEventTemp(self, request, context):
|
| 73 | 83 |
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
| 74 | 84 |
return Empty()
|
| 85 |
+ |
|
| 86 |
+ def _get_instance(self, name):
|
|
| 87 |
+ try:
|
|
| 88 |
+ return self._instances[name]
|
|
| 89 |
+ |
|
| 90 |
+ except KeyError:
|
|
| 91 |
+ raise InvalidArgumentError("Instance doesn't exist on server: {}".format(name))
|
tests/integration/bots_service.py:

@@ -18,7 +18,6 @@
 # pylint: disable=redefined-outer-name
 
 import copy
-import uuid
 from unittest import mock
 
 import grpc
@@ -27,7 +26,7 @@ import pytest
 
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2
-from buildgrid.server import scheduler, job
+from buildgrid.server import job, buildgrid_instance
 from buildgrid.server.job import LeaseState
 from buildgrid.server.worker import bots_interface, bots_service
 
@@ -53,8 +52,8 @@ def bot_session():
 
 
 @pytest.fixture
-def schedule():
-    yield scheduler.Scheduler()
+def buildgrid():
+    yield buildgrid_instance.BuildGridInstance()
 
 
 @pytest.fixture
@@ -64,19 +63,17 @@ def bots(schedule):
 
 # Instance to test
 @pytest.fixture
-def instance(bots):
-    yield bots_service.BotsService(bots)
+def instance(buildgrid):
+    instances = {"": buildgrid}
+    yield bots_service.BotsService(instances)
 
 
 def test_create_bot_session(bot_session, context, instance):
-    parent = 'rach'
-    request = bots_pb2.CreateBotSessionRequest(parent=parent,
-                                               bot_session=bot_session)
+    request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
 
     response = instance.CreateBotSession(request, context)
 
     assert isinstance(response, bots_pb2.BotSession)
-    assert uuid.UUID(response.name, version=4)
     assert bot_session.bot_id == response.bot_id
 
 
@@ -92,8 +89,7 @@ def test_create_bot_session_bot_id_fail(context, instance):
 
 
 def test_update_bot_session(bot_session, context, instance):
-    request = bots_pb2.CreateBotSessionRequest(parent='',
-                                               bot_session=bot_session)
+    request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
     bot = instance.CreateBotSession(request, context)
 
     request = bots_pb2.UpdateBotSessionRequest(name=bot.name,
@@ -106,8 +102,7 @@ def test_update_bot_session(bot_session, context, instance):
 
 
 def test_update_bot_session_zombie(bot_session, context, instance):
-    request = bots_pb2.CreateBotSessionRequest(parent='',
-                                               bot_session=bot_session)
+    request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
     bot = instance.CreateBotSession(request, context)
     # Update server with incorrect UUID by rotating it
     bot.name = bot.name[len(bot.name): 0]
@@ -121,8 +116,7 @@ def test_update_bot_session_zombie(bot_session, context, instance):
 
 
 def test_update_bot_session_bot_id_fail(bot_session, context, instance):
-    request = bots_pb2.UpdateBotSessionRequest(name='ana',
-                                               bot_session=bot_session)
+    request = bots_pb2.UpdateBotSessionRequest(bot_session=bot_session)
 
     instance.UpdateBotSession(request, context)
 
@@ -131,17 +125,15 @@ def test_update_bot_session_bot_id_fail(bot_session, context, instance):
 
 @pytest.mark.parametrize("number_of_jobs", [0, 1, 3, 500])
 def test_number_of_leases(number_of_jobs, bot_session, context, instance):
-    request = bots_pb2.CreateBotSessionRequest(parent='',
-                                               bot_session=bot_session)
+    request = bots_pb2.CreateBotSessionRequest(bot_session=bot_session)
     # Inject work
     for _ in range(0, number_of_jobs):
         action_digest = remote_execution_pb2.Digest()
-        instance._instance._scheduler.append_job(job.Job(action_digest))
+        instance._instances[""].execute(action_digest, True)
 
     response = instance.CreateBotSession(request, context)
 
     assert len(response.leases) == number_of_jobs
-    assert isinstance(response, bots_pb2.BotSession)
 
 
 def test_update_leases_with_work(bot_session, context, instance):
@@ -149,7 +141,7 @@ def test_update_leases_with_work(bot_session, context, instance):
                                                bot_session=bot_session)
     # Inject work
     action_digest = remote_execution_pb2.Digest(hash='gaff')
-    instance._instance._scheduler.append_job(job.Job(action_digest))
+    instance._instances[""].execute(action_digest, True)
 
     response = instance.CreateBotSession(request, context)
 
@@ -159,7 +151,6 @@ def test_update_leases_with_work(bot_session, context, instance):
 
     assert isinstance(response, bots_pb2.BotSession)
     assert response.leases[0].state == LeaseState.PENDING.value
-    assert uuid.UUID(response.leases[0].id, version=4)
     assert response_action == action_digest
 
 
@@ -172,7 +163,7 @@ def test_update_leases_work_complete(bot_session, context, instance):
 
     # Inject work
     action_digest = remote_execution_pb2.Digest(hash='gaff')
-    instance._instance._scheduler.append_job(job.Job(action_digest))
+    instance._instances[""].execute(action_digest, True)
 
     request = bots_pb2.UpdateBotSessionRequest(name=response.name,
                                                bot_session=response)
@@ -200,7 +191,7 @@ def test_work_rejected_by_bot(bot_session, context, instance):
                                                bot_session=bot_session)
     # Inject work
     action_digest = remote_execution_pb2.Digest(hash='gaff')
-    instance._instance._scheduler.append_job(job.Job(action_digest))
+    instance._instances[""].execute(action_digest, True)
 
     # Simulated the severed binding between client and server
     response = copy.deepcopy(instance.CreateBotSession(request, context))
@@ -222,7 +213,8 @@ def test_work_out_of_sync_from_pending(state, bot_session, context, instance):
                                                bot_session=bot_session)
     # Inject work
     action_digest = remote_execution_pb2.Digest(hash='gaff')
-    instance._instance._scheduler.append_job(job.Job(action_digest))
+    instance._instances[""].execute(action_digest, True)
+
     # Simulated the severed binding between client and server
     response = copy.deepcopy(instance.CreateBotSession(request, context))
 
@@ -242,7 +234,8 @@ def test_work_out_of_sync_from_active(state, bot_session, context, instance):
                                                bot_session=bot_session)
     # Inject work
     action_digest = remote_execution_pb2.Digest(hash='gaff')
-    instance._instance._scheduler.append_job(job.Job(action_digest))
+    instance._instances[""].execute(action_digest, True)
+
     # Simulated the severed binding between client and server
     response = copy.deepcopy(instance.CreateBotSession(request, context))
 
@@ -268,7 +261,8 @@ def test_work_active_to_active(bot_session, context, instance):
                                                bot_session=bot_session)
     # Inject work
     action_digest = remote_execution_pb2.Digest(hash='gaff')
-    instance._instance._scheduler.append_job(job.Job(action_digest))
+    instance._instances[""].execute(action_digest, True)
+
     # Simulated the severed binding between client and server
     response = copy.deepcopy(instance.CreateBotSession(request, context))
 
tests/integration/execution_service.py:

@@ -26,9 +26,9 @@ import pytest
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildgrid._protos.google.longrunning import operations_pb2
 
-from buildgrid.server import scheduler, job
+from buildgrid.server import job, buildgrid_instance
 from buildgrid.server.cas.storage import lru_memory_cache
-from buildgrid.server.execution import action_cache, execution_instance, execution_service
+from buildgrid.server.execution import action_cache, execution_service
 
 
 @pytest.fixture
@@ -38,19 +38,21 @@ def context():
 
 
 @pytest.fixture(params=["action-cache", "no-action-cache"])
-def execution(request):
+def buildgrid(request):
     if request.param == "action-cache":
         storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
         cache = action_cache.ActionCache(storage, 50)
-        schedule = scheduler.Scheduler(cache)
-        return execution_instance.ExecutionInstance(schedule, storage)
-    return execution_instance.ExecutionInstance(scheduler.Scheduler())
+
+        return buildgrid_instance.BuildGridInstance(action_cache=cache,
+                                                    cas_storage=storage)
+    return buildgrid_instance.BuildGridInstance()
 
 
 # Instance to test
 @pytest.fixture
-def instance(execution):
-    yield execution_service.ExecutionService(execution)
+def instance(buildgrid):
+    instances = {"": buildgrid}
+    yield execution_service.ExecutionService(instances)
 
 
 @pytest.mark.parametrize("skip_cache_lookup", [True, False])
tests/integration/operations_service.py:

@@ -28,10 +28,10 @@ from google.protobuf import any_pb2
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildgrid._protos.google.longrunning import operations_pb2
 
-from buildgrid.server import scheduler
+from buildgrid.server import buildgrid_instance
 from buildgrid.server._exceptions import InvalidArgumentError
 
-from buildgrid.server.execution import execution_instance, operations_service
+from buildgrid.server.execution import operations_service
 
 
 # Can mock this
@@ -52,29 +52,25 @@ def execute_request():
 
 
 @pytest.fixture
-def schedule():
-    yield scheduler.Scheduler()
-
-
-@pytest.fixture
-def execution(schedule):
-    yield execution_instance.ExecutionInstance(schedule)
+def buildgrid():
+    yield buildgrid_instance.BuildGridInstance()
 
 
 # Instance to test
 @pytest.fixture
-def instance(execution):
-    yield operations_service.OperationsService(execution)
+def instance(buildgrid):
+    instances = {"blade": buildgrid}
+    yield operations_service.OperationsService(instances)
 
 
 # Queue an execution, get operation corresponding to that request
 def test_get_operation(instance, execute_request, context):
-    response_execute = instance._instance.execute(execute_request.action_digest,
-                                                  execute_request.skip_cache_lookup)
+    response_execute = instance._instances["blade"].execute(execute_request.action_digest,
+                                                            execute_request.skip_cache_lookup)
 
     request = operations_pb2.GetOperationRequest()
 
-    request.name = response_execute.name
+    request.name = "blade/" + response_execute.name
 
     response = instance.GetOperation(request, context)
     assert response is response_execute
@@ -88,29 +84,30 @@ def test_get_operation_fail(instance, context):
 
 
 def test_list_operations(instance, execute_request, context):
-    response_execute = instance._instance.execute(execute_request.action_digest,
-                                                  execute_request.skip_cache_lookup)
+    response_execute = instance._instances["blade"].execute(execute_request.action_digest,
+                                                            execute_request.skip_cache_lookup)
 
-    request = operations_pb2.ListOperationsRequest()
+    request = operations_pb2.ListOperationsRequest(name="blade")
     response = instance.ListOperations(request, context)
 
-    assert response.operations[0].name == response_execute.name
+    assert response.operations[0].name.split('/')[-1] == response_execute.name
 
 
 def test_list_operations_with_result(instance, execute_request, context):
-    response_execute = instance._instance.execute(execute_request.action_digest,
-                                                  execute_request.skip_cache_lookup)
+    response_execute = instance._instances["blade"].execute(execute_request.action_digest,
+                                                            execute_request.skip_cache_lookup)
 
     action_result = remote_execution_pb2.ActionResult()
     output_file = remote_execution_pb2.OutputFile(path='unicorn')
     action_result.output_files.extend([output_file])
 
-    instance._instance._scheduler.job_complete(response_execute.name, _pack_any(action_result))
+    instance._instances["blade"]._scheduler.job_complete(response_execute.name,
+                                                         _pack_any(action_result))
 
-    request = operations_pb2.ListOperationsRequest()
+    request = operations_pb2.ListOperationsRequest(name="blade")
     response = instance.ListOperations(request, context)
 
-    assert response.operations[0].name == response_execute.name
+    assert response.operations[0].name.split('/')[-1] == response_execute.name
 
     execute_response = remote_execution_pb2.ExecuteResponse()
     response.operations[0].response.Unpack(execute_response)
@@ -118,7 +115,7 @@ def test_list_operations_with_result(instance, execute_request, context):
 
 
 def test_list_operations_empty(instance, context):
-    request = operations_pb2.ListOperationsRequest()
+    request = operations_pb2.ListOperationsRequest(name="blade")
 
     response = instance.ListOperations(request, context)
 
@@ -127,32 +124,27 @@ def test_list_operations_empty(instance, context):
 
 # Send execution off, delete, try to find operation should fail
 def test_delete_operation(instance, execute_request, context):
-    response_execute = instance._instance.execute(execute_request.action_digest,
-                                                  execute_request.skip_cache_lookup)
+    response_execute = instance._instances["blade"].execute(execute_request.action_digest,
+                                                            execute_request.skip_cache_lookup)
     request = operations_pb2.DeleteOperationRequest()
-    request.name = response_execute.name
+    request.name = "blade/" + response_execute.name
     instance.DeleteOperation(request, context)
 
     request = operations_pb2.GetOperationRequest()
-    request.name = response_execute.name
+    request.name = "blade/" + response_execute.name
+
     with pytest.raises(InvalidArgumentError):
-        instance._instance.get_operation(response_execute.name)
+        instance._instances["blade"].get_operation(response_execute.name)
 
 
 def test_delete_operation_fail(instance, execute_request, context):
     request = operations_pb2.DeleteOperationRequest()
+    request.name = "blade/run"
    instance.DeleteOperation(request, context)
 
     context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
 
 
-def test_cancel_operation(instance, context):
-    request = operations_pb2.CancelOperationRequest()
-    instance.CancelOperation(request, context)
-
-    context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
-
-
 def _pack_any(pack):
     some_any = any_pb2.Any()
     some_any.Pack(pack)