finn pushed to branch finn/84-bot-errors at BuildGrid / buildgrid
Commits:
- 07e06b4a by finnball at 2018-09-20T16:21:42Z
- 1113a905 by finnball at 2018-09-20T16:21:42Z
- 1cc0759c by finnball at 2018-09-20T16:21:42Z
- 6a5b122a by finnball at 2018-09-20T16:26:44Z
- 50bc7f6e by finnball at 2018-09-21T12:12:50Z
- 2b04ca5b by finnball at 2018-09-21T12:12:50Z
- afdcb559 by finnball at 2018-09-21T12:12:50Z
- 9dd954df by finnball at 2018-09-21T12:12:50Z
- 7bb0e65f by finnball at 2018-09-21T12:12:50Z
- 3ba75cd0 by finnball at 2018-09-21T12:12:50Z
15 changed files:
- .gitlab-ci.yml
- buildgrid/_app/bots/buildbox.py
- buildgrid/_app/bots/temp_directory.py
- buildgrid/_app/commands/cmd_cas.py
- buildgrid/_app/commands/cmd_execute.py
- buildgrid/bot/bot_session.py
- buildgrid/server/_exceptions.py
- buildgrid/server/execution/instance.py
- buildgrid/server/execution/service.py
- buildgrid/server/job.py
- buildgrid/server/scheduler.py
- buildgrid/settings.py
- docs/source/using_internal.rst
- tests/integration/execution_service.py
- tests/integration/operations_service.py
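
The thread running through these commits is that Execute now requires the requested Action to already be present in CAS, which is why a `cas upload-dummy` command is added and wired into the CI script and the docs (see the diffs under "Changes:" below). As a rough orientation only — not code from the commits — the client-side pairing amounts to something like the following sketch, assuming the sha256-based `create_digest()` helper in `buildgrid.utils` and eliding the gRPC channel setup:

    import hashlib

    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    # The same dummy Action the new client commands build.
    action = remote_execution_pb2.Action(do_not_cache=True)
    data = action.SerializeToString()

    # create_digest() boils down to a sha256 hex digest (64 characters, hence
    # the new HASH_LENGTH constant) plus the blob size.
    action_digest = remote_execution_pb2.Digest(hash=hashlib.sha256(data).hexdigest(),
                                                size_bytes=len(data))

    # `bgd cas upload-dummy` pushes the blob to CAS under that digest...
    # (instance_name left empty here purely for illustration)
    upload_request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name='')
    upload_request.requests.add(digest=action_digest, data=data)

    # ...so that `bgd execute request-dummy` can reference the same digest and
    # the server's ExecutionInstance can fetch the Action back from storage
    # instead of raising the new FailedPreconditionError.
    execute_request = remote_execution_pb2.ExecuteRequest(instance_name='',
                                                          action_digest=action_digest,
                                                          skip_cache_lookup=True)
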
Changes:
.gitlab-ci.yml:

@@ -33,6 +33,7 @@ before_script:
   - ${BGD} server start buildgrid/_app/settings/default.yml &
   - sleep 1 # Allow server to boot
   - ${BGD} bot dummy &
+  - ${BGD} cas upload-dummy
   - ${BGD} execute request-dummy --wait-for-completion


buildgrid/_app/bots/buildbox.py:

@@ -19,7 +19,9 @@ import tempfile

 from google.protobuf import any_pb2

+from buildgrid.settings import HASH_LENGTH
 from buildgrid.client.cas import upload
+from buildgrid._exceptions import BotError
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
 from buildgrid.utils import read_file, write_file, parse_to_pb2_from_fetch

@@ -87,17 +89,30 @@ def work_buildbox(context, lease):

         command_line = subprocess.Popen(command_line,
                                         stdin=subprocess.PIPE,
-                                        stdout=subprocess.PIPE)
-        # TODO: Should return the stdout and stderr to the user.
-        command_line.communicate()
+                                        stdout=subprocess.PIPE,
+                                        stderr=subprocess.PIPE)
+        stdout, stderr = command_line.communicate()
+        action_result = remote_execution_pb2.ActionResult()
+        # TODO: Upload to CAS or output RAW
+        # For now, just pass raw
+        # https://gitlab.com/BuildGrid/buildgrid/issues/90
+        action_result.stdout_raw = stdout

+        if stderr:
+            # TODO: Upload to CAS or output RAW
+            # For now, just pass raw
+            # https://gitlab.com/BuildGrid/buildgrid/issues/90
+            logger.error("BuildBox error: [{}]".format(stderr))
+            raise BotError(stderr, detail=stdout, reason="Captured stderr")

         output_digest = remote_execution_pb2.Digest()
         output_digest.ParseFromString(read_file(output_digest_file.name))

         logger.debug("Output root digest: {}".format(output_digest))

-        if len(output_digest.hash) < 64:
-            logger.warning("Buildbox command failed - no output root digest present.")
+        if len(output_digest.hash) < HASH_LENGTH:
+            raise BotError("Output hash length too small",
+                           detail=stdout, reason="No output root digest present.")

         # TODO: Have BuildBox helping us creating the Tree instance here
         # See https://gitlab.com/BuildStream/buildbox/issues/7 for details

@@ -110,7 +125,6 @@ def work_buildbox(context, lease):
         output_directory.tree_digest.CopyFrom(output_tree_digest)
         output_directory.path = os.path.relpath(working_directory, start='/')

-        action_result = remote_execution_pb2.ActionResult()
         action_result.output_directories.extend([output_directory])

         action_result_any = any_pb2.Any()


buildgrid/_app/bots/temp_directory.py:

@@ -77,11 +77,20 @@ def work_temp_directory(context, lease):
                                    universal_newlines=True,
                                    env=environment,
                                    stdin=subprocess.PIPE,
-                                   stdout=subprocess.PIPE)
-        # TODO: Should return the stdout and stderr in the ActionResult.
-        process.communicate()
+                                   stdout=subprocess.PIPE,
+                                   stderr=subprocess.PIPE)
+        stdout, stderr = process.communicate()

         action_result = remote_execution_pb2.ActionResult()
+        # TODO: Upload to CAS or output RAW
+        # For now, just pass raw
+        # https://gitlab.com/BuildGrid/buildgrid/issues/90
+        action_result.stdout_raw = stdout
+        action_result.stderr_raw = stderr
+
+        if stderr:
+            logger.error("Bot error: [{}]".format(stderr))
+            raise BotError(stderr, detail=stdout, reason="Captured stderr")

         with upload(context.cas_channel, instance=instance_name) as cas:
             for output_path in command.output_files:


buildgrid/_app/commands/cmd_cas.py:

@@ -65,6 +65,23 @@ def cli(context, remote, instance_name, client_key, client_cert, server_cert):
     context.logger.debug("Starting for remote {}".format(context.remote))


+@cli.command('upload-dummy', short_help="Upload a dummy action. Should be used with `execute dummy-request`")
+@pass_context
+def upload_dummy(context):
+    context.logger.info("Uploading dummy action...")
+    action = remote_execution_pb2.Action(do_not_cache=True)
+    action_digest = create_digest(action.SerializeToString())
+
+    request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=context.instance_name)
+    request.requests.add(digest=action_digest,
+                         data=action.SerializeToString())
+
+    stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
+    response = stub.BatchUpdateBlobs(request)
+
+    context.logger.info(response)
+
+
 @cli.command('upload-files', short_help="Upload files to the CAS server.")
 @click.argument('files', nargs=-1, type=click.File('rb'), required=True)
 @pass_context


buildgrid/_app/commands/cmd_execute.py:

@@ -76,9 +76,11 @@ def cli(context, remote, instance_name, client_key, client_cert, server_cert):
               help="Stream updates until jobs are completed.")
 @pass_context
 def request_dummy(context, number, wait_for_completion):
-    action_digest = remote_execution_pb2.Digest()

     context.logger.info("Sending execution request...")
+    action = remote_execution_pb2.Action(do_not_cache=True)
+    action_digest = create_digest(action.SerializeToString())
+
     stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)

     request = remote_execution_pb2.ExecuteRequest(instance_name=context.instance_name,

@@ -90,9 +92,18 @@ def request_dummy(context, number, wait_for_completion):
         responses.append(stub.Execute(request))

     for response in responses:
+
         if wait_for_completion:
+            result = None
             for stream in response:
-                context.logger.info(stream)
+                result = stream
+                context.logger.info(result)
+
+            if not result.done:
+                click.echo("Result did not return True." +
+                           "Was the action uploaded to CAS?", err=True)
+                sys.exit(-1)
+
         else:
             context.logger.info(next(response))


buildgrid/bot/bot_session.py:

@@ -12,6 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+# Disable broad exception catch
+# pylint: disable=broad-except
+

 """
 Bot Session

@@ -23,10 +26,14 @@ import asyncio
 import logging
 import platform
 import uuid
-
 from enum import Enum

+import grpc
+from google.protobuf import any_pb2
+
 from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2, worker_pb2
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
+from buildgrid._exceptions import BotError


 class BotStatus(Enum):

@@ -142,13 +149,35 @@ class BotSession:

     async def create_work(self, lease):
         self.logger.debug("Work created: [{}]".format(lease.id))
-
+        input_lease = lease
         loop = asyncio.get_event_loop()
-        lease = await loop.run_in_executor(None, self._work, self._context, lease)
+
+        try:
+            lease = await loop.run_in_executor(None, self._work, self._context, lease)
+
+        except BotError as e:
+            self.logger.error("Bot error thrown: [{}]".format(e))
+            lease = self._lease_error(input_lease, e)
+
+        except grpc.RpcError as e:
+            self.logger.error("Connection error thrown: [{}]".format(e))
+            lease = self._lease_error(input_lease, e)
+
+        except Exception as e:
+            self.logger.error("Connection error thrown: [{}]".format(e))
+            lease = self._lease_error(input_lease, e)

         self.logger.debug("Work complete: [{}]".format(lease.id))
         self.lease_completed(lease)

+    def _lease_error(self, lease, error):
+        action_result = remote_execution_pb2.ActionResult()
+        action_result.stderr_raw = str(error)
+        action_result_any = any_pb2.Any()
+        action_result_any.Pack(action_result)
+        lease.result.CopyFrom(action_result_any)
+        return lease
+

 class Worker:
     def __init__(self, properties=None, configs=None):


buildgrid/server/_exceptions.py:

@@ -46,3 +46,12 @@ class OutOfRangeError(BgdError):

     def __init__(self, message, detail=None, reason=None):
         super().__init__(message, detail=detail, domain=ErrorDomain.SERVER, reason=reason)
+
+
+class FailedPreconditionError(BgdError):
+    """ One or more errors occurred in setting up the action requested, such as a missing input
+    or command or no worker being available. The client may be able to fix the errors and retry.
+    """
+
+    def __init__(self, message, detail=None, reason=None):
+        super().__init__(message, detail=detail, domain=ErrorDomain.SERVER, reason=reason)


buildgrid/server/execution/instance.py:

@@ -24,12 +24,12 @@ import logging
 from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Action

 from ..job import Job
-from .._exceptions import InvalidArgumentError
+from .._exceptions import InvalidArgumentError, FailedPreconditionError


 class ExecutionInstance:

-    def __init__(self, scheduler, storage=None):
+    def __init__(self, scheduler, storage):
         self.logger = logging.getLogger(__name__)
         self._storage = storage
         self._scheduler = scheduler

@@ -43,13 +43,12 @@ class ExecutionInstance:
         this action.
         """

-        do_not_cache = False
-        if self._storage is not None:
-            action = self._storage.get_message(action_digest, Action)
-            if action is not None:
-                do_not_cache = action.do_not_cache
+        action = self._storage.get_message(action_digest, Action)

-        job = Job(action_digest, do_not_cache, message_queue)
+        if not action:
+            raise FailedPreconditionError("Could not get action from storage.")
+
+        job = Job(action_digest, action.do_not_cache, message_queue)
         self.logger.info("Operation name: [{}]".format(job.name))

         self._scheduler.append_job(job, skip_cache_lookup)


buildgrid/server/execution/service.py:

@@ -30,7 +30,7 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_p

 from buildgrid._protos.google.longrunning import operations_pb2

-from .._exceptions import InvalidArgumentError
+from .._exceptions import InvalidArgumentError, FailedPreconditionError


 class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):

@@ -63,6 +63,12 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
             yield operations_pb2.Operation()

+        except FailedPreconditionError as e:
+            self.logger.error(e)
+            context.set_details(str(e))
+            context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
+            yield operations_pb2.Operation()
+
     def WaitExecution(self, request, context):
         try:
             names = request.name.split("/")


buildgrid/server/job.py:

@@ -21,6 +21,7 @@ from enum import Enum

 from google.protobuf import any_pb2

+from buildgrid._protos.google.rpc import code_pb2, status_pb2
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2
 from buildgrid._protos.google.longrunning import operations_pb2

@@ -121,10 +122,14 @@ class Job:
         self._operation.metadata.CopyFrom(self._pack_any(self.get_operation_meta()))
         if self.result is not None:
             self._operation.done = True
-            action_result = remote_execution_pb2.ActionResult()
-            self.result.Unpack(action_result)
-            response = remote_execution_pb2.ExecuteResponse(result=action_result,
-                                                            cached_result=self.result_cached)
+            status = status_pb2.Status()
+            status.code = code_pb2.OK
+            if self.result.stderr_raw or self.result.stderr_digest:
+                status.code = code_pb2.INTERNAL
+
+            response = remote_execution_pb2.ExecuteResponse(result=self.result,
+                                                            cached_result=self.result_cached,
+                                                            status=status)
             self._operation.response.CopyFrom(self._pack_any(response))

         return self._operation


buildgrid/server/scheduler.py:

@@ -27,6 +27,7 @@ from google.protobuf import any_pb2


 from buildgrid.server._exceptions import NotFoundError
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildgrid._protos.google.longrunning import operations_pb2

 from .job import ExecuteStage, LeaseState

@@ -84,10 +85,13 @@ class Scheduler:

     def job_complete(self, name, result):
         job = self.jobs[name]
-        job.result = result
-        job.update_execute_stage(ExecuteStage.COMPLETED)
+        action_result = remote_execution_pb2.ActionResult()
+        result.Unpack(action_result)
+        job.result = action_result
         if not job.do_not_cache and self._action_cache is not None:
-            self._action_cache.update_action_result(job.action_digest, result)
+            if not (action_result.stderr_raw or action_result.stderr_digest):
+                self._action_cache.update_action_result(job.action_digest, result)
+        job.update_execute_stage(ExecuteStage.COMPLETED)

     def get_operations(self):
         response = operations_pb2.ListOperationsResponse()


buildgrid/settings.py:

@@ -3,3 +3,4 @@ import hashlib

 # The hash function that CAS uses
 HASH = hashlib.sha256
+HASH_LENGTH = 64


docs/source/using_internal.rst:

@@ -1,4 +1,3 @@
-
 .. _internal-client:

 Internal client

@@ -19,7 +18,13 @@ In one terminal, start a server:

    bgd server start buildgrid/_app/settings/default.yml

-In another terminal, send a request for work:
+In another terminal, upload an action to CAS:
+
+.. code-block::sh
+
+   bgd cas upload-dummy
+
+Then send a request for work:

 .. code-block:: sh


tests/integration/execution_service.py:

@@ -28,6 +28,7 @@ import pytest
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 from buildgrid._protos.google.longrunning import operations_pb2

+from buildgrid.utils import create_digest
 from buildgrid.server import job
 from buildgrid.server.controller import ExecutionController
 from buildgrid.server.cas.storage import lru_memory_cache

@@ -37,6 +38,8 @@ from buildgrid.server.execution.service import ExecutionService


 server = mock.create_autospec(grpc.server)
+action = remote_execution_pb2.Action(do_not_cache=True)
+action_digest = create_digest(action.SerializeToString())


 @pytest.fixture

@@ -47,12 +50,16 @@ def context():

 @pytest.fixture(params=["action-cache", "no-action-cache"])
 def controller(request):
+    storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
+    write_session = storage.begin_write(action_digest)
+    storage.commit_write(action_digest, write_session)
+
     if request.param == "action-cache":
-        storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
         cache = ActionCache(storage, 50)
         yield ExecutionController(cache, storage)
+
     else:
-        yield ExecutionController()
+        yield ExecutionController(None, storage)


 # Instance to test

@@ -66,9 +73,6 @@ def instance(controller):

 @pytest.mark.parametrize("skip_cache_lookup", [True, False])
 def test_execute(skip_cache_lookup, instance, context):
-    action_digest = remote_execution_pb2.Digest()
-    action_digest.hash = 'zhora'
-
     request = remote_execution_pb2.ExecuteRequest(instance_name='',
                                                   action_digest=action_digest,
                                                   skip_cache_lookup=skip_cache_lookup)

@@ -91,10 +95,16 @@ def test_wrong_execute_instance(instance, context):
     context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)


-def test_wait_execution(instance, controller, context):
-    action_digest = remote_execution_pb2.Digest()
-    action_digest.hash = 'zhora'
+def test_no_action_digest_in_storage(instance, context):
+    request = remote_execution_pb2.ExecuteRequest(instance_name='',
+                                                  skip_cache_lookup=True)
+    response = instance.Execute(request, context)
+
+    next(response)
+    context.set_code.assert_called_once_with(grpc.StatusCode.FAILED_PRECONDITION)

+
+def test_wait_execution(instance, controller, context):
     j = job.Job(action_digest, None)
     j._operation.done = True


tests/integration/operations_service.py:

@@ -24,18 +24,21 @@ import grpc
 from grpc._server import _Context
 import pytest

-from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
-from buildgrid._protos.google.longrunning import operations_pb2
-
+from buildgrid.utils import create_digest
 from buildgrid.server.controller import ExecutionController
-from buildgrid.server._exceptions import InvalidArgumentError
-
+from buildgrid.server.cas.storage import lru_memory_cache
 from buildgrid.server.operations import service
 from buildgrid.server.operations.service import OperationsService
+from buildgrid.server._exceptions import InvalidArgumentError
+
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
+from buildgrid._protos.google.longrunning import operations_pb2


 server = mock.create_autospec(grpc.server)
 instance_name = "blade"
+action = remote_execution_pb2.Action(do_not_cache=True)
+action_digest = create_digest(action.SerializeToString())


 # Can mock this

@@ -47,9 +50,6 @@ def context():
 # Requests to make
 @pytest.fixture
 def execute_request():
-    action_digest = remote_execution_pb2.Digest()
-    action_digest.hash = 'zhora'
-
     yield remote_execution_pb2.ExecuteRequest(instance_name='',
                                               action_digest=action_digest,
                                               skip_cache_lookup=True)

@@ -57,7 +57,11 @@ def execute_request():

 @pytest.fixture
 def controller():
-    yield ExecutionController()
+    storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
+    write_session = storage.begin_write(action_digest)
+    storage.commit_write(action_digest, write_session)
+
+    yield ExecutionController(None, storage)


 # Instance to test
