[Notes] [Git][BuildGrid/buildgrid][master] 2 commits: Add an action cache




finnball pushed to branch master at BuildGrid / buildgrid

Commits:

11 changed files:

  • app/commands/cmd_server.py
  • buildgrid/server/action_cache.py
  • buildgrid/server/build_grid_server.py
  • buildgrid/server/cas/storage/storage_abc.py
  • buildgrid/server/execution/action_cache_service.py
  • buildgrid/server/execution/execution_instance.py
  • buildgrid/server/job.py
  • buildgrid/server/scheduler.py
  • tests/action_cache.py
  • tests/integration/action_cache_service.py
  • tests/integration/execution_service.py

Changes:

  • app/commands/cmd_server.py
    @@ -28,6 +28,7 @@ import click
     import logging

     from buildgrid.server import build_grid_server
    +from buildgrid.server.action_cache import ActionCache
     from buildgrid.server.cas.storage.disk import DiskStorage
     from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
     from buildgrid.server.cas.storage.s3 import S3Storage
    @@ -43,6 +44,11 @@ def cli(context):

     @cli.command('start', short_help='Starts server')
     @click.option('--port', default='50051')
    +@click.option('--max-cached-actions', type=int, default=50,
    +              help='Maximum number of actions to keep in the ActionCache.')
    +@click.option('--allow-update-action-result/--forbid-update-action-result',
    +              'allow_uar', default=True,
    +              help='Whether or not to allow clients to manually edit the action cache.')
     @click.option('--cas',
                   type=click.Choice(('lru', 's3', 'disk', 'with-cache')),
                   help='CAS storage type to use.')
    @@ -59,15 +65,22 @@ def cli(context):
                   type=click.Path(file_okay=False, dir_okay=True, writable=True),
                   help='For --cas=disk, the folder to store CAS blobs in.')
     @pass_context
    -def start(context, port, cas, **cas_args):
    +def start(context, port, max_cached_actions, allow_uar, cas, **cas_args):
         context.logger.info("Starting on port {}".format(port))

         loop = asyncio.get_event_loop()

         cas_storage = _make_cas_storage(context, cas, cas_args)
         if cas_storage is None:
    -        context.logger.info("Running without CAS")
    -    server = build_grid_server.BuildGridServer(port, cas_storage=cas_storage)
    +        context.logger.info("Running without CAS - action cache will be unavailable")
    +        action_cache = None
    +    else:
    +        action_cache = ActionCache(cas_storage, max_cached_actions)
    +
    +    server = build_grid_server.BuildGridServer(port,
    +                                               cas_storage=cas_storage,
    +                                               action_cache=action_cache,
    +                                               allow_update_action_result=allow_uar)

         try:
             asyncio.ensure_future(server.start())
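
    In short, the two new options control whether an in-memory ActionCache
    backed by the configured CAS storage is created, and whether clients may
    write to it. As a rough illustration (not part of the commit), the wiring
    the start command now performs is roughly equivalent to:

        from buildgrid.server import build_grid_server
        from buildgrid.server.action_cache import ActionCache
        from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache

        cas_storage = LRUMemoryCache(1024 * 1024)      # any StorageABC backend
        action_cache = ActionCache(cas_storage, 50)    # --max-cached-actions
        server = build_grid_server.BuildGridServer(
            '50051',
            cas_storage=cas_storage,
            action_cache=action_cache,
            allow_update_action_result=True)           # --allow-update-action-result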
    

  • buildgrid/server/action_cache.py
    +# Copyright (C) 2018 Bloomberg LP
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License");
    +# you may not use this file except in compliance with the License.
    +# You may obtain a copy of the License at
    +#
    +#  <http://www.apache.org/licenses/LICENSE-2.0>
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS,
    +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    +# See the License for the specific language governing permissions and
    +# limitations under the License.
    +#
    +# Authors:
    +#        Carter Sande <csande bloomberg net>
    +
    +"""
    +ActionCache
    +==================
    +
    +Implements a simple in-memory action cache.
    +"""
    +
    +import collections
    +
    +from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as re_pb2
    +
    +class ActionCache:
    +
    +    def __init__(self, storage, max_cached_actions):
    +        self._storage = storage
    +        self._max_cached_actions = max_cached_actions
    +        self._digest_map = collections.OrderedDict()
    +
    +    def get_action_result(self, action_digest):
    +        """Return the cached ActionResult for the given Action digest, or None
    +        if there isn't one.
    +        """
    +        key = (action_digest.hash, action_digest.size_bytes)
    +        if key in self._digest_map:
    +            action_result = self._storage.get_message(self._digest_map[key],
    +                                                      re_pb2.ActionResult)
    +            if action_result is not None:
    +                if self._blobs_still_exist(action_result):
    +                    self._digest_map.move_to_end(key)
    +                    return action_result
    +            del self._digest_map[key]
    +        return None
    +
    +    def put_action_result(self, action_digest, action_result):
    +        """Add the given ActionResult to the cache for the given Action
    +        digest.
    +        """
    +        if self._max_cached_actions == 0:
    +            return
    +
    +        while len(self._digest_map) >= self._max_cached_actions:
    +            self._digest_map.popitem(last=False)
    +
    +        key = (action_digest.hash, action_digest.size_bytes)
    +        action_result_digest = self._storage.put_message(action_result)
    +        self._digest_map[key] = action_result_digest
    +
    +    def _blobs_still_exist(self, action_result):
    +        """Return True if all the CAS blobs referenced by the given
    +        ActionResult are present in CAS.
    +        """
    +        blobs_needed = []
    +
    +        for output_file in action_result.output_files:
    +            blobs_needed.append(output_file.digest)
    +
    +        for output_directory in action_result.output_directories:
    +            blobs_needed.append(output_directory.tree_digest)
    +            tree = self._storage.get_message(output_directory.tree_digest,
    +                                             re_pb2.Tree)
    +            if tree is None:
    +                return False
    +            for file_node in tree.root.files:
    +                blobs_needed.append(file_node.digest)
    +            for child in tree.children:
    +                for file_node in child.files:
    +                    blobs_needed.append(file_node.digest)
    +
    +        if action_result.stdout_digest.hash and not action_result.stdout_raw:
    +            blobs_needed.append(action_result.stdout_digest)
    +        if action_result.stderr_digest.hash and not action_result.stderr_raw:
    +            blobs_needed.append(action_result.stderr_digest)
    +
    +        missing = self._storage.missing_blobs(blobs_needed)
    +        return len(missing) == 0
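
    For reference, the class is exercised directly by tests/action_cache.py
    below; a minimal usage sketch (digest values are placeholders, not taken
    from the commit) looks like this:

        from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
        from buildgrid.server.action_cache import ActionCache
        from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache

        storage = LRUMemoryCache(1024 * 1024)
        cache = ActionCache(storage, max_cached_actions=50)

        action_digest = remote_execution_pb2.Digest(hash='alpha', size_bytes=4)
        cache.put_action_result(action_digest, remote_execution_pb2.ActionResult())

        # Returns the stored ActionResult, or None if it was evicted or any
        # blob it references is no longer present in CAS.
        result = cache.get_action_result(action_digest)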

  • buildgrid/server/build_grid_server.py
    @@ -33,6 +33,7 @@ from buildgrid._protos.google.longrunning import operations_pb2_grpc

     from .cas.bytestream_service import ByteStreamService
     from .cas.content_addressable_storage_service import ContentAddressableStorageService
    +from .execution.action_cache_service import ActionCacheService
     from .execution.execution_service import ExecutionService
     from .execution.operations_service import OperationsService
     from .execution.execution_instance import ExecutionInstance
    @@ -42,11 +43,11 @@ from .worker.bots_interface import BotsInterface

     class BuildGridServer(object):

    -    def __init__(self, port = '50051', max_workers = 10, cas_storage = None):
    +    def __init__(self, port = '50051', max_workers = 10, cas_storage = None, action_cache = None, allow_update_action_result = True):
             port = '[::]:{0}'.format(port)
    -        scheduler = Scheduler()
    +        scheduler = Scheduler(action_cache)
             bots_interface = BotsInterface(scheduler)
    -        execution_instance = ExecutionInstance(scheduler)
    +        execution_instance = ExecutionInstance(scheduler, cas_storage)

             self._server = grpc.server(futures.ThreadPoolExecutor(max_workers))
             self._server.add_insecure_port(port)
    @@ -63,6 +64,12 @@ class BuildGridServer(object):
                                                                                           self._server)
                 bytestream_pb2_grpc.add_ByteStreamServicer_to_server(ByteStreamService(cas_storage),
                                                                      self._server)
    +        if action_cache is not None:
    +            action_cache_service = ActionCacheService(action_cache,
    +                                                      allow_update_action_result)
    +            remote_execution_pb2_grpc.add_ActionCacheServicer_to_server(action_cache_service,
    +                                                                        self._server)
    +

         async def start(self):
             self._server.start()
    

  • buildgrid/server/cas/storage/storage_abc.py
    @@ -24,6 +24,7 @@ The abstract base class for storage providers.

     import abc

    +from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Digest
     from buildgrid._protos.google.rpc.status_pb2 import Status
     from buildgrid._protos.google.rpc import code_pb2

    @@ -96,3 +97,23 @@ class StorageABC(abc.ABC):
                     else:
                         result.append(Status(code=code_pb2.OK))
             return result
    +
    +    def put_message(self, message):
    +        """Store the given Protobuf message in CAS, returning its digest."""
    +        message_blob = message.SerializeToString()
    +        digest = Digest(hash=HASH(message_blob).hexdigest(), size_bytes=len(message_blob))
    +        session = self.begin_write(digest)
    +        session.write(message_blob)
    +        self.commit_write(digest, session)
    +        return digest
    +
    +    def get_message(self, digest, message_type):
    +        """Retrieve the Protobuf message with the given digest and type from
    +        CAS. If the blob is not present, returns None.
    +        """
    +        message_blob = self.get_blob(digest)
    +        if message_blob is None:
    +            return None
    +        result = message_type.FromString(message_blob.read())
    +        message_blob.close()
    +        return result
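
    These helpers give every storage backend a uniform way to round-trip
    protobuf messages through CAS, which is what the ActionCache above relies
    on. A short sketch of the round trip, using the in-memory backend as an
    example (illustration only, not part of the commit):

        from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
        from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache

        storage = LRUMemoryCache(1024 * 1024)

        command = remote_execution_pb2.Command(arguments=['echo', 'hello'])
        digest = storage.put_message(command)        # serialize, hash and store
        fetched = storage.get_message(digest, remote_execution_pb2.Command)
        assert fetched.arguments == command.arguments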

  • buildgrid/server/execution/action_cache_service.py
    @@ -19,7 +19,7 @@
     ActionCacheService
     ==================

    -Action Cache currently not implemented.
    +Allows clients to manually query/update the action cache.
     """

     import logging
    @@ -29,14 +29,21 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_p

     class ActionCacheService(remote_execution_pb2_grpc.ActionCacheServicer):

    -    def __init__(self, instance):
    -        self._instance = instance
    +    def __init__(self, action_cache, allow_updates=True):
    +        self._action_cache = action_cache
    +        self._allow_updates = allow_updates
             self.logger = logging.getLogger(__name__)

         def GetActionResult(self, request, context):
    -        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    -        return remote_execution_pb2.ActionResult()
    +        result = self._action_cache.get_action_result(request.action_digest)
    +        if result is None:
    +            context.set_code(grpc.StatusCode.NOT_FOUND)
    +            return remote_execution_pb2.ActionResult()
    +        return result

         def UpdateActionResult(self, request, context):
    -        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    -        return remote_execution_pb2.ActionResult()
    +        if not self._allow_updates:
    +            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    +            return remote_execution_pb2.ActionResult()
    +        self._action_cache.put_action_result(request.action_digest, request.action_result)
    +        return request.action_result
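
    With the service registered (see build_grid_server.py above), clients can
    query and update the cache through the standard Remote Execution ActionCache
    RPCs. A rough client-side sketch, assuming a server listening on
    localhost:50051 (illustration only, not part of the commit):

        import grpc

        from buildgrid._protos.build.bazel.remote.execution.v2 import (
            remote_execution_pb2, remote_execution_pb2_grpc)

        channel = grpc.insecure_channel('localhost:50051')
        stub = remote_execution_pb2_grpc.ActionCacheStub(channel)

        digest = remote_execution_pb2.Digest(hash='sample', size_bytes=4)
        try:
            result = stub.GetActionResult(
                remote_execution_pb2.GetActionResultRequest(action_digest=digest))
        except grpc.RpcError as error:
            # The service now reports NOT_FOUND for uncached actions instead of
            # UNIMPLEMENTED.
            assert error.code() == grpc.StatusCode.NOT_FOUND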

  • buildgrid/server/execution/execution_instance.py
    @@ -24,14 +24,17 @@ An instance of the Remote Execution Server.
     import uuid
     import logging

    +from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Action
    +
     from ._exceptions import InvalidArgumentError

     from ..job import Job, ExecuteStage

     class ExecutionInstance():

    -    def __init__(self, scheduler):
    +    def __init__(self, scheduler, storage = None):
             self.logger = logging.getLogger(__name__)
    +        self._storage = storage
             self._scheduler = scheduler

         def execute(self, action_digest, skip_cache_lookup, message_queue=None):
    @@ -39,13 +42,17 @@ class ExecutionInstance():
             Queues an action and creates an Operation instance to be associated with
             this action.
             """
    -        job = Job(action_digest, message_queue)
    +
    +        do_not_cache = False
    +        if self._storage is not None:
    +            action = self._storage.get_message(action_digest, Action)
    +            if action is not None:
    +                do_not_cache = action.do_not_cache
    +
    +        job = Job(action_digest, do_not_cache, message_queue)
             self.logger.info("Operation name: {}".format(job.name))

    -        if not skip_cache_lookup:
    -            raise NotImplementedError("ActionCache not implemented")
    -        else:
    -            self._scheduler.append_job(job)
    +        self._scheduler.append_job(job, skip_cache_lookup)

             return job.get_operation()

     
    

  • buildgrid/server/job.py
    @@ -18,8 +18,6 @@
     import logging
     import uuid

    -import buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2
    -
     from enum import Enum

     from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import ExecuteOperationMetadata, ExecuteResponse
    @@ -51,12 +49,14 @@ class LeaseState(Enum):

     class Job():

    -    def __init__(self, action_digest, message_queue=None):
    +    def __init__(self, action_digest, do_not_cache=False, message_queue=None):
             self.lease = None
             self.logger = logging.getLogger(__name__)
             self.result = None
    +        self.result_cached = False

             self._action_digest = action_digest
    +        self._do_not_cache = do_not_cache
             self._execute_stage = ExecuteStage.UNKNOWN
             self._n_tries = 0
             self._name = str(uuid.uuid4())
    @@ -70,6 +70,14 @@ class Job():
         def name(self):
             return self._name

    +    @property
    +    def action_digest(self):
    +        return self._action_digest
    +
    +    @property
    +    def do_not_cache(self):
    +        return self._do_not_cache
    +
         def check_job_finished(self):
             if not self._operation_update_queues:
                 return self._operation.done
    @@ -88,6 +96,7 @@
                 self._operation.done = True
                 response = ExecuteResponse()
                 self.result.Unpack(response.result)
    +            response.cached_result = self.result_cached
                 self._operation.response.CopyFrom(self._pack_any(response))

             return self._operation
    @@ -95,6 +104,7 @@
         def get_operation_meta(self):
             meta = ExecuteOperationMetadata()
             meta.stage = self._execute_stage.value
    +        meta.action_digest.CopyFrom(self._action_digest)

             return meta

    

  • buildgrid/server/scheduler.py
    @@ -23,7 +23,9 @@ Schedules jobs.

     from collections import deque

    +from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import ActionResult
     from buildgrid._protos.google.longrunning import operations_pb2
    +from google.protobuf import any_pb2

     from .job import ExecuteStage, LeaseState

    @@ -31,7 +33,8 @@ class Scheduler():

         MAX_N_TRIES = 5

    -    def __init__(self):
    +    def __init__(self, action_cache=None):
    +        self.action_cache = action_cache
             self.jobs = {}
             self.queue = deque()

    @@ -44,10 +47,19 @@
             if job.check_job_finished():
                 del self.jobs[name]

    -    def append_job(self, job):
    -        job.update_execute_stage(ExecuteStage.QUEUED)
    +    def append_job(self, job, skip_cache_lookup=False):
             self.jobs[job.name] = job
    +        if self.action_cache is not None and not skip_cache_lookup:
    +            cached_result = self.action_cache.get_action_result(job.action_digest)
    +            if cached_result is not None:
    +                cached_result_any = any_pb2.Any()
    +                cached_result_any.Pack(cached_result)
    +                job.result = cached_result_any
    +                job.result_cached = True
    +                job.update_execute_stage(ExecuteStage.COMPLETED)
    +                return
             self.queue.append(job)
    +        job.update_execute_stage(ExecuteStage.QUEUED)

         def retry_job(self, name):
             if job in self.jobs[name]:
    @@ -67,6 +79,10 @@
             job.result = result
             job.update_execute_stage(ExecuteStage.COMPLETED)
             self.jobs[name] = job
    +        if not job.do_not_cache and self.action_cache is not None:
    +            action_result = ActionResult()
    +            result.Unpack(action_result)
    +            self.action_cache.put_action_result(job.action_digest, action_result)

         def get_operations(self):
             response = operations_pb2.ListOperationsResponse()
    

  • tests/action_cache.py
    +# Copyright (C) 2018 Bloomberg LP
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License");
    +# you may not use this file except in compliance with the License.
    +# You may obtain a copy of the License at
    +#
    +#  <http://www.apache.org/licenses/LICENSE-2.0>
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS,
    +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    +# See the License for the specific language governing permissions and
    +# limitations under the License.
    +#
    +# Authors:
    +#        Carter Sande <csande bloomberg net>
    +
    +from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    +import pytest
    +
    +from buildgrid.server import action_cache
    +from buildgrid.server.cas.storage import lru_memory_cache
    +
    +@pytest.fixture
    +def cas():
    +    return lru_memory_cache.LRUMemoryCache(1024 * 1024)
    +
    +def test_null_action_cache(cas):
    +    cache = action_cache.ActionCache(cas, 0)
    +
    +    action_digest1 = remote_execution_pb2.Digest(hash='alpha', size_bytes=4)
    +    dummy_result = remote_execution_pb2.ActionResult()
    +
    +    cache.put_action_result(action_digest1, dummy_result)
    +    assert cache.get_action_result(action_digest1) is None
    +
    +def test_action_cache_expiry(cas):
    +    cache = action_cache.ActionCache(cas, 2)
    +
    +    action_digest1 = remote_execution_pb2.Digest(hash='alpha', size_bytes=4)
    +    action_digest2 = remote_execution_pb2.Digest(hash='bravo', size_bytes=4)
    +    action_digest3 = remote_execution_pb2.Digest(hash='charlie', size_bytes=4)
    +    dummy_result = remote_execution_pb2.ActionResult()
    +
    +    cache.put_action_result(action_digest1, dummy_result)
    +    cache.put_action_result(action_digest2, dummy_result)
    +
    +    # Get digest 1 (making 2 the least recently used)
    +    assert cache.get_action_result(action_digest1) is not None
    +    # Add digest 3 (so 2 gets removed from the cache)
    +    cache.put_action_result(action_digest3, dummy_result)
    +
    +    assert cache.get_action_result(action_digest1) is not None
    +    assert cache.get_action_result(action_digest2) is None
    +    assert cache.get_action_result(action_digest3) is not None
    +
    +def test_action_cache_checks_cas(cas):
    +    cache = action_cache.ActionCache(cas, 50)
    +
    +    action_digest1 = remote_execution_pb2.Digest(hash='alpha', size_bytes=4)
    +    action_digest2 = remote_execution_pb2.Digest(hash='bravo', size_bytes=4)
    +    action_digest3 = remote_execution_pb2.Digest(hash='charlie', size_bytes=4)
    +
    +    # Create a tree that references digests in CAS
    +    sample_digest = cas.put_message(remote_execution_pb2.Command(arguments=["sample"]))
    +    tree = remote_execution_pb2.Tree()
    +    tree.root.files.add().digest.CopyFrom(sample_digest)
    +    tree.children.add().files.add().digest.CopyFrom(sample_digest)
    +    tree_digest = cas.put_message(tree)
    +
    +    # Add an ActionResult that references real digests to the cache
    +    action_result1 = remote_execution_pb2.ActionResult()
    +    action_result1.output_directories.add().tree_digest.CopyFrom(tree_digest)
    +    action_result1.output_files.add().digest.CopyFrom(sample_digest)
    +    action_result1.stdout_digest.CopyFrom(sample_digest)
    +    action_result1.stderr_digest.CopyFrom(sample_digest)
    +    cache.put_action_result(action_digest1, action_result1)
    +
    +    # Add ActionResults that reference fake digests to the cache
    +    action_result2 = remote_execution_pb2.ActionResult()
    +    action_result2.output_directories.add().tree_digest.hash = "nonexistent"
    +    action_result2.output_directories[0].tree_digest.size_bytes = 8
    +    cache.put_action_result(action_digest2, action_result2)
    +
    +    action_result3 = remote_execution_pb2.ActionResult()
    +    action_result3.stdout_digest.hash = "nonexistent"
    +    action_result3.stdout_digest.size_bytes = 8
    +    cache.put_action_result(action_digest3, action_result3)
    +
    +    # Verify we can get the first ActionResult but not the others
    +    fetched_result1 = cache.get_action_result(action_digest1)
    +    assert fetched_result1.output_directories[0].tree_digest.hash == tree_digest.hash
    +    assert cache.get_action_result(action_digest2) is None
    +    assert cache.get_action_result(action_digest3) is None

  • tests/integration/action_cache_service.py
    @@ -23,45 +23,47 @@ from unittest import mock
     from grpc._server import _Context
     from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    -from buildgrid.server import scheduler
    -from buildgrid.server.execution import execution_instance, action_cache_service
    +from buildgrid.server import action_cache
    +from buildgrid.server.cas.storage import lru_memory_cache
    +from buildgrid.server.execution import action_cache_service

     # Can mock this
     @pytest.fixture
     def context():
         yield mock.MagicMock(spec = _Context)

    -# Requests to make
     @pytest.fixture
    -def execute_request():
    -    action = remote_execution_pb2.Action()
    -    action.command_digest.hash = 'zhora'
    -
    -    yield remote_execution_pb2.ExecuteRequest(instance_name = '',
    -                                              action = action,
    -                                              skip_cache_lookup = True)
    +def cas():
    +    yield lru_memory_cache.LRUMemoryCache(1024 * 1024)

     @pytest.fixture
    -def schedule():
    -    yield scheduler.Scheduler()
    +def cache(cas):
    +    yield action_cache.ActionCache(cas, 50)

    -@pytest.fixture
    -def execution(schedule):
    -    yield execution_instance.ExecutionInstance(schedule)
    +def test_simple_action_result(cache, context):
    +    service = action_cache_service.ActionCacheService(cache)
    +    action_digest = remote_execution_pb2.Digest(hash='sample', size_bytes=4)

    -# Instance to test
    -@pytest.fixture
    -def instance(execution):
    -    yield action_cache_service.ActionCacheService(execution)
    +    # Check that before adding the ActionResult, attempting to fetch it fails
    +    request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
    +    service.GetActionResult(request, context)
    +    context.set_code.assert_called_once_with(grpc.StatusCode.NOT_FOUND)

    -def test_get_action_result(instance, context):
    -    request = remote_execution_pb2.GetActionResultRequest()
    -    instance.GetActionResult(request, context)
    +    # Add an ActionResult to the cache
    +    action_result = remote_execution_pb2.ActionResult(stdout_raw=b'example output')
    +    request = remote_execution_pb2.UpdateActionResultRequest(action_digest=action_digest,
    +                                                             action_result=action_result)
    +    service.UpdateActionResult(request, context)

    -    context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
    +    # Check that fetching it now works
    +    request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
    +    fetched_result = service.GetActionResult(request, context)
    +    assert fetched_result.stdout_raw == action_result.stdout_raw
    +
    +def test_disabled_update_action_result(cache, context):
    +    service = action_cache_service.ActionCacheService(cache, False)

    -def test_update_action_result(instance, context):
         request = remote_execution_pb2.UpdateActionResultRequest()
    -    instance.UpdateActionResult(request, context)
    +    service.UpdateActionResult(request, context)

         context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)

  • tests/integration/execution_service.py
    @@ -26,7 +26,8 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_p
     from buildgrid._protos.google.longrunning import operations_pb2
     from google.protobuf import any_pb2

    -from buildgrid.server import scheduler, job
    +from buildgrid.server import action_cache, scheduler, job
    +from buildgrid.server.cas.storage import lru_memory_cache
     from buildgrid.server.execution import execution_instance, execution_service

     @pytest.fixture
    @@ -34,13 +35,14 @@ def context():
         cxt = mock.MagicMock(spec = _Context)
         yield cxt

    -@pytest.fixture
    -def schedule():
    -    yield scheduler.Scheduler()
    -
    -@pytest.fixture
    -def execution(schedule):
    -    yield execution_instance.ExecutionInstance(schedule)
    +@pytest.fixture(params=["action-cache", "no-action-cache"])
    +def execution(request):
    +    if request.param == "action-cache":
    +        storage = lru_memory_cache.LRUMemoryCache(1024 * 1024)
    +        cache = action_cache.ActionCache(storage, 50)
    +        schedule = scheduler.Scheduler(cache)
    +        return execution_instance.ExecutionInstance(schedule, storage)
    +    return execution_instance.ExecutionInstance(scheduler.Scheduler())

     # Instance to test
     @pytest.fixture
    @@ -56,17 +58,15 @@ def test_execute(skip_cache_lookup, instance, context):
                                                       action_digest = action_digest,
                                                       skip_cache_lookup = skip_cache_lookup)
         response = instance.Execute(request, context)
    -    if skip_cache_lookup is False:
    -        [r for r in response]
    -        context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
    -    else:
    -        result = next(response)
    -        assert isinstance(result, operations_pb2.Operation)
    -        metadata = remote_execution_pb2.ExecuteOperationMetadata()
    -        result.metadata.Unpack(metadata)
    -        assert metadata.stage == job.ExecuteStage.QUEUED.value
    -        assert uuid.UUID(result.name, version=4)
    -        assert result.done is False
    +
    +    result = next(response)
    +    assert isinstance(result, operations_pb2.Operation)
    +    metadata = remote_execution_pb2.ExecuteOperationMetadata()
    +    result.metadata.Unpack(metadata)
    +    assert metadata.stage == job.ExecuteStage.QUEUED.value
    +    assert uuid.UUID(result.name, version=4)
    +    assert result.done is False
    +
     """
     def test_wait_execution(instance, context):
         # TODO: Figure out why next(response) hangs on the .get()
    


