[Notes] [Git][BuildGrid/buildgrid][mablanch/83-executed-action-metadata] 11 commits: job.py: Refactor for stronger encapsulation



Title: GitLab

Martin Blanchard pushed to branch mablanch/83-executed-action-metadata at BuildGrid / buildgrid

Commits:

15 changed files:

Changes:

  • .pylintrc
    ... ... @@ -184,7 +184,8 @@ ignore-on-opaque-inference=yes
    184 184
     # List of class names for which member attributes should not be checked (useful
    
    185 185
     # for classes with dynamically set attributes). This supports the use of
    
    186 186
     # qualified names.
    
    187
    -ignored-classes=google.protobuf.any_pb2.Any
    
    187
    +ignored-classes=google.protobuf.any_pb2.Any,
    
    188
    +                google.protobuf.timestamp_pb2.Timestamp
    
    188 189
     
    
    189 190
     # List of module names for which member attributes should not be checked
    
    190 191
     # (useful for modules/projects where namespaces are manipulated during runtime
    

  • buildgrid/_app/bots/dummy.py
    ... ... @@ -17,16 +17,32 @@ import random
    17 17
     import time
    
    18 18
     
    
    19 19
     from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    
    20
    +from buildgrid.utils import get_hostname
    
    20 21
     
    
    21 22
     
    
    22 23
     def work_dummy(context, lease):
    
    23 24
         """ Just returns lease after some random time
    
    24 25
         """
    
    26
    +    action_result = remote_execution_pb2.ActionResult()
    
    27
    +
    
    25 28
         lease.result.Clear()
    
    26 29
     
    
    27
    -    time.sleep(random.randint(1, 5))
    
    30
    +    action_result.execution_metadata.worker = get_hostname()
    
    28 31
     
    
    29
    -    action_result = remote_execution_pb2.ActionResult()
    
    32
     +    # Simulate the input-downloading phase:
    
    33
    +    action_result.execution_metadata.input_fetch_start_timestamp.GetCurrentTime()
    
    34
    +    time.sleep(random.random())
    
    35
    +    action_result.execution_metadata.input_fetch_completed_timestamp.GetCurrentTime()
    
    36
    +
    
    37
     +    # Simulate the execution phase:
    
    38
    +    action_result.execution_metadata.execution_start_timestamp.GetCurrentTime()
    
    39
    +    time.sleep(random.random())
    
    40
    +    action_result.execution_metadata.execution_completed_timestamp.GetCurrentTime()
    
    41
    +
    
    42
     +    # Simulate the output-uploading phase:
    
    43
    +    action_result.execution_metadata.output_upload_start_timestamp.GetCurrentTime()
    
    44
    +    time.sleep(random.random())
    
    45
    +    action_result.execution_metadata.output_upload_completed_timestamp.GetCurrentTime()
    
    30 46
     
    
    31 47
         lease.result.Pack(action_result)
    
    32 48
     
    

  • buildgrid/_app/bots/host.py
    ... ... @@ -19,7 +19,7 @@ import tempfile
    19 19
     
    
    20 20
     from buildgrid.client.cas import download, upload
    
    21 21
     from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    
    22
    -from buildgrid.utils import output_file_maker, output_directory_maker
    
    22
    +from buildgrid.utils import get_hostname, output_file_maker, output_directory_maker
    
    23 23
     
    
    24 24
     
    
    25 25
     def work_host_tools(context, lease):
    
    ... ... @@ -29,10 +29,13 @@ def work_host_tools(context, lease):
    29 29
         logger = context.logger
    
    30 30
     
    
    31 31
         action_digest = remote_execution_pb2.Digest()
    
    32
    +    action_result = remote_execution_pb2.ActionResult()
    
    32 33
     
    
    33 34
         lease.payload.Unpack(action_digest)
    
    34 35
         lease.result.Clear()
    
    35 36
     
    
    37
    +    action_result.execution_metadata.worker = get_hostname()
    
    38
    +
    
    36 39
         with tempfile.TemporaryDirectory() as temp_directory:
    
    37 40
             with download(context.cas_channel, instance=instance_name) as downloader:
    
    38 41
                 action = downloader.get_message(action_digest,
    
    ... ... @@ -43,8 +46,12 @@ def work_host_tools(context, lease):
    43 46
                 command = downloader.get_message(action.command_digest,
    
    44 47
                                                  remote_execution_pb2.Command())
    
    45 48
     
    
    49
    +            action_result.execution_metadata.input_fetch_start_timestamp.GetCurrentTime()
    
    50
    +
    
    46 51
                 downloader.download_directory(action.input_root_digest, temp_directory)
    
    47 52
     
    
    53
    +        action_result.execution_metadata.input_fetch_completed_timestamp.GetCurrentTime()
    
    54
    +
    
    48 55
             environment = os.environ.copy()
    
    49 56
             for variable in command.environment_variables:
    
    50 57
                 if variable.name not in ['PATH', 'PWD']:
    
    ... ... @@ -70,6 +77,8 @@ def work_host_tools(context, lease):
    70 77
     
    
    71 78
             logger.debug(' '.join(command_line))
    
    72 79
     
    
    80
    +        action_result.execution_metadata.execution_start_timestamp.GetCurrentTime()
    
    81
    +
    
    73 82
             process = subprocess.Popen(command_line,
    
    74 83
                                        cwd=working_directory,
    
    75 84
                                        env=environment,
    
    ... ... @@ -80,7 +89,8 @@ def work_host_tools(context, lease):
    80 89
             stdout, stderr = process.communicate()
    
    81 90
             returncode = process.returncode
    
    82 91
     
    
    83
    -        action_result = remote_execution_pb2.ActionResult()
    
    92
    +        action_result.execution_metadata.execution_completed_timestamp.GetCurrentTime()
    
    93
    +
    
    84 94
             # TODO: Upload to CAS or output RAW
    
    85 95
             # For now, just pass raw
    
    86 96
             # https://gitlab.com/BuildGrid/buildgrid/issues/90
    
    ... ... @@ -92,6 +102,8 @@ def work_host_tools(context, lease):
    92 102
             logger.debug("Command stdout: [{}]".format(stdout))
    
    93 103
             logger.debug("Command exit code: [{}]".format(returncode))
    
    94 104
     
    
    105
    +        action_result.execution_metadata.output_upload_start_timestamp.GetCurrentTime()
    
    106
    +
    
    95 107
             with upload(context.cas_channel, instance=instance_name) as uploader:
    
    96 108
                 output_files, output_directories = [], []
    
    97 109
     
    
    ... ... @@ -121,6 +133,8 @@ def work_host_tools(context, lease):
    121 133
     
    
    122 134
                 action_result.output_directories.extend(output_directories)
    
    123 135
     
    
    136
    +        action_result.execution_metadata.output_upload_completed_timestamp.GetCurrentTime()
    
    137
    +
    
    124 138
             lease.result.Pack(action_result)
    
    125 139
     
    
    126 140
         return lease

  • buildgrid/_app/cli.py
    ... ... @@ -26,7 +26,6 @@ import logging
    26 26
     
    
    27 27
     import click
    
    28 28
     import grpc
    
    29
    -from xdg import XDG_CACHE_HOME, XDG_CONFIG_HOME, XDG_DATA_HOME
    
    30 29
     
    
    31 30
     from buildgrid.utils import read_file
    
    32 31
     
    
    ... ... @@ -42,41 +41,21 @@ class Context:
    42 41
     
    
    43 42
             self.user_home = os.getcwd()
    
    44 43
     
    
    45
    -        self.cache_home = os.path.join(XDG_CACHE_HOME, 'buildgrid')
    
    46
    -        self.config_home = os.path.join(XDG_CONFIG_HOME, 'buildgrid')
    
    47
    -        self.data_home = os.path.join(XDG_DATA_HOME, 'buildgrid')
    
    48
    -
    
    49
    -    def load_client_credentials(self, client_key=None, client_cert=None,
    
    50
    -                                server_cert=None, use_default_client_keys=False):
    
    44
    +    def load_client_credentials(self, client_key=None, client_cert=None, server_cert=None):
    
    51 45
             """Looks-up and loads TLS client gRPC credentials.
    
    52 46
     
    
    53 47
             Args:
    
    54 48
                 client_key(str): root certificate file path.
    
    55 49
                 client_cert(str): private key file path.
    
    56 50
                 server_cert(str): certificate chain file path.
    
    57
    -            use_default_client_keys(bool, optional): whether or not to try
    
    58
    -                loading client keys from default location. Defaults to False.
    
    59 51
     
    
    60 52
             Returns:
    
    61 53
                 :obj:`ChannelCredentials`: The credentials for use for a
    
    62 54
                 TLS-encrypted gRPC client channel.
    
    63 55
             """
    
    64
    -        if not client_key or not os.path.exists(client_key):
    
    65
    -            if use_default_client_keys:
    
    66
    -                client_key = os.path.join(self.config_home, 'client.key')
    
    67
    -            else:
    
    68
    -                client_key = None
    
    69
    -
    
    70
    -        if not client_cert or not os.path.exists(client_cert):
    
    71
    -            if use_default_client_keys:
    
    72
    -                client_cert = os.path.join(self.config_home, 'client.crt')
    
    73
    -            else:
    
    74
    -                client_cert = None
    
    75 56
     
    
    76 57
             if not server_cert or not os.path.exists(server_cert):
    
    77
    -            server_cert = os.path.join(self.config_home, 'server.crt')
    
    78
    -            if not os.path.exists(server_cert):
    
    79
    -                return None
    
    58
    +            return None
    
    80 59
     
    
    81 60
             server_cert_pem = read_file(server_cert)
    
    82 61
             if client_key and os.path.exists(client_key):
    
    ... ... @@ -100,8 +79,7 @@ class Context:
    100 79
     
    
    101 80
             return credentials
    
    102 81
     
    
    103
    -    def load_server_credentials(self, server_key=None, server_cert=None,
    
    104
    -                                client_certs=None, use_default_client_certs=False):
    
    82
    +    def load_server_credentials(self, server_key=None, server_cert=None, client_certs=None):
    
    105 83
             """Looks-up and loads TLS server gRPC credentials.
    
    106 84
     
    
    107 85
             Every private and public keys are expected to be PEM-encoded.
    
    ... ... @@ -110,29 +88,16 @@ class Context:
    110 88
                 server_key(str): private server key file path.
    
    111 89
                 server_cert(str): public server certificate file path.
    
    112 90
                 client_certs(str): public client certificates file path.
    
    113
    -            use_default_client_certs(bool, optional): whether or not to try
    
    114
    -                loading public client certificates from default location.
    
    115
    -                Defaults to False.
    
    116 91
     
    
    117 92
             Returns:
    
    118 93
                 :obj:`ServerCredentials`: The credentials for use for a
    
    119 94
                 TLS-encrypted gRPC server channel.
    
    120 95
             """
    
    121 96
             if not server_key or not os.path.exists(server_key):
    
    122
    -            server_key = os.path.join(self.config_home, 'server.key')
    
    123
    -            if not os.path.exists(server_key):
    
    124
    -                return None
    
    97
    +            return None
    
    125 98
     
    
    126 99
             if not server_cert or not os.path.exists(server_cert):
    
    127
    -            server_cert = os.path.join(self.config_home, 'server.crt')
    
    128
    -            if not os.path.exists(server_cert):
    
    129
    -                return None
    
    130
    -
    
    131
    -        if not client_certs or not os.path.exists(client_certs):
    
    132
    -            if use_default_client_certs:
    
    133
    -                client_certs = os.path.join(self.config_home, 'client.crt')
    
    134
    -            else:
    
    135
    -                client_certs = None
    
    100
    +            return None
    
    136 101
     
    
    137 102
             server_key_pem = read_file(server_key)
    
    138 103
             server_cert_pem = read_file(server_cert)
    

  • buildgrid/_app/commands/cmd_operation.py
    ... ... @@ -29,6 +29,7 @@ import grpc
    29 29
     
    
    30 30
     from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
    
    31 31
     from buildgrid._protos.google.longrunning import operations_pb2, operations_pb2_grpc
    
    32
    +from buildgrid._protos.google.rpc import code_pb2
    
    32 33
     
    
    33 34
     from ..cli import pass_context
    
    34 35
     
    
    ... ... @@ -65,35 +66,98 @@ def cli(context, remote, instance_name, client_key, client_cert, server_cert):
    65 66
         context.logger.debug("Starting for remote {}".format(context.remote))
    
    66 67
     
    
    67 68
     
    
    69
    +def _print_operation_state(operation, print_details=False):
    
    70
    +    metadata = remote_execution_pb2.ExecuteOperationMetadata()
    
    71
    +    # The metadata is expected to be an ExecuteOperationMetadata message:
    
    72
    +    assert operation.metadata.Is(metadata.DESCRIPTOR)
    
    73
    +    operation.metadata.Unpack(metadata)
    
    74
    +
    
    75
    +    stage_name = remote_execution_pb2.ExecuteOperationMetadata.Stage.Name(
    
    76
    +        metadata.stage).upper()
    
    77
    +
    
    78
    +    if not operation.done:
    
    79
    +        if stage_name == 'CACHE_CHECK':
    
    80
    +            click.echo('CacheCheck: {}: Querying action-cache (stage={})'
    
    81
    +                       .format(operation.name, metadata.stage))
    
    82
    +        elif stage_name == 'QUEUED':
    
    83
    +            click.echo('Queued: {}: Waiting for execution (stage={})'
    
    84
    +                       .format(operation.name, metadata.stage))
    
    85
    +        elif stage_name == 'EXECUTING':
    
    86
    +            click.echo('Executing: {}: Currently running (stage={})'
    
    87
    +                       .format(operation.name, metadata.stage))
    
    88
    +        else:
    
    89
    +            click.echo('Error: {}: In an invalid state (stage={})'
    
    90
    +                       .format(operation.name, metadata.stage), err=True)
    
    91
    +        return
    
    92
    +
    
    93
    +    response = remote_execution_pb2.ExecuteResponse()
    
    94
    +    # The response is expected to be an ExecutionResponse message:
    
    95
    +    assert operation.response.Is(response.DESCRIPTOR)
    
    96
    +    operation.response.Unpack(response)
    
    97
    +
    
    98
    +    if response.status.code != code_pb2.OK:
    
    99
    +        click.echo('Failure: {}: {} (code={})'
    
    100
    +                   .format(operation.name, response.status.message, response.status.code))
    
    101
    +    else:
    
    102
    +        if response.result.exit_code != 0:
    
    103
    +            click.echo('Success: {}: Completed with failure (stage={}, exit_code={})'
    
    104
    +                       .format(operation.name, metadata.stage, response.result.exit_code))
    
    105
    +        else:
    
    106
     +            click.echo('Success: {}: Completed successfully (stage={}, exit_code={})'
    
    107
    +                       .format(operation.name, metadata.stage, response.result.exit_code))
    
    108
    +
    
    109
    +    if print_details:
    
    110
    +        metadata = response.result.execution_metadata
    
    111
    +        click.echo('  worker={}'.format(metadata.worker))
    
    112
    +
    
    113
    +        queued = metadata.queued_timestamp.ToDatetime()
    
    114
    +        click.echo('  queued_at={}'.format(queued))
    
    115
    +
    
    116
    +        worker_start = metadata.worker_start_timestamp.ToDatetime()
    
    117
     +        worker_completed = metadata.worker_completed_timestamp.ToDatetime()
    
    118
    +        click.echo('  work_duration={}'.format(worker_completed - worker_start))
    
    119
    +
    
    120
    +        fetch_start = metadata.input_fetch_start_timestamp.ToDatetime()
    
    121
    +        fetch_completed = metadata.input_fetch_completed_timestamp.ToDatetime()
    
    122
    +        click.echo('    fetch_duration={}'.format(fetch_completed - fetch_start))
    
    123
    +
    
    124
    +        execution_start = metadata.execution_start_timestamp.ToDatetime()
    
    125
    +        execution_completed = metadata.execution_completed_timestamp.ToDatetime()
    
    126
     +        click.echo('    execution_duration={}'.format(execution_completed - execution_start))
    
    127
    +
    
    128
    +        upload_start = metadata.output_upload_start_timestamp.ToDatetime()
    
    129
    +        upload_completed = metadata.output_upload_completed_timestamp.ToDatetime()
    
    130
    +        click.echo('    upload_duration={}'.format(upload_completed - upload_start))
    
    131
    +
    
    132
    +        click.echo('  total_duration={}'.format(worker_completed - queued))
    
    133
    +
    
    134
    +
    
    68 135
     @cli.command('status', short_help="Get the status of an operation.")
    
    69 136
     @click.argument('operation-name', nargs=1, type=click.STRING, required=True)
    
    70 137
     @pass_context
    
    71 138
     def status(context, operation_name):
    
    72
    -    context.logger.info("Getting operation status...")
    
    73 139
         stub = operations_pb2_grpc.OperationsStub(context.channel)
    
    74
    -
    
    75 140
         request = operations_pb2.GetOperationRequest(name=operation_name)
    
    76 141
     
    
    77
    -    response = stub.GetOperation(request)
    
    78
    -    context.logger.info(response)
    
    142
    +    operation = stub.GetOperation(request)
    
    143
    +
    
    144
    +    _print_operation_state(operation, print_details=True)
    
    79 145
     
    
    80 146
     
    
    81 147
     @cli.command('list', short_help="List operations.")
    
    82 148
     @pass_context
    
    83 149
     def lists(context):
    
    84
    -    context.logger.info("Getting list of operations")
    
    85 150
         stub = operations_pb2_grpc.OperationsStub(context.channel)
    
    86
    -
    
    87 151
         request = operations_pb2.ListOperationsRequest(name=context.instance_name)
    
    88 152
     
    
    89 153
         response = stub.ListOperations(request)
    
    90 154
     
    
    91 155
         if not response.operations:
    
    92
    -        context.logger.warning("No operations to list")
    
    156
    +        click.echo('Error: No operations to list', err=True)
    
    93 157
             return
    
    94 158
     
    
    95
    -    for op in response.operations:
    
    96
    -        context.logger.info(op)
    
    159
    +    for operation in response.operations:
    
    160
    +        _print_operation_state(operation)
    
    97 161
     
    
    98 162
     
    
    99 163
     @cli.command('wait', short_help="Streams an operation until it is complete.")
    
    ... ... @@ -103,7 +167,10 @@ def wait(context, operation_name):
    103 167
         stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
    
    104 168
         request = remote_execution_pb2.WaitExecutionRequest(name=operation_name)
    
    105 169
     
    
    106
    -    response = stub.WaitExecution(request)
    
    170
    +    operation_iterator = stub.WaitExecution(request)
    
    107 171
     
    
    108
    -    for stream in response:
    
    109
    -        context.logger.info(stream)
    172
    +    for operation in operation_iterator:
    
    173
    +        if operation.done:
    
    174
    +            _print_operation_state(operation, print_details=True)
    
    175
    +        else:
    
    176
    +            _print_operation_state(operation)

  • buildgrid/server/bots/instance.py
    ... ... @@ -66,10 +66,10 @@ class BotsInterface:
    66 66
             self._bot_sessions[name] = bot_session
    
    67 67
             self.logger.info("Created bot session name=[{}] with bot_id=[{}]".format(name, bot_id))
    
    68 68
     
    
    69
    -        # For now, one lease at a time.
    
    70
    -        lease = self._scheduler.create_lease()
    
    71
    -        if lease:
    
    72
    -            bot_session.leases.extend([lease])
    
    69
    +        # TODO: Send worker capabilities to the scheduler!
    
    70
    +        leases = self._scheduler.request_job_leases({})
    
    71
    +        if leases:
    
    72
    +            bot_session.leases.extend(leases)
    
    73 73
     
    
    74 74
             return bot_session
    
    75 75
     
    
    ... ... @@ -85,11 +85,11 @@ class BotsInterface:
    85 85
             del bot_session.leases[:]
    
    86 86
             bot_session.leases.extend(leases)
    
    87 87
     
    
    88
    -        # For now, one lease at a time
    
    88
    +        # TODO: Send worker capabilities to the scheduler!
    
    89 89
             if not bot_session.leases:
    
    90
    -            lease = self._scheduler.create_lease()
    
    91
    -            if lease:
    
    92
    -                bot_session.leases.extend([lease])
    
    90
    +            leases = self._scheduler.request_job_leases({})
    
    91
    +            if leases:
    
    92
    +                bot_session.leases.extend(leases)
    
    93 93
     
    
    94 94
             self._bot_sessions[name] = bot_session
    
    95 95
             return bot_session
    
    ... ... @@ -109,7 +109,8 @@ class BotsInterface:
    109 109
             if server_state == LeaseState.PENDING:
    
    110 110
     
    
    111 111
                 if client_state == LeaseState.ACTIVE:
    
    112
    -                self._scheduler.update_job_lease_state(client_lease.id, client_lease.state)
    
    112
    +                self._scheduler.update_job_lease_state(client_lease.id,
    
    113
    +                                                       LeaseState.ACTIVE)
    
    113 114
                 elif client_state == LeaseState.COMPLETED:
    
    114 115
                     # TODO: Lease was rejected
    
    115 116
                     raise NotImplementedError("'Not Accepted' is unsupported")
    
    ... ... @@ -122,8 +123,10 @@ class BotsInterface:
    122 123
                     pass
    
    123 124
     
    
    124 125
                 elif client_state == LeaseState.COMPLETED:
    
    125
    -                self._scheduler.update_job_lease_state(client_lease.id, client_lease.state)
    
    126
    -                self._scheduler.job_complete(client_lease.id, client_lease.result, client_lease.status)
    
    126
    +                self._scheduler.update_job_lease_state(client_lease.id,
    
    127
    +                                                       LeaseState.COMPLETED,
    
    128
    +                                                       lease_status=client_lease.status,
    
    129
    +                                                       lease_result=client_lease.result)
    
    127 130
                     return None
    
    128 131
     
    
    129 132
                 else:
    

  • buildgrid/server/execution/instance.py
    ... ... @@ -48,12 +48,15 @@ class ExecutionInstance:
    48 48
             if not action:
    
    49 49
                 raise FailedPreconditionError("Could not get action from storage.")
    
    50 50
     
    
    51
    -        job = Job(action_digest, action.do_not_cache, message_queue)
    
    51
    +        job = Job(action, action_digest)
    
    52
    +        if message_queue is not None:
    
    53
    +            job.register_client(message_queue)
    
    54
    +
    
    52 55
             self.logger.info("Operation name: [{}]".format(job.name))
    
    53 56
     
    
    54
    -        self._scheduler.append_job(job, skip_cache_lookup)
    
    57
    +        self._scheduler.queue_job(job, skip_cache_lookup)
    
    55 58
     
    
    56
    -        return job.get_operation()
    
    59
    +        return job.operation
    
    57 60
     
    
    58 61
         def register_message_client(self, name, queue):
    
    59 62
             try:
    

  • buildgrid/server/job.py
    1
    -# Copyright (C) 2018 Codethink Limited
    
    1
    +# Copyright (C) 2018 Bloomberg LP
    
    2 2
     #
    
    3 3
     # Licensed under the Apache License, Version 2.0 (the "License");
    
    4 4
     # you may not use this file except in compliance with the License.
    
    ... ... @@ -11,14 +11,14 @@
    11 11
     # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    
    12 12
     # See the License for the specific language governing permissions and
    
    13 13
     # limitations under the License.
    
    14
    -#
    
    15
    -# Authors:
    
    16
    -#        Finn Ball <finn ball codethink co uk>
    
    14
    +
    
    17 15
     
    
    18 16
     import logging
    
    19 17
     import uuid
    
    20 18
     from enum import Enum
    
    21 19
     
    
    20
    +from google.protobuf import timestamp_pb2
    
    21
    +
    
    22 22
     from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    
    23 23
     from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2
    
    24 24
     from buildgrid._protos.google.longrunning import operations_pb2
    
    ... ... @@ -52,79 +52,190 @@ class LeaseState(Enum):
    52 52
     
    
    53 53
     class Job:
    
    54 54
     
    
    55
    -    def __init__(self, action_digest, do_not_cache=False, message_queue=None):
    
    56
    -        self.lease = None
    
    55
    +    def __init__(self, action, action_digest):
    
    57 56
             self.logger = logging.getLogger(__name__)
    
    58
    -        self.n_tries = 0
    
    59
    -        self.result = None
    
    60
    -        self.result_cached = False
    
    61 57
     
    
    62
    -        self._action_digest = action_digest
    
    63
    -        self._do_not_cache = do_not_cache
    
    64
    -        self._execute_stage = OperationStage.UNKNOWN
    
    65 58
             self._name = str(uuid.uuid4())
    
    66
    -        self._operation = operations_pb2.Operation(name=self._name)
    
    67
    -        self._operation_update_queues = []
    
    59
    +        self._action = remote_execution_pb2.Action()
    
    60
    +        self._operation = operations_pb2.Operation()
    
    61
    +        self._lease = None
    
    62
    +
    
    63
    +        self.__execute_response = None
    
    64
    +        self.__operation_metadata = remote_execution_pb2.ExecuteOperationMetadata()
    
    65
    +        self.__queued_timestamp = timestamp_pb2.Timestamp()
    
    66
    +        self.__worker_start_timestamp = timestamp_pb2.Timestamp()
    
    67
    +        self.__worker_completed_timestamp = timestamp_pb2.Timestamp()
    
    68
    +
    
    69
    +        self.__operation_metadata.action_digest.CopyFrom(action_digest)
    
    70
    +        self.__operation_metadata.stage = OperationStage.UNKNOWN.value
    
    68 71
     
    
    69
    -        if message_queue is not None:
    
    70
    -            self.register_client(message_queue)
    
    72
    +        self._action.CopyFrom(action)
    
    73
    +        self._do_not_cache = self._action.do_not_cache
    
    74
    +        self._operation_update_queues = []
    
    75
    +        self._operation.name = self._name
    
    76
    +        self._operation.done = False
    
    77
    +        self._n_tries = 0
    
    71 78
     
    
    72 79
         @property
    
    73 80
         def name(self):
    
    74 81
             return self._name
    
    75 82
     
    
    83
    +    @property
    
    84
    +    def do_not_cache(self):
    
    85
    +        return self._do_not_cache
    
    86
    +
    
    87
    +    @property
    
    88
    +    def action(self):
    
    89
    +        return self._action
    
    90
    +
    
    76 91
         @property
    
    77 92
         def action_digest(self):
    
    78
    -        return self._action_digest
    
    93
    +        return self.__operation_metadata.action_digest
    
    79 94
     
    
    80 95
         @property
    
    81
    -    def do_not_cache(self):
    
    82
    -        return self._do_not_cache
    
    96
    +    def action_result(self):
    
    97
    +        if self.__execute_response is not None:
    
    98
    +            return self.__execute_response.result
    
    99
    +        else:
    
    100
    +            return None
    
    101
    +
    
    102
    +    @property
    
    103
    +    def operation(self):
    
    104
    +        return self._operation
    
    105
    +
    
    106
    +    @property
    
    107
    +    def operation_stage(self):
    
    108
    +        return OperationStage(self.__operation_metadata.state)
    
    109
    +
    
    110
    +    @property
    
    111
    +    def lease(self):
    
    112
    +        return self._lease
    
    113
    +
    
    114
    +    @property
    
    115
    +    def lease_state(self):
    
    116
    +        if self._lease is not None:
    
    117
    +            return LeaseState(self._lease.state)
    
    118
    +        else:
    
    119
    +            return None
    
    83 120
     
    
    84
    -    def check_job_finished(self):
    
    85
    -        if not self._operation_update_queues:
    
    86
    -            return self._operation.done
    
    87
    -        return False
    
    121
    +    @property
    
    122
    +    def n_tries(self):
    
    123
    +        return self._n_tries
    
    124
    +
    
    125
    +    @property
    
    126
    +    def n_clients(self):
    
    127
    +        return len(self._operation_update_queues)
    
    88 128
     
    
    89 129
         def register_client(self, queue):
    
    130
    +        """Subscribes to the job's :class:`Operation` stage change events.
    
    131
    +
    
    132
    +        Args:
    
    133
    +            queue (queue.Queue): the event queue to register.
    
    134
    +        """
    
    90 135
             self._operation_update_queues.append(queue)
    
    91
    -        queue.put(self.get_operation())
    
    136
    +        queue.put(self._operation)
    
    92 137
     
    
    93 138
         def unregister_client(self, queue):
    
    139
    +        """Unsubscribes to the job's :class:`Operation` stage change events.
    
    140
    +
    
    141
    +        Args:
    
    142
    +            queue (queue.Queue): the event queue to unregister.
    
    143
    +        """
    
    94 144
             self._operation_update_queues.remove(queue)
    
    95 145
     
    
    96
    -    def get_operation(self):
    
    97
    -        self._operation.metadata.Pack(self.get_operation_meta())
    
    98
    -        if self.result is not None:
    
    99
    -            self._operation.done = True
    
    100
    -            response = remote_execution_pb2.ExecuteResponse(result=self.result,
    
    101
    -                                                            cached_result=self.result_cached)
    
    146
    +    def set_cached_result(self, action_result):
    
    147
    +        """Allows specifying an action result from the action cache for the job.
    
    148
    +        """
    
    149
    +        self.__execute_response = remote_execution_pb2.ExecuteResponse()
    
    150
    +        self.__execute_response.result.CopyFrom(action_result)
    
    151
    +        self.__execute_response.cached_result = True
    
    102 152
     
    
    103
    -            if not self.result_cached:
    
    104
    -                response.status.CopyFrom(self.lease.status)
    
    153
    +    def create_lease(self):
    
    154
    +        """Emits a new :class:`Lease` for the job.
    
    105 155
     
    
    106
    -            self._operation.response.Pack(response)
    
    156
    +        Only one :class:`Lease` can be emitted for a given job. This method
    
    157
    +        should only be used once, any further calls are ignored.
    
    158
    +        """
    
    159
    +        if self._lease is not None:
    
    160
    +            return None
    
    107 161
     
    
    108
    -        return self._operation
    
    162
    +        self._lease = bots_pb2.Lease()
    
    163
    +        self._lease.id = self._name
    
    164
    +        self._lease.payload.Pack(self.__operation_metadata.action_digest)
    
    165
    +        self._lease.state = LeaseState.PENDING.value
    
    109 166
     
    
    110
    -    def get_operation_meta(self):
    
    111
    -        meta = remote_execution_pb2.ExecuteOperationMetadata()
    
    112
    -        meta.stage = self._execute_stage.value
    
    113
    -        meta.action_digest.CopyFrom(self._action_digest)
    
    167
    +        return self._lease
    
    114 168
     
    
    115
    -        return meta
    
    169
    +    def update_lease_state(self, state, status=None, result=None):
    
    170
    +        """Operates a state transition for the job's current :class:`Lease`.
    
    116 171
     
    
    117
    -    def create_lease(self):
    
    118
    -        lease = bots_pb2.Lease(id=self.name, state=LeaseState.PENDING.value)
    
    119
    -        lease.payload.Pack(self._action_digest)
    
    172
    +        Args:
    
    173
    +            state (LeaseState): the lease state to transition to.
    
    174
    +            status (google.rpc.Status): the lease execution status, only
    
    175
    +                required if `state` is `COMPLETED`.
    
    176
    +            result (google.protobuf.Any): the lease execution result, only
    
    177
    +                required if `state` is `COMPLETED`.
    
    178
    +        """
    
    179
    +        if state.value == self._lease.state:
    
    180
    +            return
    
    181
    +
    
    182
    +        self._lease.state = state.value
    
    183
    +
    
    184
    +        if self._lease.state == LeaseState.PENDING.value:
    
    185
    +            self.__worker_start_timestamp.Clear()
    
    186
    +            self.__worker_completed_timestamp.Clear()
    
    187
    +
    
    188
    +            self._lease.status.Clear()
    
    189
    +            self._lease.result.Clear()
    
    120 190
     
    
    121
    -        self.lease = lease
    
    122
    -        return lease
    
    191
    +        elif self._lease.state == LeaseState.ACTIVE.value:
    
    192
    +            self.__worker_start_timestamp.GetCurrentTime()
    
    123 193
     
    
    124
    -    def get_operations(self):
    
    125
    -        return operations_pb2.ListOperationsResponse(operations=[self.get_operation()])
    
    194
    +        elif self._lease.state == LeaseState.COMPLETED.value:
    
    195
    +            self.__worker_completed_timestamp.GetCurrentTime()
    
    196
    +
    
    197
    +            action_result = remote_execution_pb2.ActionResult()
    
    198
    +
    
    199
    +            # TODO: Make a distinction between build and bot failures!
    
    200
    +            if status.code != 0:
    
    201
    +                self._do_not_cache = True
    
    202
    +
    
    203
    +            if result is not None:
    
    204
    +                assert result.Is(action_result.DESCRIPTOR)
    
    205
    +                result.Unpack(action_result)
    
    206
    +
    
    207
    +            action_metadata = action_result.execution_metadata
    
    208
    +            action_metadata.queued_timestamp.CopyFrom(self.__worker_start_timestamp)
    
    209
    +            action_metadata.worker_start_timestamp.CopyFrom(self.__worker_start_timestamp)
    
    210
    +            action_metadata.worker_completed_timestamp.CopyFrom(self.__worker_completed_timestamp)
    
    211
    +
    
    212
    +            self.__execute_response = remote_execution_pb2.ExecuteResponse()
    
    213
    +            self.__execute_response.result.CopyFrom(action_result)
    
    214
    +            self.__execute_response.cached_result = False
    
    215
    +            self.__execute_response.status.CopyFrom(status)
    
    126 216
     
    
    127 217
         def update_operation_stage(self, stage):
    
    128
    -        self._execute_stage = stage
    
    218
    +        """Operates a stage transition for the job's :class:`Operation`.
    
    219
    +
    
    220
    +        Args:
    
    221
    +            stage (OperationStage): the operation stage to transition to.
    
    222
    +        """
    
    223
    +        if stage.value == self.__operation_metadata.stage:
    
    224
    +            return
    
    225
    +
    
    226
    +        self.__operation_metadata.stage = stage.value
    
    227
    +
    
    228
    +        if self.__operation_metadata.stage == OperationStage.QUEUED.value:
    
    229
    +            if self.__queued_timestamp.ByteSize() == 0:
    
    230
    +                self.__queued_timestamp.GetCurrentTime()
    
    231
    +            self._n_tries += 1
    
    232
    +
    
    233
    +        elif self.__operation_metadata.stage == OperationStage.COMPLETED.value:
    
    234
    +            if self.__execute_response is not None:
    
    235
    +                self._operation.response.Pack(self.__execute_response)
    
    236
    +            self._operation.done = True
    
    237
    +
    
    238
    +        self._operation.metadata.Pack(self.__operation_metadata)
    
    239
    +
    
    129 240
             for queue in self._operation_update_queues:
    
    130
    -            queue.put(self.get_operation())
    241
    +            queue.put(self._operation)

  • buildgrid/server/operations/instance.py
    ... ... @@ -22,6 +22,7 @@ An instance of the LongRunningOperations Service.
    22 22
     import logging
    
    23 23
     
    
    24 24
     from buildgrid._exceptions import InvalidArgumentError
    
    25
    +from buildgrid._protos.google.longrunning import operations_pb2
    
    25 26
     
    
    26 27
     
    
    27 28
     class OperationsInstance:
    
    ... ... @@ -34,18 +35,21 @@ class OperationsInstance:
    34 35
             server.add_operations_instance(self, instance_name)
    
    35 36
     
    
    36 37
         def get_operation(self, name):
    
    37
    -        operation = self._scheduler.jobs.get(name)
    
    38
    +        job = self._scheduler.jobs.get(name)
    
    38 39
     
    
    39
    -        if operation is None:
    
    40
    +        if job is None:
    
    40 41
                 raise InvalidArgumentError("Operation name does not exist: [{}]".format(name))
    
    41 42
     
    
    42 43
             else:
    
    43
    -            return operation.get_operation()
    
    44
    +            return job.operation
    
    44 45
     
    
    45 46
         def list_operations(self, list_filter, page_size, page_token):
    
    46 47
             # TODO: Pages
    
    47 48
             # Spec says number of pages and length of a page are optional
    
    48
    -        return self._scheduler.get_operations()
    
    49
    +        response = operations_pb2.ListOperationsResponse()
    
    50
    +        response.operations.extend([job.operation for job in self._scheduler.list_jobs()])
    
    51
    +
    
    52
    +        return response
    
    49 53
     
    
    50 54
         def delete_operation(self, name):
    
    51 55
             try:
    

  • buildgrid/server/scheduler.py
    1
    -# Copyright (C) 2018 Codethink Limited
    
    1
    +# Copyright (C) 2018 Bloomberg LP
    
    2 2
     #
    
    3 3
     # Licensed under the Apache License, Version 2.0 (the "License");
    
    4 4
     # you may not use this file except in compliance with the License.
    
    ... ... @@ -11,9 +11,7 @@
    11 11
     # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    
    12 12
     # See the License for the specific language governing permissions and
    
    13 13
     # limitations under the License.
    
    14
    -#
    
    15
    -# Authors:
    
    16
    -#        Finn Ball <finn ball codethink co uk>
    
    14
    +
    
    17 15
     
    
    18 16
     """
    
    19 17
     Scheduler
    
    ... ... @@ -24,8 +22,6 @@ Schedules jobs.
    24 22
     from collections import deque
    
    25 23
     
    
    26 24
     from buildgrid._exceptions import NotFoundError
    
    27
    -from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    
    28
    -from buildgrid._protos.google.longrunning import operations_pb2
    
    29 25
     
    
    30 26
     from .job import OperationStage, LeaseState
    
    31 27
     
    
    ... ... @@ -39,80 +35,101 @@ class Scheduler:
    39 35
             self.jobs = {}
    
    40 36
             self.queue = deque()
    
    41 37
     
    
    42
    -    def register_client(self, name, queue):
    
    43
    -        self.jobs[name].register_client(queue)
    
    38
    +    def register_client(self, job_name, queue):
    
    39
    +        self.jobs[job_name].register_client(queue)
    
    44 40
     
    
    45
    -    def unregister_client(self, name, queue):
    
    46
    -        job = self.jobs[name]
    
    47
    -        job.unregister_client(queue)
    
    48
    -        if job.check_job_finished():
    
    49
    -            del self.jobs[name]
    
    41
    +    def unregister_client(self, job_name, queue):
    
    42
    +        self.jobs[job_name].unregister_client(queue)
    
    50 43
     
    
    51
    -    def append_job(self, job, skip_cache_lookup=False):
    
    44
    +        if not self.jobs[job_name].n_clients and self.jobs[job_name].operation.done:
    
    45
    +            del self.jobs[job_name]
    
    46
    +
    
    47
    +    def queue_job(self, job, skip_cache_lookup=False):
    
    52 48
             self.jobs[job.name] = job
    
    49
    +
    
    50
    +        operation_stage = None
    
    53 51
             if self._action_cache is not None and not skip_cache_lookup:
    
    54 52
                 try:
    
    55
    -                cached_result = self._action_cache.get_action_result(job.action_digest)
    
    53
    +                action_result = self._action_cache.get_action_result(job.action_digest)
    
    56 54
                 except NotFoundError:
    
    55
    +                operation_stage = OperationStage.QUEUED
    
    57 56
                     self.queue.append(job)
    
    58
    -                job.update_operation_stage(OperationStage.QUEUED)
    
    59 57
     
    
    60 58
                 else:
    
    61
    -                job.result = cached_result
    
    62
    -                job.result_cached = True
    
    63
    -                job.update_operation_stage(OperationStage.COMPLETED)
    
    59
    +                job.set_cached_result(action_result)
    
    60
    +                operation_stage = OperationStage.COMPLETED
    
    64 61
     
    
    65 62
             else:
    
    63
    +            operation_stage = OperationStage.QUEUED
    
    66 64
                 self.queue.append(job)
    
    67
    -            job.update_operation_stage(OperationStage.QUEUED)
    
    68 65
     
    
    69
    -    def retry_job(self, name):
    
    70
    -        if name in self.jobs:
    
    71
    -            job = self.jobs[name]
    
    66
    +        job.update_operation_stage(operation_stage)
    
    67
    +
    
    68
    +    def retry_job(self, job_name):
    
    69
    +        if job_name in self.jobs:
    
    70
    +            job = self.jobs[job_name]
    
    72 71
                 if job.n_tries >= self.MAX_N_TRIES:
    
    73 72
                     # TODO: Decide what to do with these jobs
    
    74 73
                     job.update_operation_stage(OperationStage.COMPLETED)
    
    75 74
                     # TODO: Mark these jobs as done
    
    76 75
                 else:
    
    77 76
                     job.update_operation_stage(OperationStage.QUEUED)
    
    78
    -                job.n_tries += 1
    
    79 77
                     self.queue.appendleft(job)
    
    80 78
     
    
    81
    -    def job_complete(self, name, result, status):
    
    82
    -        job = self.jobs[name]
    
    83
    -        job.lease.status.CopyFrom(status)
    
    84
    -        action_result = remote_execution_pb2.ActionResult()
    
    85
    -        result.Unpack(action_result)
    
    86
    -        job.result = action_result
    
    87
    -        if not job.do_not_cache and self._action_cache is not None:
    
    88
    -            if not job.lease.status.code:
    
    89
    -                self._action_cache.update_action_result(job.action_digest, action_result)
    
    90
    -        job.update_operation_stage(OperationStage.COMPLETED)
    
    91
    -
    
    92
    -    def get_operations(self):
    
    93
    -        response = operations_pb2.ListOperationsResponse()
    
    94
    -        for v in self.jobs.values():
    
    95
    -            response.operations.extend([v.get_operation()])
    
    96
    -        return response
    
    97
    -
    
    98
    -    def update_job_lease_state(self, name, state):
    
    99
    -        job = self.jobs[name]
    
    100
    -        job.lease.state = state
    
    101
    -
    
    102
    -    def get_job_lease(self, name):
    
    103
    -        return self.jobs[name].lease
    
    104
    -
    
    105
    -    def cancel_session(self, name):
    
    106
    -        job = self.jobs[name]
    
    107
    -        state = job.lease.state
    
    108
    -        if state in (LeaseState.PENDING.value, LeaseState.ACTIVE.value):
    
    109
    -            self.retry_job(name)
    
    110
    -
    
    111
    -    def create_lease(self):
    
    112
    -        if self.queue:
    
    113
    -            job = self.queue.popleft()
    
    79
    +    def list_jobs(self):
    
    80
    +        return self.jobs.values()
    
    81
    +
    
    82
    +    def request_job_leases(self, worker_capabilities):
    
    83
    +        """Generates a list of the highest priority leases to be run.
    
    84
    +
    
    85
    +        Args:
    
    86
    +            worker_capabilities (dict): a set of key-value pairs describing the
    
    87
    +                worker properties, configuration and state at the time of the
    
    88
    +                request.
    
    89
    +        """
    
    90
    +        if not self.queue:
    
    91
    +            return []
    
    92
    +
    
    93
    +        job = self.queue.popleft()
    
    94
    +        # For now, one lease at a time:
    
    95
    +        lease = job.create_lease()
    
    96
    +
    
    97
    +        return [lease]
    
    98
    +
    
    99
    +    def update_job_lease_state(self, job_name, lease_state, lease_status=None, lease_result=None):
    
    100
    +        """Requests a state transition for a job's current :class:`Lease`.
    
    101
    +
    
    102
    +        Args:
    
    103
    +            job_name (str): name of the job to query.
    
    104
    +            lease_state (LeaseState): the lease state to transition to.
    
    105
    +            lease_status (google.rpc.Status): the lease execution status, only
    
    106
    +                required if `lease_state` is `COMPLETED`.
    
    107
    +            lease_result (google.protobuf.Any): the lease execution result, only
    
    108
    +                required if `lease_state` is `COMPLETED`.
    
    109
    +        """
    
    110
    +        job = self.jobs[job_name]
    
    111
    +
    
    112
    +        if lease_state == LeaseState.PENDING:
    
    113
    +            job.update_lease_state(LeaseState.PENDING)
    
    114
    +            job.update_operation_stage(OperationStage.QUEUED)
    
    115
    +
    
    116
    +        elif lease_state == LeaseState.ACTIVE:
    
    117
    +            job.update_lease_state(LeaseState.ACTIVE)
    
    114 118
                 job.update_operation_stage(OperationStage.EXECUTING)
    
    115
    -            job.create_lease()
    
    116
    -            job.lease.state = LeaseState.PENDING.value
    
    117
    -            return job.lease
    
    118
    -        return None
    119
    +
    
    120
    +        elif lease_state == LeaseState.COMPLETED:
    
    121
    +            job.update_lease_state(LeaseState.COMPLETED,
    
    122
    +                                   status=lease_status, result=lease_result)
    
    123
    +
    
    124
    +            if self._action_cache is not None and not job.do_not_cache:
    
    125
    +                self._action_cache.update_action_result(job.action_digest, job.action_result)
    
    126
    +
    
    127
    +            job.update_operation_stage(OperationStage.COMPLETED)
    
    128
    +
    
    129
    +    def get_job_lease(self, job_name):
    
    130
    +        """Returns the lease associated to job, if any have been emitted yet."""
    
    131
    +        return self.jobs[job_name].lease
    
    132
    +
    
    133
    +    def get_job_operation(self, job_name):
    
    134
    +        """Returns the operation associated to job."""
    
    135
    +        return self.jobs[job_name].operation

  • buildgrid/utils.py
    ... ... @@ -15,11 +15,21 @@
    15 15
     
    
    16 16
     from operator import attrgetter
    
    17 17
     import os
    
    18
    +import socket
    
    18 19
     
    
    19 20
     from buildgrid.settings import HASH
    
    20 21
     from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    
    21 22
     
    
    22 23
     
    
    24
    +def get_hostname():
    
    25
    +    """Returns the hostname of the machine executing that function.
    
    26
    +
    
    27
    +    Returns:
    
    28
    +        str: Hostname for the current machine.
    
    29
    +    """
    
    30
    +    return socket.gethostname()
    
    31
    +
    
    32
    +
    
    23 33
     def create_digest(bytes_to_digest):
    
    24 34
         """Computes the :obj:`Digest` of a piece of data.
    
    25 35
     
    

  • setup.py
    ... ... @@ -119,7 +119,6 @@ setup(
    119 119
             'pyaml',
    
    120 120
             'boto3 < 1.8.0',
    
    121 121
             'botocore < 1.11.0',
    
    122
    -        'xdg',
    
    123 122
         ],
    
    124 123
         entry_points={
    
    125 124
             'console_scripts': [
    

  • tests/integration/bots_service.py
    ... ... @@ -137,7 +137,7 @@ def test_update_leases_with_work(bot_session, context, instance):
    137 137
                                                    bot_session=bot_session)
    
    138 138
     
    
    139 139
         action_digest = remote_execution_pb2.Digest(hash='gaff')
    
    140
    -    _inject_work(instance._instances[""]._scheduler, action_digest)
    
    140
    +    _inject_work(instance._instances[""]._scheduler, action_digest=action_digest)
    
    141 141
     
    
    142 142
         response = instance.CreateBotSession(request, context)
    
    143 143
     
    
    ... ... @@ -159,7 +159,7 @@ def test_update_leases_work_complete(bot_session, context, instance):
    159 159
     
    
    160 160
         # Inject work
    
    161 161
         action_digest = remote_execution_pb2.Digest(hash='gaff')
    
    162
    -    _inject_work(instance._instances[""]._scheduler, action_digest)
    
    162
    +    _inject_work(instance._instances[""]._scheduler, action_digest=action_digest)
    
    163 163
     
    
    164 164
         request = bots_pb2.UpdateBotSessionRequest(name=response.name,
    
    165 165
                                                    bot_session=response)
    
    ... ... @@ -174,6 +174,7 @@ def test_update_leases_work_complete(bot_session, context, instance):
    174 174
         response = copy.deepcopy(instance.UpdateBotSession(request, context))
    
    175 175
     
    
    176 176
         response.leases[0].state = LeaseState.COMPLETED.value
    
    177
    +    response.leases[0].result.Pack(remote_execution_pb2.ActionResult())
    
    177 178
     
    
    178 179
         request = bots_pb2.UpdateBotSessionRequest(name=response.name,
    
    179 180
                                                    bot_session=response)
    
    ... ... @@ -187,7 +188,7 @@ def test_work_rejected_by_bot(bot_session, context, instance):
    187 188
                                                    bot_session=bot_session)
    
    188 189
         # Inject work
    
    189 190
         action_digest = remote_execution_pb2.Digest(hash='gaff')
    
    190
    -    _inject_work(instance._instances[""]._scheduler, action_digest)
    
    191
    +    _inject_work(instance._instances[""]._scheduler, action_digest=action_digest)
    
    191 192
     
    
    192 193
         # Simulated the severed binding between client and server
    
    193 194
         response = copy.deepcopy(instance.CreateBotSession(request, context))
    
    ... ... @@ -209,7 +210,7 @@ def test_work_out_of_sync_from_pending(state, bot_session, context, instance):
    209 210
                                                    bot_session=bot_session)
    
    210 211
         # Inject work
    
    211 212
         action_digest = remote_execution_pb2.Digest(hash='gaff')
    
    212
    -    _inject_work(instance._instances[""]._scheduler, action_digest)
    
    213
    +    _inject_work(instance._instances[""]._scheduler, action_digest=action_digest)
    
    213 214
     
    
    214 215
         # Simulated the severed binding between client and server
    
    215 216
         response = copy.deepcopy(instance.CreateBotSession(request, context))
    
    ... ... @@ -230,7 +231,7 @@ def test_work_out_of_sync_from_active(state, bot_session, context, instance):
    230 231
                                                    bot_session=bot_session)
    
    231 232
         # Inject work
    
    232 233
         action_digest = remote_execution_pb2.Digest(hash='gaff')
    
    233
    -    _inject_work(instance._instances[""]._scheduler, action_digest)
    
    234
    +    _inject_work(instance._instances[""]._scheduler, action_digest=action_digest)
    
    234 235
     
    
    235 236
         # Simulated the severed binding between client and server
    
    236 237
         response = copy.deepcopy(instance.CreateBotSession(request, context))
    
    ... ... @@ -257,7 +258,7 @@ def test_work_active_to_active(bot_session, context, instance):
    257 258
                                                    bot_session=bot_session)
    
    258 259
         # Inject work
    
    259 260
         action_digest = remote_execution_pb2.Digest(hash='gaff')
    
    260
    -    _inject_work(instance._instances[""]._scheduler, action_digest)
    
    261
    +    _inject_work(instance._instances[""]._scheduler, action_digest=action_digest)
    
    261 262
     
    
    262 263
         # Simulated the severed binding between client and server
    
    263 264
         response = copy.deepcopy(instance.CreateBotSession(request, context))
    
    ... ... @@ -279,8 +280,10 @@ def test_post_bot_event_temp(context, instance):
    279 280
         context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
    
    280 281
     
    
    281 282
     
    
    282
    -def _inject_work(scheduler, action_digest=None):
    
    283
    +def _inject_work(scheduler, action=None, action_digest=None):
    
    284
    +    if not action:
    
    285
    +        action = remote_execution_pb2.Action()
    
    283 286
         if not action_digest:
    
    284 287
             action_digest = remote_execution_pb2.Digest()
    
    285
    -    j = job.Job(action_digest, False)
    
    286
    -    scheduler.append_job(j, True)
    288
    +    j = job.Job(action, action_digest)
    
    289
    +    scheduler.queue_job(j, True)

  • tests/integration/execution_service.py
    ... ... @@ -105,7 +105,7 @@ def test_no_action_digest_in_storage(instance, context):
    105 105
     
    
    106 106
     
    
    107 107
     def test_wait_execution(instance, controller, context):
    
    108
    -    j = job.Job(action_digest, None)
    
    108
    +    j = job.Job(action, action_digest)
    
    109 109
         j._operation.done = True
    
    110 110
     
    
    111 111
         request = remote_execution_pb2.WaitExecutionRequest(name="{}/{}".format('', j.name))
    

  • tests/integration/operations_service.py
    ... ... @@ -30,6 +30,7 @@ from buildgrid._protos.google.longrunning import operations_pb2
    30 30
     from buildgrid._protos.google.rpc import status_pb2
    
    31 31
     from buildgrid.server.cas.storage import lru_memory_cache
    
    32 32
     from buildgrid.server.controller import ExecutionController
    
    33
    +from buildgrid.server.job import LeaseState
    
    33 34
     from buildgrid.server.operations import service
    
    34 35
     from buildgrid.server.operations.service import OperationsService
    
    35 36
     from buildgrid.utils import create_digest
    
    ... ... @@ -131,9 +132,10 @@ def test_list_operations_with_result(instance, controller, execute_request, cont
    131 132
         action_result.output_files.extend([output_file])
    
    132 133
     
    
    133 134
         controller.operations_instance._scheduler.jobs[response_execute.name].create_lease()
    
    134
    -    controller.operations_instance._scheduler.job_complete(response_execute.name,
    
    135
    -                                                           _pack_any(action_result),
    
    136
    -                                                           status_pb2.Status())
    
    135
    +    controller.operations_instance._scheduler.update_job_lease_state(response_execute.name,
    
    136
    +                                                                     LeaseState.COMPLETED,
    
    137
    +                                                                     lease_status=status_pb2.Status(),
    
    138
    +                                                                     lease_result=_pack_any(action_result))
    
    137 139
     
    
    138 140
         request = operations_pb2.ListOperationsRequest(name=instance_name)
    
    139 141
         response = instance.ListOperations(request, context)
    
    ... ... @@ -142,7 +144,8 @@ def test_list_operations_with_result(instance, controller, execute_request, cont
    142 144
     
    
    143 145
         execute_response = remote_execution_pb2.ExecuteResponse()
    
    144 146
         response.operations[0].response.Unpack(execute_response)
    
    145
    -    assert execute_response.result == action_result
    
    147
    +
    
    148
    +    assert execute_response.result.output_files == action_result.output_files
    
    146 149
     
    
    147 150
     
    
    148 151
     def test_list_operations_empty(instance, context):
    



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]