Martin Blanchard pushed to branch master at BuildGrid / buildgrid
Commits:
- 07963525 by Raoul Hidalgo Charman at 2018-11-08T13:57:26Z
- aac2f938 by Raoul Hidalgo Charman at 2018-11-08T13:57:26Z
- 76f2e31f by Martin Blanchard at 2018-11-08T13:57:26Z
- dcd82219 by Martin Blanchard at 2018-11-08T14:50:25Z
- dd572340 by Martin Blanchard at 2018-11-08T16:54:57Z
26 changed files:
- − buildgrid/_app/_logging.py
- buildgrid/_app/cli.py
- buildgrid/bot/bot.py
- buildgrid/bot/bot_interface.py
- buildgrid/bot/bot_session.py
- buildgrid/server/actioncache/service.py
- buildgrid/server/bots/instance.py
- buildgrid/server/bots/service.py
- buildgrid/server/cas/instance.py
- buildgrid/server/cas/service.py
- buildgrid/server/cas/storage/disk.py
- buildgrid/server/cas/storage/lru_memory_cache.py
- buildgrid/server/cas/storage/remote.py
- buildgrid/server/cas/storage/s3.py
- buildgrid/server/cas/storage/with_cache.py
- buildgrid/server/controller.py
- buildgrid/server/execution/instance.py
- buildgrid/server/execution/service.py
- buildgrid/server/instance.py
- buildgrid/server/job.py
- buildgrid/server/operations/instance.py
- buildgrid/server/operations/service.py
- buildgrid/server/referencestorage/service.py
- buildgrid/server/referencestorage/storage.py
- buildgrid/server/scheduler.py
- tests/cas/test_services.py
Changes:
@@ -1,32 +0,0 @@
-# Copyright (C) 2018 Bloomberg LP
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# <http://www.apache.org/licenses/LICENSE-2.0>
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import logging
-
-
-def bgd_logger():
-    formatter = logging.Formatter(
-        fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
-    )
-
-    logger = logging.getLogger()
-    logger.setLevel(logging.INFO)
-
-    handler = logging.StreamHandler()
-    handler.setFormatter(formatter)
-
-    logger.addHandler(handler)
-
-    return logger
@@ -21,16 +21,14 @@ Any files in the commands/ folder with the name cmd_*.py
 will be attempted to be imported.
 """

-import os
 import logging
+import os

 import click
 import grpc

 from buildgrid.utils import read_file

-from . import _logging
-
 CONTEXT_SETTINGS = dict(auto_envvar_prefix='BUILDGRID')


@@ -141,12 +139,27 @@ class BuildGridCLI(click.MultiCommand):


 @click.command(cls=BuildGridCLI, context_settings=CONTEXT_SETTINGS)
-@click.option('-v', '--verbose', is_flag=True,
-              help='Enables verbose mode.')
+@click.option('-v', '--verbose', count=True,
+              help='Increase log verbosity level.')
 @pass_context
 def cli(context, verbose):
     """BuildGrid App"""
-    logger = _logging.bgd_logger()
-    context.verbose = verbose
-    if verbose:
+    logger = logging.getLogger()
+
+    # Clean-up root logger for any pre-configuration:
+    for log_handler in logger.handlers[:]:
+        logger.removeHandler(log_handler)
+    for log_filter in logger.filters[:]:
+        logger.removeFilter(log_filter)
+
+    logging.basicConfig(
+        format='%(asctime)s:%(name)32.32s][%(levelname)5.5s]: %(message)s')
+
+    if verbose == 1:
+        logger.setLevel(logging.WARNING)
+    elif verbose == 2:
+        logger.setLevel(logging.INFO)
+    elif verbose >= 3:
         logger.setLevel(logging.DEBUG)
+    else:
+        logger.setLevel(logging.ERROR)
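Note on the new verbosity handling: click's count=True makes repeated -v flags accumulate into an integer, which the hunk above maps onto logging levels (default ERROR, -v WARNING, -vv INFO, -vvv and above DEBUG). A minimal, self-contained sketch of that behaviour follows; it is illustrative only and not taken from this commit (the command name 'demo' is made up):

    # Illustrative sketch: how click counts repeated '-v' flags.
    import logging

    import click

    @click.command()
    @click.option('-v', '--verbose', count=True, help='Increase log verbosity level.')
    def demo(verbose):
        levels = {0: logging.ERROR, 1: logging.WARNING, 2: logging.INFO}
        logging.getLogger().setLevel(levels.get(verbose, logging.DEBUG))
        click.echo('verbose={}'.format(verbose))

    if __name__ == '__main__':
        demo()  # e.g. invoking with '-vv' prints verbose=2 and selects INFO
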
@@ -30,7 +30,7 @@ class Bot:
     """

     def __init__(self, bot_session, update_period=1):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._bot_session = bot_session
         self._update_period = update_period
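Note: the rename from self.logger to self.__logger (here and throughout the commit) relies on Python's double-underscore name mangling, so the attribute is stored as _Bot__logger and stays out of subclass and external namespaces. A small sketch of the convention, not part of the diff:

    import logging

    class Example:
        def __init__(self):
            # Name-mangled to _Example__logger; effectively private to this class.
            self.__logger = logging.getLogger(__name__)

        def ping(self):
            self.__logger.debug("ping")
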
@@ -31,8 +31,8 @@ class BotInterface:
     """

     def __init__(self, channel):
-        self.logger = logging.getLogger(__name__)
-        self.logger.info(channel)
+        self.__logger = logging.getLogger(__name__)
+
         self._stub = bots_pb2_grpc.BotsStub(channel)

     def create_bot_session(self, parent, bot_session):
@@ -43,8 +43,7 @@ class BotSession:
         If a bot attempts to update an invalid session, it must be rejected and
         may be put in quarantine.
         """
-
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._bot_id = '{}.{}'.format(parent, platform.node())
         self._context = None
@@ -64,20 +63,20 @@ class BotSession:
         self._worker = worker

     def create_bot_session(self, work, context=None):
-        self.logger.debug("Creating bot session")
+        self.__logger.debug("Creating bot session")
         self._work = work
         self._context = context

         session = self._interface.create_bot_session(self._parent, self.get_pb2())
         self._name = session.name

-        self.logger.info("Created bot session with name: [{}]".format(self._name))
+        self.__logger.info("Created bot session with name: [%s]", self._name)

         for lease in session.leases:
             self._update_lease_from_server(lease)

     def update_bot_session(self):
-        self.logger.debug("Updating bot session: [{}]".format(self._bot_id))
+        self.__logger.debug("Updating bot session: [%s]", self._bot_id)
         session = self._interface.update_bot_session(self.get_pb2())
         for k, v in list(self._leases.items()):
             if v.state == LeaseState.COMPLETED.value:
@@ -113,25 +112,25 @@ class BotSession:
             asyncio.ensure_future(self.create_work(lease))

     async def create_work(self, lease):
-        self.logger.debug("Work created: [{}]".format(lease.id))
+        self.__logger.debug("Work created: [%s]", lease.id)
         loop = asyncio.get_event_loop()

         try:
             lease = await loop.run_in_executor(None, self._work, self._context, lease)

         except grpc.RpcError as e:
-            self.logger.error("RPC error thrown: [{}]".format(e))
+            self.__logger.error(e)
             lease.status.CopyFrom(e.code())

         except BotError as e:
-            self.logger.error("Internal bot error thrown: [{}]".format(e))
+            self.__logger.error(e)
             lease.status.code = code_pb2.INTERNAL

         except Exception as e:
-            self.logger.error("Exception thrown: [{}]".format(e))
+            self.__logger.error(e)
             lease.status.code = code_pb2.INTERNAL

-        self.logger.debug("Work complete: [{}]".format(lease.id))
+        self.__logger.debug("Work complete: [%s]", lease.id)
         self.lease_completed(lease)


@@ -32,7 +32,7 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_p
 class ActionCacheService(remote_execution_pb2_grpc.ActionCacheServicer):

     def __init__(self, server):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._instances = {}

@@ -42,34 +42,38 @@ class ActionCacheService(remote_execution_pb2_grpc.ActionCacheServicer):
         self._instances[name] = instance

     def GetActionResult(self, request, context):
+        self.__logger.debug("GetActionResult request from [%s]", context.peer())
+
         try:
             instance = self._get_instance(request.instance_name)
             return instance.get_action_result(request.action_digest)

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         except NotFoundError as e:
-            self.logger.debug(e)
+            self.__logger.debug(e)
             context.set_code(grpc.StatusCode.NOT_FOUND)

         return remote_execution_pb2.ActionResult()

     def UpdateActionResult(self, request, context):
+        self.__logger.debug("UpdateActionResult request from [%s]", context.peer())
+
         try:
             instance = self._get_instance(request.instance_name)
             instance.update_action_result(request.action_digest, request.action_result)
             return request.action_result

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         except NotImplementedError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_code(grpc.StatusCode.UNIMPLEMENTED)

         return remote_execution_pb2.ActionResult()
@@ -31,7 +31,7 @@ from ..job import LeaseState
 class BotsInterface:

     def __init__(self, scheduler):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._bot_ids = {}
         self._bot_sessions = {}
@@ -64,7 +64,7 @@ class BotsInterface:

         self._bot_ids[name] = bot_id
         self._bot_sessions[name] = bot_session
-        self.logger.info("Created bot session name=[{}] with bot_id=[{}]".format(name, bot_id))
+        self.__logger.info("Created bot session name=[%s] with bot_id=[%s]", name, bot_id)

         # TODO: Send worker capabilities to the scheduler!
         leases = self._scheduler.request_job_leases({})
@@ -77,7 +77,7 @@ class BotsInterface:
         """ Client updates the server. Any changes in state to the Lease should be
         registered server side. Assigns available leases with work.
         """
-        self.logger.debug("Updating bot session name={}".format(name))
+        self.__logger.debug("Updating bot session name=[%s]", name)
         self._check_bot_ids(bot_session.bot_id, name)

         leases = filter(None, [self.check_states(lease) for lease in bot_session.leases])
@@ -173,12 +173,12 @@ class BotsInterface:
         if bot_id is None:
             raise InvalidArgumentError("Bot id does not exist: [{}]".format(name))

-        self.logger.debug("Attempting to close [{}] with name: [{}]".format(bot_id, name))
+        self.__logger.debug("Attempting to close [%s] with name: [%s]", bot_id, name)
         for lease in self._bot_sessions[name].leases:
             if lease.state != LeaseState.COMPLETED.value:
                 # TODO: Be wary here, may need to handle rejected leases in future
                 self._scheduler.retry_job(lease.id)

-        self.logger.debug("Closing bot session: [{}]".format(name))
+        self.__logger.debug("Closing bot session: [%s]", name)
         self._bot_ids.pop(name)
-        self.logger.info("Closed bot [{}] with name: [{}]".format(bot_id, name))
+        self.__logger.info("Closed bot [%s] with name: [%s]", bot_id, name)
@@ -33,7 +33,7 @@ from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2_grp
 class BotsService(bots_pb2_grpc.BotsServicer):

     def __init__(self, server):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._instances = {}

@@ -43,6 +43,8 @@ class BotsService(bots_pb2_grpc.BotsServicer):
         self._instances[name] = instance

     def CreateBotSession(self, request, context):
+        self.__logger.debug("CreateBotSession request from [%s]", context.peer())
+
         try:
             parent = request.parent
             instance = self._get_instance(request.parent)
@@ -50,13 +52,15 @@ class BotsService(bots_pb2_grpc.BotsServicer):
                                                       request.bot_session)

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         return bots_pb2.BotSession()

     def UpdateBotSession(self, request, context):
+        self.__logger.debug("UpdateBotSession request from [%s]", context.peer())
+
         try:
             names = request.name.split("/")
             # Operation name should be in format:
@@ -68,23 +72,25 @@ class BotsService(bots_pb2_grpc.BotsServicer):
                                                       request.bot_session)

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         except OutOfSyncError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.DATA_LOSS)

         except NotImplementedError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.UNIMPLEMENTED)

         return bots_pb2.BotSession()

     def PostBotEventTemp(self, request, context):
+        self.__logger.debug("PostBotEventTemp request from [%s]", context.peer())
+
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         return Empty()

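Note: the debug lines added to each servicer method log context.peer(); in gRPC Python, ServicerContext.peer() returns a string identifying the client's transport address (typically of the form "ipv4:host:port"). A minimal sketch of the pattern, not taken from the repository (the class and method names are made up):

    import logging

    class ExampleServicer:
        def __init__(self):
            self.__logger = logging.getLogger(__name__)

        def SomeCall(self, request, context):
            # context is a grpc.ServicerContext; peer() identifies the caller.
            self.__logger.debug("SomeCall request from [%s]", context.peer())
            return request
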
@@ -19,6 +19,8 @@ Storage Instances
 Instances of CAS and ByteStream
 """

+import logging
+
 from buildgrid._exceptions import InvalidArgumentError, NotFoundError, OutOfRangeError
 from buildgrid._protos.google.bytestream import bytestream_pb2
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as re_pb2
@@ -28,6 +30,8 @@ from buildgrid.settings import HASH
 class ContentAddressableStorageInstance:

     def __init__(self, storage):
+        self.__logger = logging.getLogger(__name__)
+
         self._storage = storage

     def register_instance_with_server(self, instance_name, server):
@@ -60,6 +64,8 @@ class ByteStreamInstance:
     BLOCK_SIZE = 1 * 1024 * 1024  # 1 MB block size

     def __init__(self, storage):
+        self.__logger = logging.getLogger(__name__)
+
         self._storage = storage

     def register_instance_with_server(self, instance_name, server):
@@ -35,7 +35,7 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_p
 class ContentAddressableStorageService(remote_execution_pb2_grpc.ContentAddressableStorageServicer):

     def __init__(self, server):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._instances = {}

@@ -45,42 +45,48 @@ class ContentAddressableStorageService(remote_execution_pb2_grpc.ContentAddressa
         self._instances[name] = instance

     def FindMissingBlobs(self, request, context):
+        self.__logger.debug("FindMissingBlobs request from [%s]", context.peer())
+
         try:
-            self.logger.debug("FindMissingBlobs request: [{}]".format(request))
             instance = self._get_instance(request.instance_name)
             response = instance.find_missing_blobs(request.blob_digests)
-            self.logger.debug("FindMissingBlobs response: [{}]".format(response))
+
             return response

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         return remote_execution_pb2.FindMissingBlobsResponse()

     def BatchUpdateBlobs(self, request, context):
+        self.__logger.debug("BatchUpdateBlobs request from [%s]", context.peer())
+
         try:
-            self.logger.debug("BatchUpdateBlobs request: [{}]".format(request))
             instance = self._get_instance(request.instance_name)
             response = instance.batch_update_blobs(request.requests)
-            self.logger.debug("FindMissingBlobs response: [{}]".format(response))
+
             return response

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         return remote_execution_pb2.BatchReadBlobsResponse()

     def BatchReadBlobs(self, request, context):
+        self.__logger.debug("BatchReadBlobs request from [%s]", context.peer())
+
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')

         return remote_execution_pb2.BatchReadBlobsResponse()

     def GetTree(self, request, context):
+        self.__logger.debug("GetTree request from [%s]", context.peer())
+
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')

@@ -97,7 +103,7 @@ class ContentAddressableStorageService(remote_execution_pb2_grpc.ContentAddressa
 class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):

     def __init__(self, server):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._instances = {}

@@ -107,8 +113,9 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
         self._instances[name] = instance

     def Read(self, request, context):
+        self.__logger.debug("Read request from [%s]", context.peer())
+
         try:
-            self.logger.debug("Read request: [{}]".format(request))
             path = request.resource_name.split("/")
             instance_name = path[0]

@@ -131,30 +138,29 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):
                                      request.read_limit)

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
             yield bytestream_pb2.ReadResponse()

         except NotFoundError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.NOT_FOUND)
             yield bytestream_pb2.ReadResponse()

         except OutOfRangeError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.OUT_OF_RANGE)
             yield bytestream_pb2.ReadResponse()

-        self.logger.debug("Read finished.")
-
     def Write(self, requests, context):
+        self.__logger.debug("Write request from [%s]", context.peer())
+
         try:
             requests, request_probe = tee(requests, 2)
             first_request = next(request_probe)
-            self.logger.debug("First write request: [{}]".format(first_request))

             path = first_request.resource_name.split("/")

@@ -175,21 +181,21 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer):

             instance = self._get_instance(instance_name)
             response = instance.write(requests)
-            self.logger.debug("Write response: [{}]".format(response))
+
             return response

         except NotImplementedError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.UNIMPLEMENTED)

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         except NotFoundError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.NOT_FOUND)

@@ -20,6 +20,7 @@ DiskStorage
 A CAS storage provider that stores files as blobs on disk.
 """

+import logging
 import os
 import tempfile

@@ -29,6 +30,8 @@ from .storage_abc import StorageABC
 class DiskStorage(StorageABC):

     def __init__(self, path):
+        self.__logger = logging.getLogger(__name__)
+
         if not os.path.isabs(path):
             self.__root_path = os.path.abspath(path)
         else:
@@ -43,6 +43,8 @@ class _NullBytesIO(io.BufferedIOBase):
 class LRUMemoryCache(StorageABC):

     def __init__(self, limit):
+        self.__logger = logging.getLogger(__name__)
+
         self._limit = limit
         self._storage = collections.OrderedDict()
         self._bytes_stored = 0
@@ -35,7 +35,7 @@ from .storage_abc import StorageABC
 class RemoteStorage(StorageABC):

     def __init__(self, channel, instance_name):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self.instance_name = instance_name
         self.channel = channel
@@ -21,6 +21,7 @@ A storage provider that stores data in an Amazon S3 bucket.
 """

 import io
+import logging

 import boto3
 from botocore.exceptions import ClientError
@@ -31,6 +32,8 @@ from .storage_abc import StorageABC
 class S3Storage(StorageABC):

     def __init__(self, bucket, **kwargs):
+        self.__logger = logging.getLogger(__name__)
+
         self._bucket = bucket
         self._s3 = boto3.resource('s3', **kwargs)

@@ -26,6 +26,7 @@ the fallback.
 """

 import io
+import logging

 from .storage_abc import StorageABC

@@ -118,6 +119,8 @@ class _CachingTee(io.RawIOBase):
 class WithCacheStorage(StorageABC):

     def __init__(self, cache, fallback):
+        self.__logger = logging.getLogger(__name__)
+
         self._cache = cache
         self._fallback = fallback

@@ -37,9 +37,9 @@ from .operations.instance import OperationsInstance
 class ExecutionController:

     def __init__(self, action_cache=None, storage=None):
-        scheduler = Scheduler(action_cache)
+        self.__logger = logging.getLogger(__name__)

-        self.logger = logging.getLogger(__name__)
+        scheduler = Scheduler(action_cache)

         self._execution_instance = ExecutionInstance(scheduler, storage)
         self._bots_interface = BotsInterface(scheduler)
@@ -30,7 +30,8 @@ from ..job import Job
 class ExecutionInstance:

     def __init__(self, scheduler, storage):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)
+
         self._storage = storage
         self._scheduler = scheduler

@@ -34,7 +34,8 @@ from buildgrid._protos.google.longrunning import operations_pb2
 class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):

     def __init__(self, server):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)
+
         self._instances = {}
         remote_execution_pb2_grpc.add_ExecutionServicer_to_server(self, server)

@@ -42,6 +43,8 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
         self._instances[name] = instance

     def Execute(self, request, context):
+        self.__logger.debug("Execute request from [%s]", context.peer())
+
         try:
             message_queue = queue.Queue()
             instance = self._get_instance(request.instance_name)
@@ -55,7 +58,7 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
             instanced_op_name = "{}/{}".format(request.instance_name,
                                                operation.name)

-            self.logger.info("Operation name: [{}]".format(instanced_op_name))
+            self.__logger.info("Operation name: [%s]", instanced_op_name)

             for operation in instance.stream_operation_updates(message_queue,
                                                                operation.name):
@@ -65,18 +68,20 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
                 yield op

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
             yield operations_pb2.Operation()

         except FailedPreconditionError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
             yield operations_pb2.Operation()

     def WaitExecution(self, request, context):
+        self.__logger.debug("WaitExecution request from [%s]", context.peer())
+
         try:
             names = request.name.split("/")

@@ -101,7 +106,7 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):
                 yield op

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
             yield operations_pb2.Operation()
@@ -13,9 +13,9 @@
 # limitations under the License.


+from concurrent import futures
 import logging
 import os
-from concurrent import futures

 import grpc

@@ -40,8 +40,7 @@ class BuildGridServer:
         Args:
             max_workers (int, optional): A pool of max worker threads.
         """
-
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         if max_workers is None:
             # Use max_workers default from Python 3.5+
@@ -80,11 +79,11 @@ class BuildGridServer:
             credentials (:obj:`grpc.ChannelCredentials`): Credentials object.
         """
         if credentials is not None:
-            self.logger.info("Adding secure connection on: [{}]".format(address))
+            self.__logger.info("Adding secure connection on: [%s]", address)
             self._server.add_secure_port(address, credentials)

         else:
-            self.logger.info("Adding insecure connection on [{}]".format(address))
+            self.__logger.info("Adding insecure connection on [%s]", address)
             self._server.add_insecure_port(address)

     def add_execution_instance(self, instance, instance_name):
@@ -27,7 +27,7 @@ from buildgrid._protos.google.longrunning import operations_pb2
 class Job:

     def __init__(self, action, action_digest):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._name = str(uuid.uuid4())
         self._action = remote_execution_pb2.Action()
@@ -28,7 +28,8 @@ from buildgrid._protos.google.longrunning import operations_pb2
 class OperationsInstance:

     def __init__(self, scheduler):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)
+
         self._scheduler = scheduler

     def register_instance_with_server(self, instance_name, server):
@@ -32,7 +32,7 @@ from buildgrid._protos.google.longrunning import operations_pb2_grpc, operations
 class OperationsService(operations_pb2_grpc.OperationsServicer):

     def __init__(self, server):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._instances = {}

@@ -42,6 +42,8 @@ class OperationsService(operations_pb2_grpc.OperationsServicer):
         self._instances[name] = instance

     def GetOperation(self, request, context):
+        self.__logger.debug("GetOperation request from [%s]", context.peer())
+
         try:
             name = request.name

@@ -56,13 +58,15 @@ class OperationsService(operations_pb2_grpc.OperationsServicer):
             return op

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         return operations_pb2.Operation()

     def ListOperations(self, request, context):
+        self.__logger.debug("ListOperations request from [%s]", context.peer())
+
         try:
             # The request name should be the collection name
             # In our case, this is just the instance_name
@@ -79,13 +83,15 @@ class OperationsService(operations_pb2_grpc.OperationsServicer):
             return result

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         return operations_pb2.ListOperationsResponse()

     def DeleteOperation(self, request, context):
+        self.__logger.debug("DeleteOperation request from [%s]", context.peer())
+
         try:
             name = request.name

@@ -96,13 +102,15 @@ class OperationsService(operations_pb2_grpc.OperationsServicer):
             instance.delete_operation(operation_name)

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         return Empty()

     def CancelOperation(self, request, context):
+        self.__logger.debug("CancelOperation request from [%s]", context.peer())
+
         try:
             name = request.name

@@ -113,12 +121,12 @@ class OperationsService(operations_pb2_grpc.OperationsServicer):
             instance.cancel_operation(operation_name)

         except NotImplementedError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.UNIMPLEMENTED)

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

@@ -25,7 +25,7 @@ from buildgrid._protos.buildstream.v2 import buildstream_pb2_grpc
 class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer):

     def __init__(self, server):
-        self.logger = logging.getLogger(__name__)
+        self.__logger = logging.getLogger(__name__)

         self._instances = {}

@@ -35,6 +35,8 @@ class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer):
         self._instances[name] = instance

     def GetReference(self, request, context):
+        self.__logger.debug("GetReference request from [%s]", context.peer())
+
         try:
             instance = self._get_instance(request.instance_name)
             digest = instance.get_digest_reference(request.key)
@@ -43,17 +45,19 @@ class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer):
             return response

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

         except NotFoundError as e:
-            self.logger.debug(e)
+            self.__logger.debug(e)
             context.set_code(grpc.StatusCode.NOT_FOUND)

         return buildstream_pb2.GetReferenceResponse()

     def UpdateReference(self, request, context):
+        self.__logger.debug("UpdateReference request from [%s]", context.peer())
+
         try:
             instance = self._get_instance(request.instance_name)
             digest = request.digest
@@ -62,7 +66,7 @@ class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer):
                 instance.update_reference(key, digest)

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

@@ -72,13 +76,15 @@ class ReferenceStorageService(buildstream_pb2_grpc.ReferenceStorageServicer):
         return buildstream_pb2.UpdateReferenceResponse()

     def Status(self, request, context):
+        self.__logger.debug("Status request from [%s]", context.peer())
+
         try:
             instance = self._get_instance(request.instance_name)
             allow_updates = instance.allow_updates
             return buildstream_pb2.StatusResponse(allow_updates=allow_updates)

         except InvalidArgumentError as e:
-            self.logger.error(e)
+            self.__logger.error(e)
             context.set_details(str(e))
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

@@ -23,6 +23,7 @@ For a given key, it
 """

 import collections
+import logging

 from buildgrid._exceptions import NotFoundError
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
@@ -38,6 +39,8 @@ class ReferenceCache:
             max_cached_refs (int): maximum number of entries to be stored.
             allow_updates (bool): allow the client to write to storage
         """
+        self.__logger = logging.getLogger(__name__)
+
         self._allow_updates = allow_updates
         self._storage = storage
         self._max_cached_refs = max_cached_refs
@@ -20,6 +20,7 @@ Schedules jobs.
 """

 from collections import deque
+import logging

 from buildgrid._exceptions import NotFoundError

@@ -31,6 +32,8 @@ class Scheduler:
     MAX_N_TRIES = 5

     def __init__(self, action_cache=None):
+        self.__logger = logging.getLogger(__name__)
+
         self._action_cache = action_cache
         self.jobs = {}
         self.queue = deque()
@@ -89,7 +89,7 @@ def test_bytestream_read(mocked, data_to_read, instance):
     request.resource_name += "blobs/{}/{}".format(HASH(data_to_read).hexdigest(), len(data_to_read))

     data = b""
-    for response in servicer.Read(request, None):
+    for response in servicer.Read(request, context):
         data += response.data
     assert data == data_to_read

@@ -111,7 +111,7 @@ def test_bytestream_read_many(mocked, instance):
     request.resource_name += "blobs/{}/{}".format(HASH(data_to_read).hexdigest(), len(data_to_read))

     data = b""
-    for response in servicer.Read(request, None):
+    for response in servicer.Read(request, context):
         data += response.data
     assert data == data_to_read

@@ -137,7 +137,7 @@ def test_bytestream_write(mocked, instance, extra_data):
         bytestream_pb2.WriteRequest(data=b'def', write_offset=3, finish_write=True)
     ]

-    response = servicer.Write(requests, None)
+    response = servicer.Write(requests, context)
     assert response.committed_size == 6
     assert len(storage.data) == 1
     assert (hash_, 6) in storage.data

@@ -178,7 +178,7 @@ def test_cas_find_missing_blobs(mocked, instance):
         re_pb2.Digest(hash=HASH(b'ghij').hexdigest(), size_bytes=4)
     ]
     request = re_pb2.FindMissingBlobsRequest(instance_name=instance, blob_digests=digests)
-    response = servicer.FindMissingBlobs(request, None)
+    response = servicer.FindMissingBlobs(request, context)
     assert len(response.missing_blob_digests) == 1
     assert response.missing_blob_digests[0] == digests[1]

@@ -201,7 +201,7 @@ def test_cas_batch_update_blobs(mocked, instance):
     ]

     request = re_pb2.BatchUpdateBlobsRequest(instance_name=instance, requests=update_requests)
-    response = servicer.BatchUpdateBlobs(request, None)
+    response = servicer.BatchUpdateBlobs(request, context)
     assert len(response.responses) == 2

     for blob_response in response.responses:
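Note: because the servicers now call context.peer() before doing any work, the tests pass a context object instead of None. The actual context fixture is not shown in this diff; a minimal stand-in compatible with the calls made here might look like the sketch below (an assumption for illustration, not the project's real fixture):

    class FakeContext:
        """Bare-bones stand-in for grpc.ServicerContext used in unit tests."""

        def peer(self):
            return "ipv4:127.0.0.1:0"

        def set_code(self, code):
            self.code = code

        def set_details(self, details):
            self.details = details

    context = FakeContext()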
|