Martin Blanchard pushed to branch mablanch/61-bazel-support at BuildGrid / buildgrid
Commits:
- fdc3a053 by Martin Blanchard at 2018-08-28T10:01:44Z
- 63b21827 by Rohit at 2018-08-28T13:08:53Z
- 9e482ddb by Martin Blanchard at 2018-08-28T13:21:04Z
- 96e01db8 by Martin Blanchard at 2018-08-28T13:21:04Z
- 07c1bbb3 by Martin Blanchard at 2018-08-28T13:21:04Z
- d04326c2 by Martin Blanchard at 2018-08-28T13:21:04Z
- ca0e7457 by Martin Blanchard at 2018-08-28T13:21:04Z
- bbc1854e by Martin Blanchard at 2018-08-28T13:21:04Z
5 changed files:
- buildgrid/_app/bots/temp_directory.py
- buildgrid/server/execution/execution_service.py
- buildgrid/server/scheduler.py
- buildgrid/utils.py
- setup.py
Changes:
buildgrid/_app/bots/temp_directory.py:

@@ -19,70 +19,95 @@ import tempfile

 from google.protobuf import any_pb2

-from buildgrid.utils import read_file, create_digest, write_fetch_directory, parse_to_pb2_from_fetch
+from buildgrid.utils import output_file_maker, output_directory_maker, write_fetch_directory, parse_to_pb2_from_fetch
 from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 from buildgrid._protos.google.bytestream import bytestream_pb2_grpc


 def work_temp_directory(context, lease):
-    """ Bot downloads directories and files into a temp directory,
-    then uploads results back to CAS
+    """Executes a lease for a build action, using host tools.
     """

-    parent = context.parent
+    instance_name = context.parent
     stub_bytestream = bytestream_pb2_grpc.ByteStreamStub(context.channel)

     action_digest = remote_execution_pb2.Digest()
     lease.payload.Unpack(action_digest)

-    action = remote_execution_pb2.Action()
+    action = parse_to_pb2_from_fetch(remote_execution_pb2.Action(),
+                                     stub_bytestream, action_digest, instance_name)

-    action = parse_to_pb2_from_fetch(action, stub_bytestream, action_digest, parent)
+    with tempfile.TemporaryDirectory() as temp_directory:
+        command = parse_to_pb2_from_fetch(remote_execution_pb2.Command(),
+                                          stub_bytestream, action.command_digest, instance_name)

-    with tempfile.TemporaryDirectory() as temp_dir:
+        write_fetch_directory(temp_directory, stub_bytestream,
+                              action.input_root_digest, instance_name)

-        command = remote_execution_pb2.Command()
-        command = parse_to_pb2_from_fetch(command, stub_bytestream, action.command_digest, parent)
-
-        arguments = "cd {} &&".format(temp_dir)
+        execution_environment = os.environ.copy()
+        for variable in command.environment_variables:
+            if variable.name not in ['PATH', 'PWD']:
+                execution_environment[variable.name] = variable.value

+        command_arguments = list()
         for argument in command.arguments:
-            arguments += " {}".format(argument)
-
-        context.logger.info(arguments)
-
-        write_fetch_directory(temp_dir, stub_bytestream, action.input_root_digest, parent)
-
-        proc = subprocess.Popen(arguments,
-                                shell=True,
-                                stdin=subprocess.PIPE,
-                                stdout=subprocess.PIPE)
-
-        # TODO: Should return the std_out to the user
-        proc.communicate()
-
-        result = remote_execution_pb2.ActionResult()
-        requests = []
-        for output_file in command.output_files:
-            path = os.path.join(temp_dir, output_file)
-            chunk = read_file(path)
-
-            digest = create_digest(chunk)
-
-            result.output_files.extend([remote_execution_pb2.OutputFile(path=output_file,
-                                                                        digest=digest)])
-
-            requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
-                digest=digest, data=chunk))
-
-        request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=parent,
-                                                               requests=requests)
+            command_arguments.append(argument.strip())
+
+        working_directory = None
+        if command.working_directory:
+            working_directory = os.path.join(temp_directory,
+                                             command.working_directory)
+            os.makedirs(working_directory, exist_ok=True)
+        else:
+            working_directory = temp_directory
+
+        # Ensure that the output files structure exists:
+        for output_path in command.output_files:
+            directory_path = os.path.join(working_directory,
+                                          os.path.dirname(output_path))
+            os.makedirs(directory_path, exist_ok=True)
+
+        process = subprocess.Popen(command_arguments,
+                                   cwd=working_directory,
+                                   universal_newlines=True,
+                                   env=execution_environment,
+                                   stdin=subprocess.PIPE,
+                                   stdout=subprocess.PIPE)
+        # TODO: Should return the stdout and stderr to the user.
+        process.communicate()
+
+        update_requests = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name)
+        action_result = remote_execution_pb2.ActionResult()
+
+        for output_path in command.output_files:
+            file_path = os.path.join(working_directory, output_path)
+            # Missing outputs should simply be omitted in ActionResult:
+            if not os.path.isfile(file_path):
+                continue
+
+            # OutputFile.path should be relative to the working directory:
+            output_file, update_request = output_file_maker(file_path, working_directory)
+
+            action_result.output_files.extend([output_file])
+            update_requests.requests.extend([update_request])
+
+        for output_path in command.output_directories:
+            directory_path = os.path.join(working_directory, output_path)
+            # Missing outputs should simply be omitted in ActionResult:
+            if not os.path.isdir(directory_path):
+                continue
+
+            # OutputDirectory.path should be relative to the working directory:
+            output_directory, directory_requests = output_directory_maker(directory_path, working_directory)
+
+            action_result.output_directories.extend([output_directory])
+            update_requests.requests.extend(directory_requests)

         stub_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
-        stub_cas.BatchUpdateBlobs(request)
+        stub_cas.BatchUpdateBlobs(update_requests)

         result_any = any_pb2.Any()
-        result_any.Pack(result)
+        result_any.Pack(action_result)

         lease.result.CopyFrom(result_any)
buildgrid/server/execution/execution_service.py:

@@ -86,6 +86,11 @@ class ExecutionService(remote_execution_pb2_grpc.ExecutionServicer):

             yield operations_pb2.Operation()

     def _get_instance(self, name):
+        # If the client does not support multiple instances, it may omit the
+        # instance name request parameter, so better map it to our default:
+        if not name and len(self._instances) == 1:
+            name = 'main'
+
         try:
             return self._instances[name]
buildgrid/server/scheduler.py:

@@ -90,7 +90,7 @@ class Scheduler:

             job.update_execute_stage(ExecuteStage.COMPLETED)
             self.jobs[name] = job
             if not job.do_not_cache and self._action_cache is not None:
-                self._action_cache.put_action_result(job.action_digest, result)
+                self._action_cache.update_action_result(job.action_digest, result)

     def get_operations(self):
         response = operations_pb2.ListOperationsResponse()
buildgrid/utils.py:

@@ -13,6 +13,7 @@

 # limitations under the License.


+from operator import attrgetter
 import os

 from buildgrid.settings import HASH
@@ -31,30 +32,59 @@ def gen_fetch_blob(stub, digest, instance_name=""):

         yield response.data


-def write_fetch_directory(directory, stub, digest, instance_name=""):
-    """ Given a directory digest, fetches files and writes them to a directory
+def write_fetch_directory(root_directory, stub, digest, instance_name=None):
+    """Locally replicates a directory from CAS.
+
+    Args:
+        root_directory (str): local directory to populate.
+        stub (ByteStreamStub): gRPC stub for CAS communication.
+        digest (Digest): digest for the directory to fetch from CAS.
+        instance_name (str, optional): farm instance name to query data from.
     """
-    # TODO: Extend to symlinks and inner directories
-    # pathlib.Path('/my/directory').mkdir(parents=True, exist_ok=True)
+    if not os.path.isabs(root_directory):
+        root_directory = os.path.abspath(root_directory)
+    if not os.path.exists(root_directory):
+        os.makedirs(root_directory, exist_ok=True)

-    directory_pb2 = remote_execution_pb2.Directory()
-    directory_pb2 = parse_to_pb2_from_fetch(directory_pb2, stub, digest, instance_name)
+    directory = parse_to_pb2_from_fetch(remote_execution_pb2.Directory(),
+                                        stub, digest, instance_name)
+
+    for directory_node in directory.directories:
+        child_path = os.path.join(root_directory, directory_node.name)
+
+        write_fetch_directory(child_path, stub, directory_node.digest, instance_name)
+
+    for file_node in directory.files:
+        child_path = os.path.join(root_directory, file_node.name)
+
+        with open(child_path, 'wb') as child_file:
+            write_fetch_blob(child_file, stub, file_node.digest, instance_name)
+
+    for symlink_node in directory.symlinks:
+        child_path = os.path.join(root_directory, symlink_node.name)
+
+        if os.path.isabs(symlink_node.target):
+            continue  # No out-of-temp-directory links for now.
+        target_path = os.path.join(root_directory, symlink_node.target)
+
+        os.symlink(target_path, child_path)

-    for file_node in directory_pb2.files:
-        path = os.path.join(directory, file_node.name)
-        with open(path, 'wb') as f:
-            write_fetch_blob(f, stub, file_node.digest, instance_name)

+def write_fetch_blob(target_file, stub, digest, instance_name=None):
+    """Extracts a blob from CAS into a local file.

-def write_fetch_blob(out, stub, digest, instance_name=""):
-    """ Given an output buffer, fetches blob and writes to buffer
+    Args:
+        target_file (file): open binary file object to write to.
+        stub (ByteStreamStub): gRPC stub for CAS communication.
+        digest (Digest): digest for the blob to fetch from CAS.
+        instance_name (str, optional): farm instance name to query data from.
     """

     for stream in gen_fetch_blob(stub, digest, instance_name):
-        out.write(stream)
+        target_file.write(stream)
+    target_file.flush()

-    out.flush()
-    assert digest.size_bytes == os.fstat(out.fileno()).st_size
+    assert digest.size_bytes == os.fstat(target_file.fileno()).st_size


 def parse_to_pb2_from_fetch(pb2, stub, digest, instance_name=""):
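For context, replicating an input root locally amounts to one call; a hedged usage sketch (the gRPC channel, the fetched `action`, and the 'main' instance name are assumptions):

    # Hypothetical usage sketch: replicate an Action's input root from CAS.
    import tempfile

    from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
    from buildgrid.utils import write_fetch_directory

    stub = bytestream_pb2_grpc.ByteStreamStub(channel)  # channel: existing grpc.Channel
    with tempfile.TemporaryDirectory() as root_directory:
        write_fetch_directory(root_directory, stub, action.input_root_digest, 'main')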
@@ -70,7 +100,15 @@ def parse_to_pb2_from_fetch(pb2, stub, digest, instance_name=""):

 def create_digest(bytes_to_digest):
-    """ Creates a hash based on the hex digest and returns the digest
+    """Computes the :obj:`Digest` of a piece of data.
+
+    The :obj:`Digest` of a blob is a function of its hash **and** size.
+
+    Args:
+        bytes_to_digest (bytes): byte data to digest.
+
+    Returns:
+        :obj:`Digest`: The gRPC :obj:`Digest` for the given byte data.
     """
     return remote_execution_pb2.Digest(hash=HASH(bytes_to_digest).hexdigest(),
                                        size_bytes=len(bytes_to_digest))
@@ -107,6 +145,183 @@ def file_maker(file_path, file_digest):

                                           is_executable=os.access(file_path, os.X_OK))


-def read_file(read):
-    with open(read, 'rb') as f:
-        return f.read()
+def directory_maker(directory_path):
+    """Creates a gRPC :obj:`Directory` from a local directory.
+
+    Args:
+        directory_path (str): absolute or relative path to a local directory.
+
+    Returns:
+        :obj:`Directory`, list of :obj:`Directory`, list of
+        :obj:`BatchUpdateBlobsRequest`: Tuple of a new gRPC :obj:`Directory`
+        for the directory pointed to by `directory_path`, its child
+        directories, and the corresponding :obj:`BatchUpdateBlobsRequest` list
+        for CAS upload. The last request in the list is guaranteed to be the
+        :obj:`Directory`'s own request.
+    """
+    if not os.path.isabs(directory_path):
+        directory_path = os.path.abspath(directory_path)
+
+    child_directories = list()
+    update_requests = list()
+
+    files, directories, symlinks = list(), list(), list()
+    for directory_entry in os.scandir(directory_path):
+        # Create a FileNode and corresponding BatchUpdateBlobsRequest:
+        if directory_entry.is_file(follow_symlinks=False):
+            node_blob = read_file(directory_entry.path)
+            node_digest = create_digest(node_blob)
+
+            node = remote_execution_pb2.FileNode()
+            node.name = directory_entry.name
+            node.digest.CopyFrom(node_digest)
+            node.is_executable = os.access(directory_entry.path, os.X_OK)
+
+            node_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=node_digest)
+            node_request.data = node_blob
+
+            update_requests.append(node_request)
+            files.append(node)
+
+        # Create a DirectoryNode and corresponding BatchUpdateBlobsRequest:
+        elif directory_entry.is_dir(follow_symlinks=False):
+            node_directory, node_children, node_requests = directory_maker(directory_entry.path)
+
+            node = remote_execution_pb2.DirectoryNode()
+            node.name = directory_entry.name
+            node.digest.CopyFrom(node_requests[-1].digest)
+
+            child_directories.extend(node_children)
+            child_directories.append(node_directory)
+            update_requests.extend(node_requests)
+            directories.append(node)
+
+        # Create a SymlinkNode if necessary:
+        elif os.path.islink(directory_entry.path):
+            node_target = os.readlink(directory_entry.path)
+
+            node = remote_execution_pb2.SymlinkNode()
+            node.name = directory_entry.name
+            node.target = node_target
+
+            symlinks.append(node)
+
+    directory = remote_execution_pb2.Directory()
+    directory.files.extend(sorted(files, key=attrgetter('name')))
+    directory.directories.extend(sorted(directories, key=attrgetter('name')))
+    directory.symlinks.extend(sorted(symlinks, key=attrgetter('name')))
+
+    directory_blob = directory.SerializeToString()
+    directory_digest = create_digest(directory_blob)
+
+    update_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=directory_digest)
+    update_request.data = directory_blob
+
+    update_requests.append(update_request)
+
+    return directory, child_directories, update_requests
+
+
+def read_file(file_path):
+    """Loads raw file content in memory.
+
+    Args:
+        file_path (str): path to the target file.
+
+    Returns:
+        bytes: Raw file's content until EOF.
+
+    Raises:
+        OSError: If `file_path` does not exist or is not readable.
+    """
+    with open(file_path, 'rb') as byte_file:
+        return byte_file.read()
+
+
+def output_file_maker(file_path, input_path):
+    """Creates an :obj:`OutputFile` from a local file.
+
+    `file_path` **must** point inside or be relative to `input_path`.
+
+    Args:
+        file_path (str): absolute or relative path to a local file.
+        input_path (str): absolute or relative path to the input root directory.
+
+    Returns:
+        :obj:`OutputFile`, :obj:`BatchUpdateBlobsRequest`: Tuple of a new gRPC
+        :obj:`OutputFile` object for the file pointed to by `file_path` and the
+        corresponding :obj:`BatchUpdateBlobsRequest` for CAS upload.
+    """
+    if not os.path.isabs(file_path):
+        file_path = os.path.abspath(file_path)
+    if not os.path.isabs(input_path):
+        input_path = os.path.abspath(input_path)
+
+    file_blob = read_file(file_path)
+    file_digest = create_digest(file_blob)
+
+    output_file = remote_execution_pb2.OutputFile(digest=file_digest)
+    output_file.path = os.path.relpath(file_path, start=input_path)
+    output_file.is_executable = os.access(file_path, os.X_OK)
+
+    update_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=file_digest)
+    update_request.data = file_blob
+
+    return output_file, update_request
+
+
+def output_directory_maker(directory_path, working_path):
+    """Creates a gRPC :obj:`OutputDirectory` from a local directory.
+
+    `directory_path` **must** point inside or be relative to `working_path`.
+
+    Args:
+        directory_path (str): absolute or relative path to a local directory.
+        working_path (str): absolute or relative path to the working directory.
+
+    Returns:
+        :obj:`OutputDirectory`, list of :obj:`BatchUpdateBlobsRequest`: Tuple
+        of a new gRPC :obj:`OutputDirectory` for the directory pointed to by
+        `directory_path` and the corresponding list of
+        :obj:`BatchUpdateBlobsRequest` for CAS upload.
+    """
+    if not os.path.isabs(directory_path):
+        directory_path = os.path.abspath(directory_path)
+    if not os.path.isabs(working_path):
+        working_path = os.path.abspath(working_path)
+
+    tree, update_requests = tree_maker(directory_path)
+
+    output_directory = remote_execution_pb2.OutputDirectory()
+    output_directory.tree_digest.CopyFrom(update_requests[-1].digest)
+    output_directory.path = os.path.relpath(directory_path, start=working_path)
+
+    output_directory_blob = output_directory.SerializeToString()
+    output_directory_digest = create_digest(output_directory_blob)
+
+    update_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=output_directory_digest)
+    update_request.data = output_directory_blob
+
+    update_requests.append(update_request)
+
+    return output_directory, update_requests
+
+
+def tree_maker(directory_path):
+    """Creates a gRPC :obj:`Tree` from a local directory.
+
+    Args:
+        directory_path (str): absolute or relative path to a local directory.
+
+    Returns:
+        :obj:`Tree`, list of :obj:`BatchUpdateBlobsRequest`: Tuple of a new
+        gRPC :obj:`Tree` for the directory pointed to by `directory_path` and
+        the corresponding list of :obj:`BatchUpdateBlobsRequest` for CAS
+        upload.
+
+    The :obj:`BatchUpdateBlobsRequest` list may come in any order. However,
+    its last element is guaranteed to be the :obj:`Tree`'s request.
+    """
+    if not os.path.isabs(directory_path):
+        directory_path = os.path.abspath(directory_path)
+
+    directory, child_directories, update_requests = directory_maker(directory_path)
+
+    tree = remote_execution_pb2.Tree()
+    tree.children.extend(child_directories)
+    tree.root.CopyFrom(directory)
+
+    tree_blob = tree.SerializeToString()
+    tree_digest = create_digest(tree_blob)
+
+    update_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=tree_digest)
+    update_request.data = tree_blob
+
+    update_requests.append(update_request)
+
+    return tree, update_requests
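Taken together, a worker-side usage sketch for packaging outputs (hypothetical paths; helpers as added above):

    # Hypothetical usage sketch: collect outputs for a CAS batch upload.
    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    from buildgrid.utils import output_directory_maker, output_file_maker

    update_requests = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name='main')

    output_file, request = output_file_maker('/tmp/work/out/app.log', '/tmp/work')
    update_requests.requests.extend([request])  # output_file.path == 'out/app.log'

    output_directory, requests = output_directory_maker('/tmp/work/out', '/tmp/work')
    update_requests.requests.extend(requests)   # file/directory blobs, the Tree,
                                                # and the OutputDirectory blob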
setup.py:

@@ -114,8 +114,8 @@ setup(

         'protobuf',
         'grpcio',
         'Click',
-        'boto3',
-        'botocore',
+        'boto3 < 1.8.0',
+        'botocore < 1.11.0',
     ],
     entry_points={
         'console_scripts': [