[Notes] [Git][BuildGrid/buildgrid][mablanch/61-bazel-support] 7 commits: Removed old action_cache.py file




Martin Blanchard pushed to branch mablanch/61-bazel-support at BuildGrid / buildgrid

Commits:

4 changed files:

Changes:

  • buildgrid/_app/bots/temp_directory.py
    @@ -19,7 +19,7 @@ import tempfile
     
     from google.protobuf import any_pb2
     
    -from buildgrid.utils import read_file, create_digest, write_fetch_directory, parse_to_pb2_from_fetch
    +from buildgrid.utils import output_file_maker, output_directory_maker, write_fetch_directory, parse_to_pb2_from_fetch
     from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
     from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
     
    @@ -35,54 +35,81 @@ def work_temp_directory(context, lease):
         action_digest = remote_execution_pb2.Digest()
         lease.payload.Unpack(action_digest)
     
    -    action = remote_execution_pb2.Action()
    +    action = parse_to_pb2_from_fetch(remote_execution_pb2.Action(),
    +                                     stub_bytestream, action_digest, instance_name)
     
    -    action = parse_to_pb2_from_fetch(action, stub_bytestream, action_digest, instance_name)
    +    with tempfile.TemporaryDirectory() as temp_directory:
     
    -    with tempfile.TemporaryDirectory() as temp_dir:
    +        command = parse_to_pb2_from_fetch(remote_execution_pb2.Command(),
    +                                          stub_bytestream, action.command_digest, instance_name)
     
    -        command = remote_execution_pb2.Command()
    -        command = parse_to_pb2_from_fetch(command, stub_bytestream, action.command_digest, instance_name)
    +        write_fetch_directory(temp_directory, stub_bytestream,
    +                              action.input_root_digest, instance_name)
     
    -        arguments = "cd {} &&".format(temp_dir)
    +        execution_environment = os.environ.copy()
    +        for variable in command.environment_variables:
    +            if variable.name not in ['PATH', 'PWD']:
    +                execution_environment[variable.name] = variable.value
     
    +        command_arguments = list()
             for argument in command.arguments:
    -            arguments += " {}".format(argument)
    -
    -        context.logger.info(arguments)
    -
    -        write_fetch_directory(temp_dir, stub_bytestream, action.input_root_digest, instance_name)
    -
    -        proc = subprocess.Popen(arguments,
    -                                shell=True,
    -                                stdin=subprocess.PIPE,
    -                                stdout=subprocess.PIPE)
    -
    -        # TODO: Should return the std_out to the user
    -        proc.communicate()
    -
    -        result = remote_execution_pb2.ActionResult()
    -        requests = []
    -        for output_file in command.output_files:
    -            path = os.path.join(temp_dir, output_file)
    -            chunk = read_file(path)
    -
    -            digest = create_digest(chunk)
    -
    -            result.output_files.extend([remote_execution_pb2.OutputFile(path=output_file,
    -                                                                        digest=digest)])
    -
    -            requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
    -                digest=digest, data=chunk))
    -
    -        request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
    -                                                               requests=requests)
    +            command_arguments.append(argument.strip())
    +
    +        working_directory = None
    +        if command.working_directory:
    +            working_directory = os.path.join(temp_directory,
    +                                             command.working_directory)
    +            os.makedirs(working_directory, exist_ok=True)
    +        else:
    +            working_directory = temp_directory
    +
    +        # Ensure that output files structure exists:
    +        for output_path in command.output_files:
    +            directory_path = os.path.join(working_directory,
    +                                          os.path.dirname(output_path))
    +            os.makedirs(directory_path, exist_ok=True)
    +
    +        process = subprocess.Popen(command_arguments,
    +                                   cwd=working_directory,
    +                                   universal_newlines=True,
    +                                   env=execution_environment,
    +                                   stdin=subprocess.PIPE,
    +                                   stdout=subprocess.PIPE)
    +        # TODO: Should return the stdout and stderr to the user.
    +        process.communicate()
    +
    +        update_requests = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name)
    +        action_result = remote_execution_pb2.ActionResult()
    +
    +        for output_path in command.output_files:
    +            file_path = os.path.join(working_directory, output_path)
    +            # Missing outputs should simply be omitted in ActionResult:
    +            if not os.path.isfile(file_path):
    +                continue
    +
    +            # OutputFile.path should be relative to the working directory:
    +            output_file, update_request = output_file_maker(file_path, working_directory)
    +
    +            action_result.output_files.extend([output_file])
    +            update_requests.requests.extend([update_request])
    +
    +        for output_path in command.output_directories:
    +            directory_path = os.path.join(working_directory, output_path)
    +            # Missing outputs should simply be omitted in ActionResult:
    +            if not os.path.isdir(directory_path):
    +                continue
    +
    +            # OutputDirectory.path should be relative to the working directory:
    +            output_directory, update_request = output_directory_maker(directory_path, working_directory)
    +
    +            action_result.output_directories.extend([output_directory])
    +            update_requests.requests.extend(update_request)
     
             stub_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
    -        stub_cas.BatchUpdateBlobs(request)
    +        stub_cas.BatchUpdateBlobs(update_requests)
     
             result_any = any_pb2.Any()
    -        result_any.Pack(result)
    +        result_any.Pack(action_result)
     
             lease.result.CopyFrom(result_any)
     
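
Taken together, the new worker loop above follows a familiar pattern: copy the local environment and overlay the Command's variables (leaving PATH and PWD untouched), resolve the working directory inside the temporary input root, then run the argument vector without a shell. A minimal standalone sketch of that pattern, using only the standard library (illustrative only, not BuildGrid code; run_remote_command and all of its inputs are invented names):

    import os
    import subprocess
    import tempfile

    def run_remote_command(arguments, environment_variables, working_directory=None):
        # Overlay the remote environment on top of the local one, PATH/PWD excepted.
        environment = os.environ.copy()
        for name, value in environment_variables.items():
            if name not in ['PATH', 'PWD']:
                environment[name] = value

        with tempfile.TemporaryDirectory() as input_root:
            # Resolve the command's working directory inside the input root.
            if working_directory:
                cwd = os.path.join(input_root, working_directory)
                os.makedirs(cwd, exist_ok=True)
            else:
                cwd = input_root

            # No shell involved: the argument vector is passed through as-is.
            process = subprocess.Popen(arguments,
                                       cwd=cwd,
                                       env=environment,
                                       universal_newlines=True,
                                       stdout=subprocess.PIPE)
            stdout, _ = process.communicate()
            return process.returncode, stdout

    # Example: run_remote_command(['ls', '-la'], {'FOO': 'bar'}, 'src')
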

  • buildgrid/server/action_cache.py deleted
    -# Copyright (C) 2018 Bloomberg LP
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#  <http://www.apache.org/licenses/LICENSE-2.0>
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -
    -"""
    -ActionCache
    -===========
    -
    -Implements a simple in-memory action cache.
    -
    -The action cache maps Action to their corresponding ActionResult. An
    -ActionResult may be found in cache, for any given Action, if that action has
    -already been executed.
    -
    -Note:
    -    Action and ActionResult are referenced by their Digest and mapping is stored
    -    in-memory.
    -"""
    -
    -import collections
    -
    -from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as re_pb2
    -
    -
    -class ActionCache:
    -    """In-memory Action to ActionResult associative array.
    -    """
    -
    -    def __init__(self, storage, max_cached_actions):
    -        """Initialises a new ActionCache instance.
    -
    -        Args:
    -            storage (StorageABC): storage backend instance to be used.
    -            max_cached_actions (int): maximun number of entries to cache.
    -        """
    -        self._storage = storage
    -        self._max_cached_actions = max_cached_actions
    -        self._digest_map = collections.OrderedDict()
    -
    -    def get_action_result(self, action_digest):
    -        """Retrieves the cached ActionResult for the given Action digest.
    -
    -        Args:
    -            action_digest (Digest): digest of the Action to query.
    -
    -        Returns:
    -            The cached ActionResult matching the given Action digest or None if
    -            the nothing hass been cached yet for that Action.
    -        """
    -        key = (action_digest.hash, action_digest.size_bytes)
    -        if key in self._digest_map:
    -            action_result = self._storage.get_message(self._digest_map[key],
    -                                                      re_pb2.ActionResult)
    -            if action_result is not None:
    -                if self._blobs_still_exist(action_result):
    -                    self._digest_map.move_to_end(key)
    -                    return action_result
    -            del self._digest_map[key]
    -        return None
    -
    -    def put_action_result(self, action_digest, action_result):
    -        """Stores an ActionResult in cache for the given Action digest.
    -
    -        If the cache size limit has been reached, the oldest cache entries will
    -        be dropped before insertion so that the cache size never exceeds the
    -        maximum numbers of entries allowed.
    -
    -        Args:
    -            action_digest (Digest): digest of the Action to select.
    -            action_result (ActionResult): result object to store.
    -        """
    -        if self._max_cached_actions == 0:
    -            return
    -
    -        while len(self._digest_map) >= self._max_cached_actions:
    -            self._digest_map.popitem(last=False)
    -
    -        key = (action_digest.hash, action_digest.size_bytes)
    -        action_result_digest = self._storage.put_message(action_result)
    -        self._digest_map[key] = action_result_digest
    -
    -    def _blobs_still_exist(self, action_result):
    -        """Checks CAS for ActionResult output blobs existance.
    -
    -        Args:
    -            action_result (ActionResult): ActionResult to search referenced
    -                output blobs for.
    -
    -        Returns:
    -            True if all referenced blobs are present in CAS, False otherwise.
    -        """
    -        blobs_needed = []
    -
    -        for output_file in action_result.output_files:
    -            blobs_needed.append(output_file.digest)
    -
    -        for output_directory in action_result.output_directories:
    -            blobs_needed.append(output_directory.tree_digest)
    -            tree = self._storage.get_message(output_directory.tree_digest,
    -                                             re_pb2.Tree)
    -            if tree is None:
    -                return False
    -            for file_node in tree.root.files:
    -                blobs_needed.append(file_node.digest)
    -            for child in tree.children:
    -                for file_node in child.files:
    -                    blobs_needed.append(file_node.digest)
    -
    -        if action_result.stdout_digest.hash and not action_result.stdout_raw:
    -            blobs_needed.append(action_result.stdout_digest)
    -        if action_result.stderr_digest.hash and not action_result.stderr_raw:
    -            blobs_needed.append(action_result.stderr_digest)
    -
    -        missing = self._storage.missing_blobs(blobs_needed)
    -        return len(missing) == 0
  • buildgrid/server/scheduler.py
    @@ -90,7 +90,7 @@ class Scheduler:
             job.update_execute_stage(ExecuteStage.COMPLETED)
             self.jobs[name] = job
             if not job.do_not_cache and self._action_cache is not None:
    -            self._action_cache.put_action_result(job.action_digest, result)
    +            self._action_cache.update_action_result(job.action_digest, result)
     
         def get_operations(self):
             response = operations_pb2.ListOperationsResponse()
    

  • buildgrid/utils.py
    @@ -13,6 +13,7 @@
     # limitations under the License.
     
     
    +from operator import attrgetter
     import os
     
     from buildgrid.settings import HASH
    @@ -31,30 +32,59 @@ def gen_fetch_blob(stub, digest, instance_name=""):
             yield response.data
     
     
    -def write_fetch_directory(directory, stub, digest, instance_name=""):
    -    """ Given a directory digest, fetches files and writes them to a directory
    +def write_fetch_directory(root_directory, stub, digest, instance_name=None):
    +    """Locally replicates a directory from CAS.
    +
    +    Args:
    +        root_directory (str): local directory to populate.
    +        stub (): gRPC stub for CAS communication.
    +        digest (Digest): digest for the directory to fetch from CAS.
    +        instance_name (str, optional): farm instance name to query data from.
         """
    -    # TODO: Extend to symlinks and inner directories
    -    # pathlib.Path('/my/directory').mkdir(parents=True, exist_ok=True)
    +    if not os.path.isabs(root_directory):
    +        root_directory = os.path.abspath(root_directory)
    +    if not os.path.exists(root_directory):
    +        os.makedirs(root_directory, exist_ok=True)
     
    -    directory_pb2 = remote_execution_pb2.Directory()
    -    directory_pb2 = parse_to_pb2_from_fetch(directory_pb2, stub, digest, instance_name)
    +    directory = parse_to_pb2_from_fetch(remote_execution_pb2.Directory(),
    +                                        stub, digest, instance_name)
    +
    +    for directory_node in directory.directories:
    +        child_path = os.path.join(root_directory, directory_node.name)
    +
    +        write_fetch_directory(child_path, stub, directory_node.digest, instance_name)
    +
    +    for file_node in directory.files:
    +        child_path = os.path.join(root_directory, file_node.name)
    +
    +        with open(child_path, 'wb') as child_file:
    +            write_fetch_blob(child_file, stub, file_node.digest, instance_name)
    +
    +    for symlink_node in directory.symlinks:
    +        child_path = os.path.join(root_directory, symlink_node.name)
    +
    +        if os.path.isabs(symlink_node.target):
    +            continue  # No out of temp-directory links for now.
    +        target_path = os.path.join(root_directory, symlink_node.target)
    +
    +        os.symlink(child_path, target_path)
     
    -    for file_node in directory_pb2.files:
    -        path = os.path.join(directory, file_node.name)
    -        with open(path, 'wb') as f:
    -            write_fetch_blob(f, stub, file_node.digest, instance_name)
     
    +def write_fetch_blob(target_file, stub, digest, instance_name=None):
    +    """Extracts a blob from CAS into a local file.
     
    -def write_fetch_blob(out, stub, digest, instance_name=""):
    -    """ Given an output buffer, fetches blob and writes to buffer
    +    Args:
    +        target_file (file): open local file to write to.
    +        stub (): gRPC stub for CAS communication.
    +        digest (Digest): digest for the blob to fetch from CAS.
    +        instance_name (str, optional): farm instance name to query data from.
         """
     
         for stream in gen_fetch_blob(stub, digest, instance_name):
    -        out.write(stream)
    +        target_file.write(stream)
    +    target_file.flush()
     
    -    out.flush()
    -    assert digest.size_bytes == os.fstat(out.fileno()).st_size
    +    assert digest.size_bytes == os.fstat(target_file.fileno()).st_size
     
     
     def parse_to_pb2_from_fetch(pb2, stub, digest, instance_name=""):
    @@ -70,7 +100,15 @@ def parse_to_pb2_from_fetch(pb2, stub, digest, instance_name=""):
     
     
     def create_digest(bytes_to_digest):
    -    """ Creates a hash based on the hex digest and returns the digest
    +    """Computes the :obj:`Digest` of a piece of data.
    +
    +    The :obj:`Digest` of a piece of data is a function of its hash **and** size.
    +
    +    Args:
    +        bytes_to_digest (bytes): byte data to digest.
    +
    +    Returns:
    +        :obj:`Digest`: The gRPC :obj:`Digest` for the given byte data.
         """
         return remote_execution_pb2.Digest(hash=HASH(bytes_to_digest).hexdigest(),
                                            size_bytes=len(bytes_to_digest))
    @@ -107,6 +145,183 @@ def file_maker(file_path, file_digest):
                                              is_executable=os.access(file_path, os.X_OK))
     
     
    -def read_file(read):
    -    with open(read, 'rb') as f:
    -        return f.read()
    +def directory_maker(directory_path):
    +    """Creates a gRPC :obj:`Directory` from a local directory.
    +    """
    +    if not os.path.isabs(directory_path):
    +        directory_path = os.path.abspath(directory_path)
    +
    +    child_directories = list()
    +    update_requests = list()
    +
    +    files, directories, symlinks = list(), list(), list()
    +    for directory_entry in os.scandir(directory_path):
    +        # Create a FileNode and corresponding BatchUpdateBlobsRequest:
    +        if directory_entry.is_file(follow_symlinks=False):
    +            node_blob = read_file(directory_entry.path)
    +            node_digest = create_digest(node_blob)
    +
    +            node = remote_execution_pb2.FileNode()
    +            node.name = directory_entry.name
    +            node.digest.CopyFrom(node_digest)
    +            node.is_executable = os.access(directory_entry.path, os.X_OK)
    +
    +            node_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=node_digest)
    +            node_request.data = node_blob
    +
    +            update_requests.append(node_request)
    +            files.append(node)
    +
    +        # Create a DirectoryNode and corresponding BatchUpdateBlobsRequest:
    +        elif directory_entry.is_dir(follow_symlinks=False):
    +            node_directory, node_children, node_requests = directory_maker(directory_entry.path)
    +
    +            node = remote_execution_pb2.DirectoryNode()
    +            node.name = directory_entry.name
    +            node.digest.CopyFrom(node_requests[-1].digest)
    +
    +            child_directories.extend(node_children)
    +            child_directories.append(node_directory)
    +            update_requests.extend(node_requests)
    +            directories.append(node)
    +
    +        # Create a SymlinkNode if necessary:
    +        elif os.path.islink(directory_entry.path):
    +            node_target = os.readlink(directory_entry.path)
    +
    +            node = remote_execution_pb2.SymlinkNode()
    +            node.name = directory_entry.name
    +            node.target = node_target
    +
    +            symlinks.append(node)
    +
    +    directory = remote_execution_pb2.Directory()
    +    directory.files.extend(sorted(files, key=attrgetter('name')))
    +    directory.directories.extend(sorted(directories, key=attrgetter('name')))
    +    directory.symlinks.extend(sorted(symlinks, key=attrgetter('name')))
    +
    +    directory_blob = directory.SerializeToString()
    +    directory_digest = create_digest(directory_blob)
    +
    +    update_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=directory_digest)
    +    update_request.data = directory_blob
    +
    +    update_requests.append(update_request)
    +
    +    return directory, child_directories, update_requests
    +
    +
    +def read_file(file_path):
    +    """Loads raw file content in memory.
    +
    +    Returns:
    +        bytes: Raw file's content until EOF.
    +
    +    Raises:
    +        OSError: If `file_path` does not exist or is not readable.
    +    """
    +    with open(file_path, 'rb') as byte_file:
    +        return byte_file.read()
    +
    +
    +def output_file_maker(file_path, input_path):
    +    """Creates an :obj:`OutputFile` from a local file.
    +
    +    `file_path` **must** point inside or be relative to `input_path`.
    +
    +    Args:
    +        file_path (str): absolute or relative path to a local file.
    +        input_path (str): absolute or relative path to the input root directory.
    +
    +    Returns:
    +        :obj:`OutputFile`, :obj:`BatchUpdateBlobsRequest`: Tuple of a new gRPC
    +        :obj:`OutputFile` object for the file pointed to by `file_path` and the
    +        corresponding :obj:`BatchUpdateBlobsRequest` for CAS upload.
    +    """
    +    if not os.path.isabs(file_path):
    +        file_path = os.path.abspath(file_path)
    +    if not os.path.isabs(input_path):
    +        input_path = os.path.abspath(input_path)
    +
    +    file_blob = read_file(file_path)
    +    file_digest = create_digest(file_blob)
    +
    +    output_file = remote_execution_pb2.OutputFile(digest=file_digest)
    +    output_file.path = os.path.relpath(file_path, start=input_path)
    +    output_file.is_executable = os.access(file_path, os.X_OK)
    +
    +    update_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=file_digest)
    +    update_request.data = file_blob
    +
    +    return output_file, update_request
    +
    +
    +def output_directory_maker(directory_path, working_path):
    +    """Creates a gRPC :obj:`OutputDirectory` from a local directory.
    +
    +    `directory_path` **must** point inside or be relative to `working_path`.
    +
    +    Args:
    +        directory_path (str): absolute or relative path to a local directory.
    +        working_path (str): absolute or relative path to the working directory.
    +
    +    Returns:
    +        :obj:`OutputDirectory`, :obj:`BatchUpdateBlobsRequest`: Tuple of a new
    +        gRPC :obj:`OutputDirectory` for the directory pointed to by
    +        `directory_path` and the corresponding list of
    +        :obj:`BatchUpdateBlobsRequest` for CAS upload.
    +    """
    +    if not os.path.isabs(directory_path):
    +        directory_path = os.path.abspath(directory_path)
    +    if not os.path.isabs(working_path):
    +        working_path = os.path.abspath(working_path)
    +
    +    tree, update_requests = tree_maker(directory_path)
    +
    +    output_directory = remote_execution_pb2.OutputDirectory()
    +    output_directory.tree_digest.CopyFrom(update_requests[-1].digest)
    +    output_directory.path = os.path.relpath(directory_path, start=working_path)
    +
    +    output_directory_blob = output_directory.SerializeToString()
    +    output_directory_digest = create_digest(output_directory_blob)
    +
    +    update_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=output_directory_digest)
    +    update_request.data = output_directory_blob
    +
    +    update_requests.append(update_request)
    +
    +    return output_directory, update_requests
    +
    +
    +def tree_maker(directory_path):
    +    """Creates a gRPC :obj:`Tree` from a local directory.
    +
    +    Args:
    +        directory_path (str): absolute or relative path to a local directory.
    +
    +    Returns:
    +        :obj:`Tree`, :obj:`BatchUpdateBlobsRequest`: Tuple of a new
    +        gRPC :obj:`Tree` for the directory pointed to by `directory_path` and the
    +        corresponding list of :obj:`BatchUpdateBlobsRequest` for CAS upload.
    +
    +        The :obj:`BatchUpdateBlobsRequest` list may come in any order. However,
    +        its last element is guaranteed to be the :obj:`Tree`'s request.
    +    """
    +    if not os.path.isabs(directory_path):
    +        directory_path = os.path.abspath(directory_path)
    +
    +    directory, child_directories, update_requests = directory_maker(directory_path)
    +
    +    tree = remote_execution_pb2.Tree()
    +    tree.children.extend(child_directories)
    +    tree.root.CopyFrom(directory)
    +
    +    tree_blob = tree.SerializeToString()
    +    tree_digest = create_digest(tree_blob)
    +
    +    update_request = remote_execution_pb2.BatchUpdateBlobsRequest.Request(digest=tree_digest)
    +    update_request.data = tree_blob
    +
    +    update_requests.append(update_request)
    +
    +    return tree, update_requests
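
A hypothetical usage sketch for the new output_file_maker helper (the paths, file content and instance name below are invented for illustration): it reads and digests the file, then returns an OutputFile whose path is relative to the given root, together with the matching CAS upload request, which can be batched into a BatchUpdateBlobsRequest the same way the worker above does.

    import os

    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
    from buildgrid.utils import output_file_maker

    working_directory = '/tmp/work'  # Assumed working directory.
    file_path = os.path.join(working_directory, 'out', 'app.bin')

    # Create an example output file to digest:
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    with open(file_path, 'wb') as example_file:
        example_file.write(b'example payload')

    output_file, update_request = output_file_maker(file_path, working_directory)

    assert output_file.path == os.path.join('out', 'app.bin')  # Relative to the root.
    assert update_request.digest == output_file.digest

    # The per-file requests can then be batched into a single CAS upload:
    update_requests = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name='main')
    update_requests.requests.extend([update_request])
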


