[Notes] [Git][BuildStream/buildstream][docs_Search_not_working] 29 commits: git.py: Make `ref` human readable



Title: GitLab

Phillip Smyth pushed to branch docs_Search_not_working at BuildStream / buildstream

Commits:

20 changed files:

Changes:

  • .gitlab-ci.yml
    @@ -145,7 +145,8 @@ docs:
       stage: test
       script:
       - export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
    -  - pip3 install sphinx
    +  # Currently sphinx_rtd_theme does not support Sphinx >1.8, this breaks search functionality
    +  - pip3 install sphinx==1.7.8
       - pip3 install sphinx-click
       - pip3 install sphinx_rtd_theme
       - cd dist && ./unpack.sh && cd buildstream

    @@ -161,14 +162,14 @@ docs:
     .overnight-tests: &overnight-tests-template
       stage: test
       variables:
    -    bst_ext_url: git+https://gitlab.com/BuildStream/bst-external.git
    -    bst_ext_ref: 1d6ab71151b93c8cbc0a91a36ffe9270f3b835f1 # 0.5.1
    -    fd_sdk_ref: 88d7c22c2281b987faa02edd57df80d430eecf1f # 18.08.12
    +    BST_EXT_URL: git+https://gitlab.com/BuildStream/bst-external.git
    +    BST_EXT_REF: 1d6ab71151b93c8cbc0a91a36ffe9270f3b835f1 # 0.5.1
    +    FD_SDK_REF: 88d7c22c2281b987faa02edd57df80d430eecf1f # 18.08.11-35-g88d7c22c
       before_script:
       - (cd dist && ./unpack.sh && cd buildstream && pip3 install .)
    -  - pip3 install --user -e ${bst_ext_url}@${bst_ext_ref}#egg=bst_ext
    +  - pip3 install --user -e ${BST_EXT_URL}@${BST_EXT_REF}#egg=bst_ext
       - git clone https://gitlab.com/freedesktop-sdk/freedesktop-sdk.git
    -  - git -C freedesktop-sdk checkout ${fd_sdk_ref}
    +  - git -C freedesktop-sdk checkout ${FD_SDK_REF}
       only:
       - schedules
    

  • buildstream/_artifactcache/cascache.py
    @@ -1048,10 +1048,29 @@ class CASCache(ArtifactCache):
                     missing_blobs[d.hash] = d

             # Upload any blobs missing on the server
    -        for blob_digest in missing_blobs.values():
    -            with open(self.objpath(blob_digest), 'rb') as f:
    -                assert os.fstat(f.fileno()).st_size == blob_digest.size_bytes
    -                self._send_blob(remote, blob_digest, f, u_uid=u_uid)
    +        self._send_blobs(remote, missing_blobs.values(), u_uid)
    +
    +    def _send_blobs(self, remote, digests, u_uid=uuid.uuid4()):
    +        batch = _CASBatchUpdate(remote)
    +
    +        for digest in digests:
    +            with open(self.objpath(digest), 'rb') as f:
    +                assert os.fstat(f.fileno()).st_size == digest.size_bytes
    +
    +                if (digest.size_bytes >= remote.max_batch_total_size_bytes or
    +                        not remote.batch_update_supported):
    +                    # Too large for batch request, upload in independent request.
    +                    self._send_blob(remote, digest, f, u_uid=u_uid)
    +                else:
    +                    if not batch.add(digest, f):
    +                        # Not enough space left in batch request.
    +                        # Complete pending batch first.
    +                        batch.send()
    +                        batch = _CASBatchUpdate(remote)
    +                        batch.add(digest, f)
    +
    +        # Send final batch
    +        batch.send()


     # Represents a single remote CAS cache.

    @@ -1126,6 +1145,17 @@ class _CASRemote():
                     if e.code() != grpc.StatusCode.UNIMPLEMENTED:
                         raise

    +            # Check whether the server supports BatchUpdateBlobs()
    +            self.batch_update_supported = False
    +            try:
    +                request = remote_execution_pb2.BatchUpdateBlobsRequest()
    +                response = self.cas.BatchUpdateBlobs(request)
    +                self.batch_update_supported = True
    +            except grpc.RpcError as e:
    +                if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
    +                        e.code() != grpc.StatusCode.PERMISSION_DENIED):
    +                    raise
    +
                 self._initialized = True

    @@ -1173,6 +1203,46 @@ class _CASBatchRead():
                 yield (response.digest, response.data)


    +# Represents a batch of blobs queued for upload.
    +#
    +class _CASBatchUpdate():
    +    def __init__(self, remote):
    +        self._remote = remote
    +        self._max_total_size_bytes = remote.max_batch_total_size_bytes
    +        self._request = remote_execution_pb2.BatchUpdateBlobsRequest()
    +        self._size = 0
    +        self._sent = False
    +
    +    def add(self, digest, stream):
    +        assert not self._sent
    +
    +        new_batch_size = self._size + digest.size_bytes
    +        if new_batch_size > self._max_total_size_bytes:
    +            # Not enough space left in current batch
    +            return False
    +
    +        blob_request = self._request.requests.add()
    +        blob_request.digest.hash = digest.hash
    +        blob_request.digest.size_bytes = digest.size_bytes
    +        blob_request.data = stream.read(digest.size_bytes)
    +        self._size = new_batch_size
    +        return True
    +
    +    def send(self):
    +        assert not self._sent
    +        self._sent = True
    +
    +        if len(self._request.requests) == 0:
    +            return
    +
    +        batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
    +
    +        for response in batch_response.responses:
    +            if response.status.code != grpc.StatusCode.OK.value[0]:
    +                raise ArtifactError("Failed to upload blob {}: {}".format(
    +                    response.digest.hash, response.status.code))
    +
    +
     def _grouper(iterable, n):
         while True:
             try:
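
    For reference, the upload path above uses a simple fill-flush-refill batching pattern. Below is a minimal, self-contained Python sketch of that pattern (not taken from the commit): plain (name, size) tuples stand in for CAS digests, a toy Batch class stands in for _CASBatchUpdate, and MAX_BATCH_SIZE stands in for remote.max_batch_total_size_bytes.

        MAX_BATCH_SIZE = 64  # stand-in for remote.max_batch_total_size_bytes

        class Batch:
            def __init__(self):
                self.items = []
                self.size = 0

            def add(self, name, size):
                if self.size + size > MAX_BATCH_SIZE:
                    return False          # not enough space left in this batch
                self.items.append(name)
                self.size += size
                return True

            def send(self):
                if self.items:
                    print("sending batch:", self.items)

        def send_all(items):
            batch = Batch()
            for name, size in items:
                if size >= MAX_BATCH_SIZE:
                    print("sending", name, "individually")  # too large for any batch
                elif not batch.add(name, size):
                    batch.send()                            # flush the full batch first
                    batch = Batch()
                    batch.add(name, size)
            batch.send()                                    # send the final batch

        send_all([("a", 30), ("b", 30), ("c", 10), ("d", 100), ("e", 5)])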
    

  • buildstream/_artifactcache/casserver.py
    @@ -68,7 +68,7 @@ def create_server(repo, *, enable_push):
             _ByteStreamServicer(artifactcache, enable_push=enable_push), server)

         remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(
    -        _ContentAddressableStorageServicer(artifactcache), server)
    +        _ContentAddressableStorageServicer(artifactcache, enable_push=enable_push), server)

         remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
             _CapabilitiesServicer(), server)

    @@ -222,9 +222,10 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):

     class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddressableStorageServicer):
    -    def __init__(self, cas):
    +    def __init__(self, cas, *, enable_push):
             super().__init__()
             self.cas = cas
    +        self.enable_push = enable_push

         def FindMissingBlobs(self, request, context):
             response = remote_execution_pb2.FindMissingBlobsResponse()

    @@ -260,6 +261,46 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddressableStorageServicer):

             return response

    +    def BatchUpdateBlobs(self, request, context):
    +        response = remote_execution_pb2.BatchUpdateBlobsResponse()
    +
    +        if not self.enable_push:
    +            context.set_code(grpc.StatusCode.PERMISSION_DENIED)
    +            return response
    +
    +        batch_size = 0
    +
    +        for blob_request in request.requests:
    +            digest = blob_request.digest
    +
    +            batch_size += digest.size_bytes
    +            if batch_size > _MAX_PAYLOAD_BYTES:
    +                context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
    +                return response
    +
    +            blob_response = response.responses.add()
    +            blob_response.digest.hash = digest.hash
    +            blob_response.digest.size_bytes = digest.size_bytes
    +
    +            if len(blob_request.data) != digest.size_bytes:
    +                blob_response.status.code = grpc.StatusCode.FAILED_PRECONDITION
    +                continue
    +
    +            try:
    +                _clean_up_cache(self.cas, digest.size_bytes)
    +
    +                with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
    +                    out.write(blob_request.data)
    +                    out.flush()
    +                    server_digest = self.cas.add_object(path=out.name)
    +                    if server_digest.hash != digest.hash:
    +                        blob_response.status.code = grpc.StatusCode.FAILED_PRECONDITION
    +
    +            except ArtifactTooLargeException:
    +                blob_response.status.code = grpc.StatusCode.RESOURCE_EXHAUSTED
    +
    +        return response
    +

     class _CapabilitiesServicer(remote_execution_pb2_grpc.CapabilitiesServicer):
         def GetCapabilities(self, request, context):
    

  • buildstream/_frontend/app.py
    @@ -115,14 +115,6 @@ class App():
             else:
                 self.colors = False

    -        # Increase the soft limit for open file descriptors to the maximum.
    -        # SafeHardlinks FUSE needs to hold file descriptors for all processes in the sandbox.
    -        # Avoid hitting the limit too quickly.
    -        limits = resource.getrlimit(resource.RLIMIT_NOFILE)
    -        if limits[0] != limits[1]:
    -            # Set soft limit to hard limit
    -            resource.setrlimit(resource.RLIMIT_NOFILE, (limits[1], limits[1]))
    -
         # create()
         #
         # Should be used instead of the regular constructor.
    

  • buildstream/_platform/darwin.py
    (new file)
    +#
    +#  Copyright (C) 2017 Codethink Limited
    +#  Copyright (C) 2018 Bloomberg Finance LP
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +
    +import os
    +import resource
    +
    +from .._exceptions import PlatformError
    +from ..sandbox import SandboxDummy
    +
    +from . import Platform
    +
    +
    +class Darwin(Platform):
    +
    +    # This value comes from OPEN_MAX in syslimits.h
    +    OPEN_MAX = 10240
    +
    +    def __init__(self):
    +
    +        super().__init__()
    +
    +    def create_sandbox(self, *args, **kwargs):
    +        return SandboxDummy(*args, **kwargs)
    +
    +    def check_sandbox_config(self, config):
    +        # Accept all sandbox configs as it's irrelevant with the dummy sandbox (no Sandbox.run).
    +        return True
    +
    +    def get_cpu_count(self, cap=None):
    +        cpu_count = os.cpu_count()
    +        if cap is None:
    +            return cpu_count
    +        else:
    +            return min(cpu_count, cap)
    +
    +    def set_resource_limits(self, soft_limit=OPEN_MAX, hard_limit=None):
    +        super().set_resource_limits(soft_limit)

  • buildstream/_platform/linux.py
    @@ -23,7 +23,7 @@ import subprocess
     from .. import _site
     from .. import utils
     from .._message import Message, MessageType
    -from ..sandbox import SandboxBwrap
    +from ..sandbox import SandboxDummy

     from . import Platform

    @@ -38,13 +38,21 @@ class Linux(Platform):
             self._gid = os.getegid()

             self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
    -        self._user_ns_available = self._check_user_ns_available()
    +
    +        if self._local_sandbox_available():
    +            self._user_ns_available = self._check_user_ns_available()
    +        else:
    +            self._user_ns_available = False

         def create_sandbox(self, *args, **kwargs):
    -        # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
    -        kwargs['user_ns_available'] = self._user_ns_available
    -        kwargs['die_with_parent_available'] = self._die_with_parent_available
    -        return SandboxBwrap(*args, **kwargs)
    +        if not self._local_sandbox_available():
    +            return SandboxDummy(*args, **kwargs)
    +        else:
    +            from ..sandbox._sandboxbwrap import SandboxBwrap
    +            # Inform the bubblewrap sandbox as to whether it can use user namespaces or not
    +            kwargs['user_ns_available'] = self._user_ns_available
    +            kwargs['die_with_parent_available'] = self._die_with_parent_available
    +            return SandboxBwrap(*args, **kwargs)

         def check_sandbox_config(self, config):
             if self._user_ns_available:

    @@ -58,8 +66,13 @@ class Linux(Platform):
         ################################################
         #              Private Methods                 #
         ################################################
    -    def _check_user_ns_available(self):
    +    def _local_sandbox_available(self):
    +        try:
    +            return os.path.exists(utils.get_host_tool('bwrap')) and os.path.exists('/dev/fuse')
    +        except utils.ProgramNotFoundError:
    +            return False

    +    def _check_user_ns_available(self):
             # Here, lets check if bwrap is able to create user namespaces,
             # issue a warning if it's not available, and save the state
             # locally so that we can inform the sandbox to not try it
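
    As an aside, the new availability check boils down to "bwrap on PATH and /dev/fuse present". A rough standard-library equivalent, purely illustrative (shutil.which() standing in for utils.get_host_tool()):

        import os
        import shutil

        def local_sandbox_available():
            # bwrap found on PATH and the FUSE device available
            return shutil.which('bwrap') is not None and os.path.exists('/dev/fuse')

        print(local_sandbox_available())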
    

  • buildstream/_platform/platform.py
    @@ -19,6 +19,7 @@
     import os
     import sys
    +import resource

     from .._exceptions import PlatformError, ImplError

    @@ -32,23 +33,26 @@ class Platform():
         # sandbox factory as well as platform helpers.
         #
         def __init__(self):
    -        pass
    +        self.set_resource_limits()

         @classmethod
         def _create_instance(cls):
    -        if sys.platform.startswith('linux'):
    -            backend = 'linux'
    -        else:
    -            backend = 'unix'
    -
             # Meant for testing purposes and therefore hidden in the
             # deepest corners of the source code. Try not to abuse this,
             # please?
             if os.getenv('BST_FORCE_BACKEND'):
                 backend = os.getenv('BST_FORCE_BACKEND')
    +        elif sys.platform.startswith('linux'):
    +            backend = 'linux'
    +        elif sys.platform.startswith('darwin'):
    +            backend = 'darwin'
    +        else:
    +            backend = 'unix'

             if backend == 'linux':
                 from .linux import Linux as PlatformImpl
    +        elif backend == 'darwin':
    +            from .darwin import Darwin as PlatformImpl
             elif backend == 'unix':
                 from .unix import Unix as PlatformImpl
             else:

    @@ -62,6 +66,13 @@ class Platform():
                 cls._create_instance()
             return cls._instance

    +    def get_cpu_count(self, cap=None):
    +        cpu_count = len(os.sched_getaffinity(0))
    +        if cap is None:
    +            return cpu_count
    +        else:
    +            return min(cpu_count, cap)
    +
         ##################################################################
         #                        Sandbox functions                       #
         ##################################################################

    @@ -84,3 +95,15 @@
         def check_sandbox_config(self, config):
             raise ImplError("Platform {platform} does not implement check_sandbox_config()"
                             .format(platform=type(self).__name__))
    +
    +    def set_resource_limits(self, soft_limit=None, hard_limit=None):
    +        # Need to set resources for _frontend/app.py as this is dependent on the platform
    +        # SafeHardlinks FUSE needs to hold file descriptors for all processes in the sandbox.
    +        # Avoid hitting the limit too quickly.
    +        limits = resource.getrlimit(resource.RLIMIT_NOFILE)
    +        if limits[0] != limits[1]:
    +            if soft_limit is None:
    +                soft_limit = limits[1]
    +            if hard_limit is None:
    +                hard_limit = limits[1]
    +            resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))

  • buildstream/_platform/unix.py
    @@ -20,7 +20,6 @@
     import os

     from .._exceptions import PlatformError
    -from ..sandbox import SandboxChroot

     from . import Platform

    @@ -39,6 +38,7 @@ class Unix(Platform):
                 raise PlatformError("Root privileges are required to run without bubblewrap.")

         def create_sandbox(self, *args, **kwargs):
    +        from ..sandbox._sandboxchroot import SandboxChroot
             return SandboxChroot(*args, **kwargs)

         def check_sandbox_config(self, config):
    

  • buildstream/_project.py
    @@ -38,6 +38,7 @@ from ._loader import Loader
     from .element import Element
     from ._message import Message, MessageType
     from ._includes import Includes
    +from ._platform import Platform


     # Project Configuration file

    @@ -617,7 +618,8 @@ class Project():
             # Based on some testing (mainly on AWS), maximum effective
             # max-jobs value seems to be around 8-10 if we have enough cores
             # users should set values based on workload and build infrastructure
    -        output.base_variables['max-jobs'] = str(min(len(os.sched_getaffinity(0)), 8))
    +        platform = Platform.get_platform()
    +        output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))

             # Export options into variables, if that was requested
             output.options.export_variables(output.base_variables)
    

  • buildstream/_yaml.py
    @@ -467,7 +467,7 @@ def node_get_project_path(node, key, project_dir, *,
                             "{}: Specified path '{}' does not exist"
                             .format(provenance, path_str))

    -    is_inside = project_dir_path in full_resolved_path.parents or (
    +    is_inside = project_dir_path.resolve() in full_resolved_path.parents or (
             full_resolved_path == project_dir_path)

         if path.is_absolute() or not is_inside:
    

  • buildstream/data/userconfig.yaml
    @@ -26,8 +26,13 @@ logdir: ${XDG_CACHE_HOME}/buildstream/logs
     #    Cache
     #
     cache:
    -  # Size of the artifact cache - BuildStream will attempt to keep the
    +  # Size of the artifact cache in bytes - BuildStream will attempt to keep the
       # artifact cache within this size.
    +  # If the value is suffixed with K, M, G or T, the specified memory size is
    +  # parsed as Kilobytes, Megabytes, Gigabytes, or Terabytes (with the base
    +  # 1024), respectively.
    +  # Alternatively, a percentage value may be specified, which is taken relative
    +  # to the size of the file system containing the cache.
       quota: infinity

     #
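
    As an aside, the quota semantics documented above (plain bytes, K/M/G/T suffixes with base 1024, a percentage of the containing file system, or "infinity") could be interpreted roughly as in this illustrative sketch; it is not BuildStream's actual parser:

        import os

        def parse_quota(value, cache_path='.'):
            suffixes = {'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3, 'T': 1024 ** 4}
            value = value.strip()
            if value == 'infinity':
                return None                      # no limit
            if value.endswith('%'):
                stat = os.statvfs(cache_path)    # file system containing the cache
                total = stat.f_blocks * stat.f_frsize
                return int(total * float(value[:-1]) / 100)
            if value[-1].upper() in suffixes:
                return int(float(value[:-1]) * suffixes[value[-1].upper()])
            return int(value)                    # plain number of bytes

        print(parse_quota('10G'))   # 10737418240
        print(parse_quota('50%'))   # half of the file system size, in bytes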
    

  • buildstream/plugins/sources/git.py
    @@ -43,6 +43,12 @@ git - stage files from a git repository
        # will be used to update the 'ref' when refreshing the pipeline.
        track: master

    +   # Optionally specify the ref format used for tracking.
    +   # The default is 'sha1' for the raw commit hash.
    +   # If you specify 'git-describe', the commit hash will be prefixed
    +   # with the closest tag.
    +   ref-format: sha1
    +
        # Specify the commit ref, this must be specified in order to
        # checkout sources and build, but can be automatically updated
        # if the 'track' attribute was specified.

    @@ -205,7 +211,18 @@ class GitMirror(SourceFetcher):
                 [self.source.host_git, 'rev-parse', tracking],
                 fail="Unable to find commit for specified branch name '{}'".format(tracking),
                 cwd=self.mirror)
    -        return output.rstrip('\n')
    +        ref = output.rstrip('\n')
    +
    +        if self.source.ref_format == 'git-describe':
    +            # Prefix the ref with the closest tag, if available,
    +            # to make the ref human readable
    +            exit_code, output = self.source.check_output(
    +                [self.source.host_git, 'describe', '--tags', '--abbrev=40', '--long', ref],
    +                cwd=self.mirror)
    +            if exit_code == 0:
    +                ref = output.rstrip('\n')
    +
    +        return ref

         def stage(self, directory, track=None):
             fullpath = os.path.join(directory, self.path)

    @@ -341,13 +358,18 @@ class GitSource(Source):
         def configure(self, node):
             ref = self.node_get_member(node, str, 'ref', None)

    -        config_keys = ['url', 'track', 'ref', 'submodules', 'checkout-submodules']
    +        config_keys = ['url', 'track', 'ref', 'submodules', 'checkout-submodules', 'ref-format']
             self.node_validate(node, config_keys + Source.COMMON_CONFIG_KEYS)

             self.original_url = self.node_get_member(node, str, 'url')
             self.mirror = GitMirror(self, '', self.original_url, ref, primary=True)
             self.tracking = self.node_get_member(node, str, 'track', None)

    +        self.ref_format = self.node_get_member(node, str, 'ref-format', 'sha1')
    +        if self.ref_format not in ['sha1', 'git-describe']:
    +            provenance = self.node_provenance(node, member_name='ref-format')
    +            raise SourceError("{}: Unexpected value for ref-format: {}".format(provenance, self.ref_format))
    +
             # At this point we now know if the source has a ref and/or a track.
             # If it is missing both then we will be unable to track or build.
             if self.mirror.ref is None and self.tracking is None:
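
    As an aside, with ref-format: git-describe a tracked ref takes the form <closest-tag>-<commits-since-tag>-g<40-char-sha> rather than a bare commit hash. A standalone sketch of the same 'git describe' invocation used above (the repository path and ref below are placeholders):

        import subprocess

        def describe_ref(repo_dir, commit):
            # Produce "<closest-tag>-<n>-g<40-char-sha>" when a tag is reachable,
            # otherwise fall back to the plain commit hash.
            try:
                out = subprocess.check_output(
                    ['git', 'describe', '--tags', '--abbrev=40', '--long', commit],
                    cwd=repo_dir, universal_newlines=True)
                return out.rstrip('\n')
            except subprocess.CalledProcessError:
                return commit

        # Example (assuming /path/to/repo is a checkout with at least one tag):
        #   print(describe_ref('/path/to/repo', 'HEAD'))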
    

  • buildstream/sandbox/__init__.py
    @@ -18,6 +18,5 @@
     #        Tristan Maat <tristan maat codethink co uk>

     from .sandbox import Sandbox, SandboxFlags
    -from ._sandboxchroot import SandboxChroot
    -from ._sandboxbwrap import SandboxBwrap
     from ._sandboxremote import SandboxRemote
    +from ._sandboxdummy import SandboxDummy

  • buildstream/sandbox/_sandboxdummy.py
    (new file)
    +#
    +#  Copyright (C) 2017 Codethink Limited
    +#
    +#  This program is free software; you can redistribute it and/or
    +#  modify it under the terms of the GNU Lesser General Public
    +#  License as published by the Free Software Foundation; either
    +#  version 2 of the License, or (at your option) any later version.
    +#
    +#  This library is distributed in the hope that it will be useful,
    +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
    +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    +#  Lesser General Public License for more details.
    +#
    +#  You should have received a copy of the GNU Lesser General Public
    +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
    +#
    +#  Authors:
    +
    +from .._exceptions import SandboxError
    +from . import Sandbox
    +
    +
    +class SandboxDummy(Sandbox):
    +    def __init__(self, *args, **kwargs):
    +        super().__init__(*args, **kwargs)
    +
    +    def run(self, command, flags, *, cwd=None, env=None):
    +
    +        # Fallback to the sandbox default settings for
    +        # the cwd and env.
    +        #
    +        cwd = self._get_work_directory(cwd=cwd)
    +        env = self._get_environment(cwd=cwd, env=env)
    +
    +        if not self._has_command(command[0], env):
    +            raise SandboxError("Staged artifacts do not provide command "
    +                               "'{}'".format(command[0]),
    +                               reason='missing-command')
    +
    +        raise SandboxError("This platform does not support local builds")

  • buildstream/sandbox/_sandboxremote.py
    @@ -177,15 +177,11 @@ class SandboxRemote(Sandbox):
             if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):
                 raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")

    -        # Set up environment and working directory
    -        if cwd is None:
    -            cwd = self._get_work_directory()
    -
    -        if cwd is None:
    -            cwd = '/'
    -
    -        if env is None:
    -            env = self._get_environment()
    +        # Fallback to the sandbox default settings for
    +        # the cwd and env.
    +        #
    +        cwd = self._get_work_directory(cwd=cwd)
    +        env = self._get_environment(cwd=cwd, env=env)

             # We want command args as a list of strings
             if isinstance(command, str):
    

  • buildstream/utils.py
    @@ -35,6 +35,7 @@ import tempfile
     import itertools
     import functools
     from contextlib import contextmanager
    +from stat import S_ISDIR

     import psutil

    @@ -328,27 +329,25 @@ def safe_remove(path):
         Raises:
            UtilError: In the case of unexpected system call failures
         """
    -    if os.path.lexists(path):
    -
    -        # Try to remove anything that is in the way, but issue
    -        # a warning instead if it removes a non empty directory
    -        try:
    +    try:
    +        if S_ISDIR(os.lstat(path).st_mode):
    +            os.rmdir(path)
    +        else:
                 os.unlink(path)
    -        except OSError as e:
    -            if e.errno != errno.EISDIR:
    -                raise UtilError("Failed to remove '{}': {}"
    -                                .format(path, e))
    -
    -            try:
    -                os.rmdir(path)
    -            except OSError as e:
    -                if e.errno == errno.ENOTEMPTY:
    -                    return False
    -                else:
    -                    raise UtilError("Failed to remove '{}': {}"
    -                                    .format(path, e))

    -    return True
    +        # File removed/unlinked successfully
    +        return True
    +
    +    except OSError as e:
    +        if e.errno == errno.ENOTEMPTY:
    +            # Path is non-empty directory
    +            return False
    +        elif e.errno == errno.ENOENT:
    +            # Path does not exist
    +            return True
    +
    +        raise UtilError("Failed to remove '{}': {}"
    +                        .format(path, e))


     def copy_files(src, dest, *, files=None, ignore_missing=False, report_written=False):
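
    As an aside, a quick standalone demonstration of the new safe_remove() behaviour (the UtilError wrapping is omitted here for brevity):

        import errno
        import os
        import tempfile
        from stat import S_ISDIR

        def safe_remove(path):
            try:
                if S_ISDIR(os.lstat(path).st_mode):
                    os.rmdir(path)
                else:
                    os.unlink(path)
                return True                  # removed successfully
            except OSError as e:
                if e.errno == errno.ENOTEMPTY:
                    return False             # non-empty directory, left in place
                elif e.errno == errno.ENOENT:
                    return True              # already gone
                raise

        with tempfile.TemporaryDirectory() as d:
            f = os.path.join(d, 'file')
            open(f, 'w').close()
            print(safe_remove(f))            # True  (regular file removed)
            print(safe_remove(f))            # True  (already gone)
            sub = os.path.join(d, 'sub')
            os.makedirs(sub)
            open(os.path.join(sub, 'x'), 'w').close()
            print(safe_remove(sub))          # False (non-empty directory)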
    

  • tests/format/project.py
    @@ -188,3 +188,15 @@ def test_project_refs_options(cli, datafiles):

         # Assert that the cache keys are different
         assert result1.output != result2.output
    +
    +
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'element-path'))
    +def test_element_path_project_path_contains_symlinks(cli, datafiles, tmpdir):
    +    real_project = str(datafiles)
    +    linked_project = os.path.join(str(tmpdir), 'linked')
    +    os.symlink(real_project, linked_project)
    +    os.makedirs(os.path.join(real_project, 'elements'), exist_ok=True)
    +    with open(os.path.join(real_project, 'elements', 'element.bst'), 'w') as f:
    +        f.write("kind: manual\n")
    +    result = cli.run(project=linked_project, args=['show', 'element.bst'])
    +    result.assert_success()

  • tests/integration/cachedfail.py
    @@ -121,7 +121,7 @@ def test_build_depend_on_cached_fail(cli, tmpdir, datafiles):

     @pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
     @pytest.mark.datafiles(DATA_DIR)
    -@pytest.mark.parametrize("on_error", ("continue",))
    +@pytest.mark.parametrize("on_error", ("continue", "quit"))
     def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
         project = os.path.join(datafiles.dirname, datafiles.basename)
         element_path = os.path.join(project, 'elements', 'element.bst')
    

  • tests/sources/git.py
    @@ -476,3 +476,50 @@ def test_ref_not_in_track_warn_error(cli, tmpdir, datafiles):
         result = cli.run(project=project, args=['build', 'target.bst'])
         result.assert_main_error(ErrorDomain.STREAM, None)
         result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.REF_NOT_IN_TRACK)
    +
    +
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    +@pytest.mark.parametrize("ref_format", ['sha1', 'git-describe'])
    +@pytest.mark.parametrize("tag,extra_commit", [(False, False), (True, False), (True, True)])
    +def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    # Create the repo from 'repofiles' subdir
    +    repo = create_repo('git', str(tmpdir))
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    +    if tag:
    +        repo.add_tag('tag')
    +    if extra_commit:
    +        repo.add_commit()
    +
    +    # Write out our test target
    +    element = {
    +        'kind': 'import',
    +        'sources': [
    +            repo.source_config()
    +        ]
    +    }
    +    element['sources'][0]['ref-format'] = ref_format
    +    element_path = os.path.join(project, 'target.bst')
    +    _yaml.dump(element, element_path)
    +
    +    # Track it
    +    result = cli.run(project=project, args=['track', 'target.bst'])
    +    result.assert_success()
    +
    +    element = _yaml.load(element_path)
    +    new_ref = element['sources'][0]['ref']
    +
    +    if ref_format == 'git-describe' and tag:
    +        # Check and strip prefix
    +        prefix = 'tag-{}-g'.format(0 if not extra_commit else 1)
    +        assert new_ref.startswith(prefix)
    +        new_ref = new_ref[len(prefix):]
    +
    +    # 40 chars for SHA-1
    +    assert len(new_ref) == 40
    +
    +    # Fetch it
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    +    result.assert_success()

  • tests/testutils/repo/git.py
    @@ -42,6 +42,9 @@ class Git(Repo):
             self._run_git('commit', '-m', 'Initial commit')
             return self.latest_commit()

    +    def add_tag(self, tag):
    +        self._run_git('tag', tag)
    +
         def add_commit(self):
             self._run_git('commit', '--allow-empty', '-m', 'Additional commit')
             return self.latest_commit()
    


