[Notes] [Git][BuildStream/buildstream][juerg/cas-1.2] 8 commits: .gitlab-ci.yml: Avoid running tests in post-merge



Title: GitLab

Jürg Billeter pushed to branch juerg/cas-1.2 at BuildStream / buildstream

Commits:

5 changed files:

Changes:

  • .gitlab-ci.yml
    ... ... @@ -10,6 +10,16 @@ stages:
    10 10
       - test
    
    11 11
       - post
    
    12 12
     
    
    13
    +# Avoid running all the tests post-merge on
    
    14
    +# master or on any release branch.
    
    15
    +#
    
    16
    +.tests-condition-template: &tests-condition
    
    17
    +  only:
    
    18
    +  - branches
    
    19
    +  except:
    
    20
    +  - master
    
    21
    +  - /bst-1\..*/
    
    22
    +
    
    13 23
     #####################################################
    
    14 24
     #                  Prepare stage                    #
    
    15 25
     #####################################################
    
    ... ... @@ -91,20 +101,26 @@ source_dist:
    91 101
     tests-debian-9:
    
    92 102
       image: buildstream/testsuite-debian:9-master-114-4cab18e3
    
    93 103
       <<: *linux-tests
    
    104
    +  <<: *tests-condition
    
    94 105
     
    
    95 106
     tests-fedora-27:
    
    96 107
       image: buildstream/testsuite-fedora:27-master-114-4cab18e3
    
    97 108
       <<: *linux-tests
    
    109
    +  <<: *tests-condition
    
    98 110
     
    
    99 111
     tests-fedora-28:
    
    100 112
       image: buildstream/testsuite-fedora:28-master-114-4cab18e3
    
    101 113
       <<: *linux-tests
    
    114
    +  <<: *tests-condition
    
    102 115
     
    
    103 116
     tests-ubuntu-18.04:
    
    104 117
       image: buildstream/testsuite-ubuntu:18.04-master-114-4cab18e3
    
    105 118
       <<: *linux-tests
    
    119
    +  <<: *tests-condition
    
    106 120
     
    
    107 121
     tests-unix:
    
    122
    +  <<: *tests-condition
    
    123
    +
    
    108 124
       # Use fedora here, to a) run a test on fedora and b) ensure that we
    
    109 125
       # can get rid of ostree - this is not possible with debian-8
    
    110 126
       image: buildstream/testsuite-fedora:27-master-114-4cab18e3
    
    ... ... @@ -143,6 +159,15 @@ tests-unix:
    143 159
     # Note: We still do not enforce a consistent installation of python3-sphinx,
    
    144 160
     #       as it will significantly grow the backing image.
    
    145 161
     docs:
    
    162
    +
    
    163
    +  # Here we build the docs for every pre-merge CI, but avoid
    
    164
    +  # the job on post-merge to stable branches, because we only
    
    165
    +  # ever publish them from master
    
    166
    +  only:
    
    167
    +  - branches
    
    168
    +  except:
    
    169
    +  - /bst-1\..*/
    
    170
    +
    
    146 171
       stage: test
    
    147 172
       script:
    
    148 173
       - export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
    
    ... ... @@ -167,6 +192,8 @@ docs:
    167 192
     # as an output of radon, with some conversion
    
    168 193
     #
    
    169 194
     codequality:
    
    195
    +  <<: *tests-condition
    
    196
    +
    
    170 197
       image: docker:stable
    
    171 198
       stage: post
    
    172 199
       variables:
    
    ... ... @@ -185,6 +212,8 @@ codequality:
    185 212
         paths: [codeclimate.json]
    
    186 213
     
    
    187 214
     analysis:
    
    215
    +  <<: *tests-condition
    
    216
    +
    
    188 217
       stage: post
    
    189 218
       script:
    
    190 219
       - |
    
    ... ... @@ -213,6 +242,8 @@ analysis:
    213 242
     # Collate coverage reports
    
    214 243
     #
    
    215 244
     coverage:
    
    245
    +  <<: *tests-condition
    
    246
    +
    
    216 247
       stage: post
    
    217 248
       coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
    
    218 249
       script:
    

  • NEWS
    1
    +=================
    
    2
    +buildstream 1.1.7
    
    3
    +=================
    
    4
    +
    
    5
    +  o Fix CAS resource_name format
    
    6
    +
    
    7
    +    Artifact servers need to be updated.
    
    8
    +
    
    9
    +
    
    1 10
     =================
    
    2 11
     buildstream 1.1.6
    
    3 12
     =================
    

  • README.rst
    1 1
     About
    
    2 2
     -----
    
    3
    -.. image:: https://gitlab.com/BuildStream/buildstream/badges/master/pipeline.svg
    
    4
    -   :target: https://gitlab.com/BuildStream/buildstream/commits/master
    
    5
    -
    
    6
    -.. image:: https://gitlab.com/BuildStream/buildstream/badges/master/coverage.svg?job=coverage
    
    7
    -   :target: https://gitlab.com/BuildStream/buildstream/commits/master
    
    8 3
     
    
    9 4
     
    
    10 5
     What is BuildStream?
    

  • buildstream/_artifactcache/cascache.py
    ... ... @@ -24,6 +24,7 @@ import os
    24 24
     import signal
    
    25 25
     import stat
    
    26 26
     import tempfile
    
    27
    +import uuid
    
    27 28
     import errno
    
    28 29
     from urllib.parse import urlparse
    
    29 30
     
    
    ... ... @@ -315,8 +316,11 @@ class CASCache(ArtifactCache):
    315 316
                         # Upload any blobs missing on the server
    
    316 317
                         skipped_remote = False
    
    317 318
                         for digest in missing_blobs.values():
    
    318
    -                        def request_stream():
    
    319
    -                            resource_name = os.path.join(digest.hash, str(digest.size_bytes))
    
    319
    +                        uuid_ = uuid.uuid4()
    
    320
    +                        resource_name = '/'.join(['uploads', str(uuid_), 'blobs',
    
    321
    +                                                  digest.hash, str(digest.size_bytes)])
    
    322
    +
    
    323
    +                        def request_stream(resname):
    
    320 324
                                 with open(self.objpath(digest), 'rb') as f:
    
    321 325
                                     assert os.fstat(f.fileno()).st_size == digest.size_bytes
    
    322 326
                                     offset = 0
    
    ... ... @@ -330,12 +334,12 @@ class CASCache(ArtifactCache):
    330 334
                                         request.write_offset = offset
    
    331 335
                                         # max. 64 kB chunks
    
    332 336
                                         request.data = f.read(chunk_size)
    
    333
    -                                    request.resource_name = resource_name  # pylint: disable=cell-var-from-loop
    
    337
    +                                    request.resource_name = resname
    
    334 338
                                         request.finish_write = remaining <= 0
    
    335 339
                                         yield request
    
    336 340
                                         offset += chunk_size
    
    337 341
                                         finished = request.finish_write
    
    338
    -                        response = remote.bytestream.Write(request_stream())
    
    342
    +                        response = remote.bytestream.Write(request_stream(resource_name))
    
    339 343
     
    
    340 344
                         request = buildstream_pb2.UpdateReferenceRequest()
    
    341 345
                         request.keys.append(ref)
    
    ... ... @@ -772,7 +776,7 @@ class CASCache(ArtifactCache):
    772 776
                 yield from self._required_blobs(dirnode.digest)
    
    773 777
     
    
    774 778
         def _fetch_blob(self, remote, digest, out):
    
    775
    -        resource_name = os.path.join(digest.hash, str(digest.size_bytes))
    
    779
    +        resource_name = '/'.join(['blobs', digest.hash, str(digest.size_bytes)])
    
    776 780
             request = bytestream_pb2.ReadRequest()
    
    777 781
             request.resource_name = resource_name
    
    778 782
             request.read_offset = 0
    

  • buildstream/_artifactcache/casserver.py
    ... ... @@ -23,6 +23,7 @@ import os
    23 23
     import signal
    
    24 24
     import sys
    
    25 25
     import tempfile
    
    26
    +import uuid
    
    26 27
     
    
    27 28
     import click
    
    28 29
     import grpc
    
    ... ... @@ -130,12 +131,21 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
    130 131
     
    
    131 132
         def Read(self, request, context):
    
    132 133
             resource_name = request.resource_name
    
    133
    -        client_digest = _digest_from_resource_name(resource_name)
    
    134
    -        assert request.read_offset <= client_digest.size_bytes
    
    134
    +        client_digest = _digest_from_download_resource_name(resource_name)
    
    135
    +        if client_digest is None:
    
    136
    +            context.set_code(grpc.StatusCode.NOT_FOUND)
    
    137
    +            return
    
    138
    +
    
    139
    +        if request.read_offset > client_digest.size_bytes:
    
    140
    +            context.set_code(grpc.StatusCode.OUT_OF_RANGE)
    
    141
    +            return
    
    135 142
     
    
    136 143
             try:
    
    137 144
                 with open(self.cas.objpath(client_digest), 'rb') as f:
    
    138
    -                assert os.fstat(f.fileno()).st_size == client_digest.size_bytes
    
    145
    +                if os.fstat(f.fileno()).st_size != client_digest.size_bytes:
    
    146
    +                    context.set_code(grpc.StatusCode.NOT_FOUND)
    
    147
    +                    return
    
    148
    +
    
    139 149
                     if request.read_offset > 0:
    
    140 150
                         f.seek(request.read_offset)
    
    141 151
     
    
    ... ... @@ -163,12 +173,18 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
    163 173
             resource_name = None
    
    164 174
             with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
    
    165 175
                 for request in request_iterator:
    
    166
    -                assert not finished
    
    167
    -                assert request.write_offset == offset
    
    176
    +                if finished or request.write_offset != offset:
    
    177
    +                    context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
    
    178
    +                    return response
    
    179
    +
    
    168 180
                     if resource_name is None:
    
    169 181
                         # First request
    
    170 182
                         resource_name = request.resource_name
    
    171
    -                    client_digest = _digest_from_resource_name(resource_name)
    
    183
    +                    client_digest = _digest_from_upload_resource_name(resource_name)
    
    184
    +                    if client_digest is None:
    
    185
    +                        context.set_code(grpc.StatusCode.NOT_FOUND)
    
    186
    +                        return response
    
    187
    +
    
    172 188
                         try:
    
    173 189
                             _clean_up_cache(self.cas, client_digest.size_bytes)
    
    174 190
                         except ArtifactTooLargeException as e:
    
    ... ... @@ -177,14 +193,20 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
    177 193
                             return response
    
    178 194
                     elif request.resource_name:
    
    179 195
                         # If it is set on subsequent calls, it **must** match the value of the first request.
    
    180
    -                    assert request.resource_name == resource_name
    
    196
    +                    if request.resource_name != resource_name:
    
    197
    +                        context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
    
    198
    +                        return response
    
    181 199
                     out.write(request.data)
    
    182 200
                     offset += len(request.data)
    
    183 201
                     if request.finish_write:
    
    184
    -                    assert client_digest.size_bytes == offset
    
    202
    +                    if client_digest.size_bytes != offset:
    
    203
    +                        context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
    
    204
    +                        return response
    
    185 205
                         out.flush()
    
    186 206
                         digest = self.cas.add_object(path=out.name)
    
    187
    -                    assert digest.hash == client_digest.hash
    
    207
    +                    if digest.hash != client_digest.hash:
    
    208
    +                        context.set_code(grpc.StatusCode.FAILED_PRECONDITION)
    
    209
    +                        return response
    
    188 210
                         finished = True
    
    189 211
     
    
    190 212
             assert finished
    
    ... ... @@ -247,13 +269,48 @@ class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
    247 269
             return response
    
    248 270
     
    
    249 271
     
    
    250
    -def _digest_from_resource_name(resource_name):
    
    272
    +def _digest_from_download_resource_name(resource_name):
    
    273
    +    parts = resource_name.split('/')
    
    274
    +
    
    275
    +    # Accept requests from non-conforming BuildStream 1.1.x clients
    
    276
    +    if len(parts) == 2:
    
    277
    +        parts.insert(0, 'blobs')
    
    278
    +
    
    279
    +    if len(parts) != 3 or parts[0] != 'blobs':
    
    280
    +        return None
    
    281
    +
    
    282
    +    try:
    
    283
    +        digest = remote_execution_pb2.Digest()
    
    284
    +        digest.hash = parts[1]
    
    285
    +        digest.size_bytes = int(parts[2])
    
    286
    +        return digest
    
    287
    +    except ValueError:
    
    288
    +        return None
    
    289
    +
    
    290
    +
    
    291
    +def _digest_from_upload_resource_name(resource_name):
    
    251 292
         parts = resource_name.split('/')
    
    252
    -    assert len(parts) == 2
    
    253
    -    digest = remote_execution_pb2.Digest()
    
    254
    -    digest.hash = parts[0]
    
    255
    -    digest.size_bytes = int(parts[1])
    
    256
    -    return digest
    
    293
    +
    
    294
    +    # Accept requests from non-conforming BuildStream 1.1.x clients
    
    295
    +    if len(parts) == 2:
    
    296
    +        parts.insert(0, 'uploads')
    
    297
    +        parts.insert(1, str(uuid.uuid4()))
    
    298
    +        parts.insert(2, 'blobs')
    
    299
    +
    
    300
    +    if len(parts) < 5 or parts[0] != 'uploads' or parts[2] != 'blobs':
    
    301
    +        return None
    
    302
    +
    
    303
    +    try:
    
    304
    +        uuid_ = uuid.UUID(hex=parts[1])
    
    305
    +        if uuid_.version != 4:
    
    306
    +            return None
    
    307
    +
    
    308
    +        digest = remote_execution_pb2.Digest()
    
    309
    +        digest.hash = parts[3]
    
    310
    +        digest.size_bytes = int(parts[4])
    
    311
    +        return digest
    
    312
    +    except ValueError:
    
    313
    +        return None
    
    257 314
     
    
    258 315
     
    
    259 316
     def _has_object(cas, digest):
    



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]