[Notes] [Git][BuildStream/buildstream][willsalmon/CacheExpiryTest] 19 commits: install_linux_distro.rst: Fix broken links




Will Salmon pushed to branch willsalmon/CacheExpiryTest at BuildStream / buildstream

Commits:

18 changed files:

Changes:

  • buildstream/_artifactcache/cascache.py
    @@ -846,6 +846,9 @@ class _CASRemote():

     
     def _grouper(iterable, n):
    -    # pylint: disable=stop-iteration-return
         while True:
    -        yield itertools.chain([next(iterable)], itertools.islice(iterable, n - 1))
    +        try:
    +            current = next(iterable)
    +        except StopIteration:
    +            return
    +        yield itertools.chain([current], itertools.islice(iterable, n - 1))
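
    The change above is needed because PEP 479 (the default from Python 3.7) turns a
    StopIteration escaping from inside a generator into a RuntimeError, so the bare
    next(iterable) had to be wrapped. A minimal standalone sketch of the fixed helper,
    with a hypothetical usage line:

        import itertools

        def _grouper(iterable, n):
            # Yield the iterator's items in chunks of up to n; returning on
            # StopIteration avoids the PEP 479 RuntimeError once the iterator runs dry.
            while True:
                try:
                    current = next(iterable)
                except StopIteration:
                    return
                yield itertools.chain([current], itertools.islice(iterable, n - 1))

        # Each chunk must be consumed before requesting the next one, since all
        # chunks share the same underlying iterator:
        print(list(map(list, _grouper(iter(range(7)), 3))))   # [[0, 1, 2], [3, 4, 5], [6]]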

  • buildstream/_pipeline.py
    @@ -358,10 +358,24 @@ class Pipeline():
                         inconsistent.append(element)

             if inconsistent:
    -            detail = "Exact versions are missing for the following elements\n" + \
    -                     "Try tracking these elements first with `bst track`\n\n"
    +            detail = "Exact versions are missing for the following elements:\n\n"
    +
    +            missingTrack = 0
                 for element in inconsistent:
    -                detail += "  " + element._get_full_name() + "\n"
    +                detail += "  " + element._get_full_name()
    +                for source in element.sources():
    +                    if not source._get_consistency() and not source.get_ref():
    +                        if hasattr(source, 'tracking') and source.tracking is None:
    +                            detail += ": Source {} is missing ref and track. ".format(source._get_full_name()) + \
    +                                      "Please specify a ref or branch/tag to track."
    +                            missingTrack = 1
    +
    +                detail += "\n"
    +
    +            if missingTrack:
    +                detail += "\nThen track these elements with `bst track`\n"
    +            else:
    +                detail += "\nTry tracking these elements first with `bst track`\n"
                 raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")

         #############################################################
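
    To illustrate what the reworked error detail in _pipeline.py above looks like to the
    user, here is a rough standalone sketch; the FakeElement/FakeSource stand-ins are
    hypothetical and only the string assembly mirrors the diff:

        # Hypothetical stand-ins for an element whose source has neither ref nor track.
        class FakeSource:
            tracking = None

            def _get_consistency(self): return 0   # inconsistent
            def get_ref(self): return None
            def _get_full_name(self): return "git source at target.bst"

        class FakeElement:
            def _get_full_name(self): return "target.bst"
            def sources(self): return [FakeSource()]

        detail = "Exact versions are missing for the following elements:\n\n"
        missingTrack = 0
        for element in [FakeElement()]:
            detail += "  " + element._get_full_name()
            for source in element.sources():
                if not source._get_consistency() and not source.get_ref():
                    if hasattr(source, 'tracking') and source.tracking is None:
                        detail += ": Source {} is missing ref and track. ".format(source._get_full_name()) + \
                                  "Please specify a ref or branch/tag to track."
                        missingTrack = 1
            detail += "\n"
        if missingTrack:
            detail += "\nThen track these elements with `bst track`\n"
        else:
            detail += "\nTry tracking these elements first with `bst track`\n"
        print(detail)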
    

  • buildstream/_scheduler/queues/pullqueue.py
    @@ -29,7 +29,7 @@ class PullQueue(Queue):

         action_name = "Pull"
         complete_name = "Pulled"
    -    resources = [ResourceType.UPLOAD]
    +    resources = [ResourceType.DOWNLOAD]

         def process(self, element):
             # returns whether an artifact was downloaded or not
    

  • buildstream/_versions.py
    @@ -23,7 +23,7 @@
     # This version is bumped whenever enhancements are made
     # to the `project.conf` format or the core element format.
     #
    -BST_FORMAT_VERSION = 12
    +BST_FORMAT_VERSION = 13


     # The base BuildStream artifact version
    

  • buildstream/data/userconfig.yaml
    @@ -35,13 +35,13 @@ cache:
     #
     scheduler:

    -  # Maximum number of simultaneous source downloading tasks.
    +  # Maximum number of simultaneous downloading tasks.
       fetchers: 10

       # Maximum number of simultaneous build tasks.
       builders: 4

    -  # Maximum number of simultaneous artifact uploading tasks.
    +  # Maximum number of simultaneous uploading tasks.
       pushers: 4

       # Maximum number of retries for network tasks.
    

  • buildstream/plugins/sources/git.py
    @@ -363,6 +363,12 @@ class GitSource(Source):

             # If self.tracking is not specified it's not an error, just silently return
             if not self.tracking:
    +            # Is there a better way to check if a ref is given.
    +            if self.mirror.ref is None:
    +                detail = 'Without a tracking branch ref can not be updated. Please ' + \
    +                         'provide a ref or a track.'
    +                raise SourceError("{}: No track or ref".format(self),
    +                                  detail=detail, reason="track-attempt-no-track")
                 return None

             with self.timed_activity("Tracking {} from {}"
    

  • buildstream/plugins/sources/remote.py
    @@ -35,6 +35,10 @@ remote - stage files from remote urls
        # If not specified, the basename of the url will be used.
        # filename: customfilename

    +   # Optionally specify whether the downloaded file should be
    +   # marked executable.
    +   # executable: true
    +
        # Specify the url. Using an alias defined in your project
        # configuration is encouraged. 'bst track' will update the
        # sha256sum in 'ref' to the downloaded file's sha256sum.

    @@ -43,6 +47,8 @@ remote - stage files from remote urls
        # Specify the ref. It's a sha256sum of the file you download.
        ref: 6c9f6f68a131ec6381da82f2bff978083ed7f4f7991d931bfa767b7965ebc94b

    +
    +
     .. note::

        The ``remote`` plugin is available since :ref:`format version 10 <project_format_version>`

    @@ -60,22 +66,31 @@ class RemoteSource(DownloadableFileSource):
             super().configure(node)

             self.filename = self.node_get_member(node, str, 'filename', os.path.basename(self.url))
    +        self.executable = self.node_get_member(node, bool, 'executable', False)

             if os.sep in self.filename:
                 raise SourceError('{}: filename parameter cannot contain directories'.format(self),
                                   reason="filename-contains-directory")
    -        self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename'])
    +        self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename', 'executable'])

         def get_unique_key(self):
    -        return super().get_unique_key() + [self.filename]
    +        return super().get_unique_key() + [self.filename, self.executable]

         def stage(self, directory):
             # Same as in local plugin, don't use hardlinks to stage sources, they
             # are not write protected in the sandbox.
             dest = os.path.join(directory, self.filename)
             with self.timed_activity("Staging remote file to {}".format(dest)):
    +
                 utils.safe_copy(self._get_mirror_file(), dest)

    +            # To prevent user's umask introducing variability here, explicitly set
    +            # file modes.
    +            if self.executable:
    +                os.chmod(dest, 0o755)
    +            else:
    +                os.chmod(dest, 0o644)
    +

     def setup():
         return RemoteSource
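
    A side note on the chmod calls added in stage() above: a newly created file's mode is
    filtered by the caller's umask, so without an explicit chmod the staged file's
    permissions could differ from user to user. A minimal sketch of the deterministic
    behaviour (the helper name here is hypothetical):

        import os
        import stat
        import tempfile

        def stage_with_fixed_mode(dest, executable):
            # The file is created subject to the caller's umask, then the mode is
            # forced to a fixed value so the staged result never varies by user.
            with open(dest, 'w') as f:
                f.write('data')
            os.chmod(dest, 0o755 if executable else 0o644)
            return stat.S_IMODE(os.stat(dest).st_mode)

        with tempfile.TemporaryDirectory() as tmp:
            assert stage_with_fixed_mode(os.path.join(tmp, 'exe'), True) == 0o755
            assert stage_with_fixed_mode(os.path.join(tmp, 'plain'), False) == 0o644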

  • doc/source/install_artifacts.rst
    @@ -143,6 +143,50 @@ Instance with push and requiring client authentication:

         bst-artifact-server --port 11002 --server-key server.key --server-cert server.crt --client-certs authorized.crt --enable-push /home/artifacts/artifacts

    +Managing the cache with systemd
    +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    +
    +It is better to run the cache as a systemd service, especially if it is running on a dedicated server, as this will allow systemd to manage the cache, incase the server ever encounters any issues.
    +
    +Below are two examples of how to run the cache server as a systemd service, one is for pull only and the other is configured for push & pull.
    +
    +.. code:: ini
    +
    +   #
    +   # Pull
    +   #
    +   [Unit]
    +   Description=Buildstream Artifact pull server
    +   After=remote-fs.target network-online.target
    +
    +   [Service]
    +   Environment="LC_ALL=C.UTF-8"
    +   ExecStart=/usr/local/bin/bst-artifact-server --port 11001 --server-key {{certs_path}}/privkey.pem --
    +   server-cert {{certs_path}}/fullchain.pem {{artifacts_path}}
    +   User=artifacts
    +
    +   [Install]
    +   WantedBy=multi-user.target
    +
    +
    +   #
    +   # Pull/Push
    +   #
    +   [Unit]
    +   Description=Buildstream Artifact pull/push server
    +   After=remote-fs.target network-online.target
    +
    +   [Service]
    +   Environment="LC_ALL=C.UTF-8"
    +   ExecStart=/usr/local/bin/bst-artifact-server --port 11002 --server-key {{certs_path}}/privkey.pem --
    +   server-cert {{certs_path}}/fullchain.pem --client-certs /home/artifacts/authorized.crt --enable-push /
    +   {{artifacts_path}}
    +   User=artifacts
    +
    +   [Install]
    +   WantedBy=multi-user.target
    +
    +Here we define when systemd should start the service, which is after the networking stack has been started, we then define how to run the cache with the desired configuration, under the artifacts user. The {{ }} are there to denote where you should change these files to point to your desired locations.

     User configuration
     ~~~~~~~~~~~~~~~~~~
    

  • doc/source/install_linux_distro.rst
    @@ -57,9 +57,20 @@ Install the dependencies with::
     For the default plugins::

       sudo pacman -S \
    -      bzr git lzip ostree patch python-arpy python-gobject
    +      bzr git lzip ostree patch python-gobject


    +The package *python-arpy* is required by the deb source plugin. This is not
    +obtainable via `pacman`, you must get *python-arpy* from AUR:
    +https://aur.archlinux.org/packages/python-arpy/
    +
    +To install::
    +
    +  wget https://aur.archlinux.org/cgit/aur.git/snapshot/python-arpy.tar.gz
    +  tar -xvf python-arpy.tar.gz
    +  cd python-arpy
    +  makepkg -si
    +
     Debian
     ++++++
     Install the dependencies with::

    @@ -132,8 +143,8 @@ For the default plugins::

     Ubuntu 16.04 LTS
     ^^^^^^^^^^^^^^^^
    -On Ubuntu 16.04, neither `bubblewrap<https://github.com/projectatomic/bubblewrap/>`
    -or `ostree<https://github.com/ostreedev/ostree>` are available in the official repositories.
    +On Ubuntu 16.04, neither `bubblewrap <https://github.com/projectatomic/bubblewrap/>`_
    +or `ostree <https://github.com/ostreedev/ostree>`_ are available in the official repositories.
     You will need to install them in whichever way you see fit. Refer the the upstream documentation
     for advice on this.


    @@ -216,17 +227,18 @@ Installing from distro packages

     Arch Linux
     ~~~~~~~~~~
    -Install `buildstream <https://aur.archlinux.org/packages/buildstream>`_
    -from `AUR <https://wiki.archlinux.org/index.php/Arch_User_Repository#Installing_packages>`_.
    -Alternatively, use
    -`buildstream-git <https://aur.archlinux.org/packages/buildstream-git>`_
    -for the lastest version of the development branch.
    +Packages for Arch exist in `AUR <https://wiki.archlinux.org/index.php/Arch_User_Repository#Installing_packages>`_.
    +Two different package versions are available:
    +
    +* Latest release: `buildstream <https://aur.archlinux.org/packages/buildstream>`_
    +* Latest development snapshot: `buildstream-git <https://aur.archlinux.org/packages/buildstream-git>`_
    +

     Fedora
     ~~~~~~

     BuildStream is not yet in the official Fedora repositories, but you can
    -install it from a Copr:
    +install it from a Copr::

       sudo dnf copr enable bochecha/buildstream
       sudo dnf install buildstream
    

  • setup.py
    @@ -272,6 +272,5 @@ setup(name='BuildStream',
                          'pytest-cov >= 2.5.0',
                          # Provide option to run tests in parallel, less reliable
                          'pytest-xdist',
    -                     'pytest >= 3.1.0',
    -                     'pylint >= 1.8 , < 2'],
    +                     'pytest >= 3.1.0'],
           zip_safe=False)

  • tests/artifactcache/expiry.py
    @@ -5,7 +5,7 @@ import pytest
     from buildstream import _yaml
     from buildstream._exceptions import ErrorDomain, LoadErrorReason

    -from tests.testutils import cli, create_element_size
    +from tests.testutils import cli, create_element_size, wait_for_cache_granularity


     DATA_DIR = os.path.join(

    @@ -108,6 +108,8 @@ def test_expiry_order(cli, datafiles, tmpdir):
         res = cli.run(project=project, args=['build', 'target2.bst'])
         res.assert_success()

    +    wait_for_cache_granularity()
    +
         # Now extract dep.bst
         res = cli.run(project=project, args=['checkout', 'dep.bst', checkout])
         res.assert_success()
    

  • tests/frontend/push.py
    @@ -3,7 +3,7 @@ import pytest

     from buildstream._exceptions import ErrorDomain
     from tests.testutils import cli, create_artifact_share, create_element_size
    -from tests.testutils import generate_junction
    +from tests.testutils import generate_junction, wait_for_cache_granularity
     from . import configure_project



    @@ -327,6 +327,8 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
             # Ensure element1 is cached locally
             assert cli.get_element_state(project, 'element1.bst') == 'cached'

    +        wait_for_cache_granularity()
    +
             # Create and build the element3 (of 5 MB)
             create_element_size('element3.bst', project, element_path, [], int(5e6))
             result = cli.run(project=project, args=['build', 'element3.bst'])
    

  • tests/frontend/workspace.py
    @@ -3,7 +3,7 @@ import pytest
     import shutil
     import subprocess
     from ruamel.yaml.comments import CommentedSet
    -from tests.testutils import cli, create_repo, ALL_REPO_KINDS
    +from tests.testutils import cli, create_repo, ALL_REPO_KINDS, wait_for_cache_granularity

     from buildstream import _yaml
     from buildstream._exceptions import ErrorDomain, LoadError, LoadErrorReason

    @@ -466,6 +466,8 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
         assert cli.get_element_state(project, element_name) == 'cached'
         assert cli.get_element_key(project, element_name) != "{:?<64}".format('')

    +    wait_for_cache_granularity()
    +
         # Modify the workspace in various different ways, ensuring we
         # properly detect the changes.
         #
    

  • tests/sources/git.py
    @@ -359,3 +359,45 @@ def test_submodule_track_ignore_inconsistent(cli, tmpdir, datafiles):

         # Assert that we are just fine without it, and emit a warning to the user.
         assert "Ignoring inconsistent submodule" in result.stderr
    +
    +
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    +def test_submodule_track_no_ref_or_track(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    # Create the repo from 'repofiles' subdir
    +    repo = create_repo('git', str(tmpdir))
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    +
    +    # Write out our test target
    +    gitsource = repo.source_config(ref=None)
    +    gitsource.pop('track')
    +    element = {
    +        'kind': 'import',
    +        'sources': [
    +            gitsource
    +        ]
    +    }
    +
    +    _yaml.dump(element, os.path.join(project, 'target.bst'))
    +
    +    # Track will encounter an inconsistent submodule without any ref
    +    result = cli.run(project=project, args=['track', 'target.bst'])
    +    result.assert_main_error(ErrorDomain.STREAM, None)
    +    result.assert_task_error(ErrorDomain.SOURCE, 'track-attempt-no-track')
    +
    +    # Assert that we are just fine without it, and emit a warning to the user.
    +    assert "FAILURE git source at" in result.stderr
    +    assert "Without a tracking branch ref can not be updated. Please " + \
    +        "provide a ref or a track." in result.stderr
    +
    +    # Track will encounter an inconsistent submodule without any ref
    +    result = cli.run(project=project, args=['build', 'target.bst'])
    +    result.assert_main_error(ErrorDomain.PIPELINE, 'inconsistent-pipeline')
    +    result.assert_task_error(None, None)
    +
    +    # Assert that we are just fine without it, and emit a warning to the user.
    +    assert "Exact versions are missing for the following elements" in result.stderr
    +    assert "is missing ref and track." in result.stderr
    +    assert "Then track these elements with `bst track`" in result.stderr

  • tests/sources/remote.py
     import os
    +import stat
     import pytest

     from buildstream._exceptions import ErrorDomain

    @@ -82,7 +83,14 @@ def test_simple_file_build(cli, tmpdir, datafiles):
         result.assert_success()
         # Note that the url of the file in target.bst is actually /dir/file
         # but this tests confirms we take the basename
    -    assert(os.path.exists(os.path.join(checkoutdir, 'file')))
    +    checkout_file = os.path.join(checkoutdir, 'file')
    +    assert(os.path.exists(checkout_file))
    +
    +    mode = os.stat(checkout_file).st_mode
    +    # Assert not executable by anyone
    +    assert(not (mode & (stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)))
    +    # Assert not writeable by anyone other than me
    +    assert(not (mode & (stat.S_IWGRP | stat.S_IWOTH)))


     @pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file-custom-name'))

    @@ -119,6 +127,7 @@ def test_unique_key(cli, tmpdir, datafiles):
         generate_project(project, tmpdir)
         assert cli.get_element_state(project, 'target.bst') == "fetch needed"
         assert cli.get_element_state(project, 'target-custom.bst') == "fetch needed"
    +    assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
         # Try to fetch it
         result = cli.run(project=project, args=[
             'fetch', 'target.bst'

    @@ -127,7 +136,31 @@ def test_unique_key(cli, tmpdir, datafiles):
         # We should download the file only once
         assert cli.get_element_state(project, 'target.bst') == 'buildable'
         assert cli.get_element_state(project, 'target-custom.bst') == 'buildable'
    +    assert cli.get_element_state(project, 'target-custom-executable.bst') == 'buildable'

         # But the cache key is different because the 'filename' is different.
         assert cli.get_element_key(project, 'target.bst') != \
    -        cli.get_element_key(project, 'target-custom.bst')
    +        cli.get_element_key(project, 'target-custom.bst') != \
    +        cli.get_element_key(project, 'target-custom-executable.bst')
    +
    +
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'unique-keys'))
    +def test_executable(cli, tmpdir, datafiles):
    +    '''This test confirms that the 'ecxecutable' parameter is honoured.
    +    '''
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    generate_project(project, tmpdir)
    +    checkoutdir = os.path.join(str(tmpdir), "checkout")
    +    assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
    +    # Try to fetch it
    +    result = cli.run(project=project, args=[
    +        'build', 'target-custom-executable.bst'
    +    ])
    +
    +    result = cli.run(project=project, args=[
    +        'checkout', 'target-custom-executable.bst', checkoutdir
    +    ])
    +    mode = os.stat(os.path.join(checkoutdir, 'some-custom-file')).st_mode
    +    assert (mode & stat.S_IEXEC)
    +    # Assert executable by anyone
    +    assert(mode & (stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH))

  • tests/sources/remote/unique-keys/target-custom-executable.bst
    +kind: import
    +description: test
    +sources:
    +- kind: remote
    +  url: tmpdir:/dir/file
    +  ref: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
    +  filename: some-custom-file
    +  executable: true

  • tests/testutils/__init__.py
    @@ -3,3 +3,4 @@ from .repo import create_repo, ALL_REPO_KINDS
     from .artifactshare import create_artifact_share
     from .element_generators import create_element_size
     from .junction import generate_junction
    +from .runner_integration import wait_for_cache_granularity

  • tests/testutils/runner_integration.py
    +import time
    +
    +
    +def wait_for_cache_granularity():
    +    # This isn't called very often so has minimal impact on test runtime.
    +    # If this changes it may be worth while adding a more sophisticated approach.
    +    """
    +    Mitigate the coarse granularity of the gitlab runners mtime
    +
    +    This function waits for the mtime to increment so that the cache can sort by mtime and
    +    get the most recent results.
    +    """
    +    time.sleep(1.1)
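
    The 1.1 second sleep above exists because mtimes can be recorded with one-second
    resolution on some filesystems, so two artifacts written back to back may be
    indistinguishable to a sort-by-mtime expiry policy. A quick standalone sketch of the
    effect (the function name is hypothetical):

        import os
        import tempfile
        import time

        def mtime_strictly_newer(delay):
            # Write two files 'delay' seconds apart and report whether the second
            # file's mtime is strictly greater than the first one's.
            with tempfile.TemporaryDirectory() as tmp:
                first, second = os.path.join(tmp, 'a'), os.path.join(tmp, 'b')
                open(first, 'w').close()
                time.sleep(delay)
                open(second, 'w').close()
                return os.stat(second).st_mtime > os.stat(first).st_mtime

        # With no delay the two mtimes may collide on coarse-grained filesystems;
        # waiting just over a second guarantees a strictly newer timestamp.
        print(mtime_strictly_newer(0))    # possibly False
        print(mtime_strictly_newer(1.1))  # True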


