Will Salmon pushed to branch willsalmon/CacheExpiryTest at BuildStream / buildstream
Commits:
- ed653fbc by Chandan Singh at 2018-08-08T00:06:28Z
- 65f382f1 by Chandan Singh at 2018-08-08T10:52:32Z
- c68dcab8 by Tiago Gomes at 2018-08-09T09:18:43Z
- 35ab0335 by Tiago Gomes at 2018-08-09T10:16:24Z
- 8aa33e23 by Valentin David at 2018-08-09T12:49:17Z
- ef7810f3 by Valentin David at 2018-08-09T14:06:50Z
- 9b29fafd by William Salmon at 2018-08-09T16:31:19Z
11 changed files:
- .gitlab-ci.yml
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_artifactcache/casserver.py
- buildstream/_fuse/fuse.py
- buildstream/_fuse/hardlinks.py
- tests/artifactcache/expiry.py
- tests/frontend/push.py
- tests/frontend/workspace.py
- tests/testutils/__init__.py
- + tests/testutils/runner_integration.py
Changes:
... | ... | @@ -26,15 +26,6 @@ source_dist:
26 | 26 | - tar -ztf dist/*
27 | 27 | - tarball=$(cd dist && echo $(ls *))
28 | 28 |
29 |    | - # Create an installer script
30 |    | - - |
31 |    | - cat > dist/install.sh << EOF
32 |    | - #!/bin/sh
33 |    | - tar -zxf ${tarball}
34 |    | - cd ${tarball%.tar.gz}
35 |    | - pip3 install --no-index .
36 |    | - EOF
37 |    | -
38 | 29 | # unpack tarball as `dist/buildstream` directory
39 | 30 | - |
40 | 31 | cat > dist/unpack.sh << EOF
... | ... | @@ -44,7 +35,6 @@ source_dist:
44 | 35 | EOF
45 | 36 |
46 | 37 | # Make our helpers executable
47 |    | - - chmod +x dist/install.sh
48 | 38 | - chmod +x dist/unpack.sh
49 | 39 | artifacts:
50 | 40 | paths:
... | ... | @@ -80,6 +80,8 @@ class ArtifactCache():
80 | 80 | self.context = context
81 | 81 | self.required_artifacts = set()
82 | 82 | self.extractdir = os.path.join(context.artifactdir, 'extract')
   | 83 | + self.tmpdir = os.path.join(context.artifactdir, 'tmp')
   | 84 | +
83 | 85 | self.max_size = context.cache_quota
84 | 86 | self.estimated_size = None
85 | 87 |
... | ... | @@ -89,7 +91,8 @@ class ArtifactCache():
89 | 91 | self._local = False
90 | 92 | self.cache_size = None
91 | 93 |
92 |    | - os.makedirs(context.artifactdir, exist_ok=True)
   | 94 | + os.makedirs(self.extractdir, exist_ok=True)
   | 95 | + os.makedirs(self.tmpdir, exist_ok=True)
93 | 96 |
94 | 97 | ################################################
95 | 98 | # Methods implemented on the abstract class #
... | ... | @@ -56,7 +56,8 @@ class CASCache(ArtifactCache):
56 | 56 | super().__init__(context)
57 | 57 |
58 | 58 | self.casdir = os.path.join(context.artifactdir, 'cas')
59 |    | - os.makedirs(os.path.join(self.casdir, 'tmp'), exist_ok=True)
   | 59 | + os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
   | 60 | + os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
60 | 61 |
61 | 62 | self._enable_push = enable_push
62 | 63 |
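The two constructor hunks above change which cache directories are created eagerly: ArtifactCache now prepares extract/ and a shared tmp/ directly under the artifact directory, while CASCache pre-creates cas/refs/heads and cas/objects and no longer needs cas/tmp. The sketch below is only an illustration of the resulting layout, with a throwaway directory standing in for BuildStream's real context.artifactdir:

    import os
    import tempfile

    # Illustration only: mirrors the makedirs calls added above, with a
    # throwaway directory standing in for context.artifactdir.
    with tempfile.TemporaryDirectory() as artifactdir:
        for subdir in ('extract',                             # ArtifactCache.__init__
                       'tmp',                                 # new shared staging area
                       os.path.join('cas', 'refs', 'heads'),  # CASCache.__init__
                       os.path.join('cas', 'objects')):
            os.makedirs(os.path.join(artifactdir, subdir), exist_ok=True)

        print(sorted(os.listdir(artifactdir)))  # ['cas', 'extract', 'tmp']

The cascache.py and casserver.py hunks that follow then point every NamedTemporaryFile at this new top-level tmp/ directory instead of the removed cas/tmp.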
... | ... | @@ -85,8 +86,6 @@ class CASCache(ArtifactCache):
85 | 86 | # artifact has already been extracted
86 | 87 | return dest
87 | 88 |
88 |    | - os.makedirs(self.extractdir, exist_ok=True)
89 |    | -
90 | 89 | with tempfile.TemporaryDirectory(prefix='tmp', dir=self.extractdir) as tmpdir:
91 | 90 | checkoutdir = os.path.join(tmpdir, ref)
92 | 91 | self._checkout(checkoutdir, tree)
... | ... | @@ -394,7 +393,7 @@ class CASCache(ArtifactCache):
394 | 393 | try:
395 | 394 | h = hashlib.sha256()
396 | 395 | # Always write out new file to avoid corruption if input file is modified
397 |     | - with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
    | 396 | + with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
398 | 397 | # Set mode bits to 0644
399 | 398 | os.chmod(out.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
400 | 399 |
... | ... | @@ -764,7 +763,7 @@ class CASCache(ArtifactCache):
764 | 763 | # already in local cache
765 | 764 | return
766 | 765 |
767 |     | - with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
    | 766 | + with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
768 | 767 | self._fetch_blob(remote, tree, out)
769 | 768 |
770 | 769 | directory = remote_execution_pb2.Directory()
... | ... | @@ -778,7 +777,7 @@ class CASCache(ArtifactCache):
778 | 777 | # already in local cache
779 | 778 | continue
780 | 779 |
781 |     | - with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as f:
    | 780 | + with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
782 | 781 | self._fetch_blob(remote, filenode.digest, f)
783 | 782 |
784 | 783 | digest = self.add_object(path=f.name)
... | ... | @@ -161,7 +161,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
161 | 161 | offset = 0
162 | 162 | finished = False
163 | 163 | resource_name = None
164 |     | - with tempfile.NamedTemporaryFile(dir=os.path.join(self.cas.casdir, 'tmp')) as out:
    | 164 | + with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
165 | 165 | for request in request_iterator:
166 | 166 | assert not finished
167 | 167 | assert request.write_offset == offset
... | ... | @@ -757,7 +757,11 @@ class FUSE(object):
757 | 757 | if self.raw_fi:
758 | 758 | return self.operations('create', path, mode, fi)
759 | 759 | else:
760 |     | - fi.fh = self.operations('create', path, mode)
    | 760 | + # This line is different from upstream to fix issues
    | 761 | + # reading file opened with O_CREAT|O_RDWR.
    | 762 | + # See issue #143.
    | 763 | + fi.fh = self.operations('create', path, mode, fi.flags)
    | 764 | + # END OF MODIFICATION
761 | 765 | return 0
762 | 766 |
763 | 767 | def ftruncate(self, path, length, fip):
... | ... | @@ -185,12 +185,12 @@ class SafeHardlinkOps(Operations):
185 | 185 |
186 | 186 | return os.open(full_path, flags)
187 | 187 |
188 |     | - def create(self, path, mode, fi=None):
    | 188 | + def create(self, path, mode, flags):
189 | 189 | full_path = self._full_path(path)
190 | 190 |
191 | 191 | # If it already exists, ensure it's a copy first
192 | 192 | self._ensure_copy(full_path)
193 |     | - return os.open(full_path, os.O_WRONLY | os.O_CREAT, mode)
    | 193 | + return os.open(full_path, flags, mode)
194 | 194 |
195 | 195 | def read(self, path, length, offset, fh):
196 | 196 | os.lseek(fh, offset, os.SEEK_SET)
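The fuse.py and hardlinks.py hunks above work together: FUSE.create() now forwards the caller's open flags, and SafeHardlinkOps.create() opens the backing file with those flags rather than a hard-coded os.O_WRONLY | os.O_CREAT. The sketch below is a standalone illustration (plain os calls, not BuildStream code) of the failure mode referenced by issue #143: a descriptor opened write-only cannot be read back, so a file created through the FUSE layer with O_CREAT|O_RDWR could not be read on the handle it returned.

    import os
    import tempfile

    # Illustration only, not BuildStream code.
    with tempfile.TemporaryDirectory() as tmp:
        path = os.path.join(tmp, 'example')

        # Old behaviour: the caller's O_RDWR was ignored and the file was
        # opened write-only, so reading it back on the same fd fails (EBADF).
        fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o644)
        os.write(fd, b'hello')
        os.lseek(fd, 0, os.SEEK_SET)
        try:
            os.read(fd, 5)
        except OSError as exc:
            print('read on a write-only fd fails:', exc)
        os.close(fd)

        # New behaviour: honouring the caller's flags gives a readable fd.
        fd = os.open(path, os.O_RDWR | os.O_CREAT, 0o644)
        os.write(fd, b'hello')
        os.lseek(fd, 0, os.SEEK_SET)
        print(os.read(fd, 5))  # b'hello'
        os.close(fd)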
... | ... | @@ -5,7 +5,7 @@ import pytest
5 | 5 | from buildstream import _yaml
6 | 6 | from buildstream._exceptions import ErrorDomain, LoadErrorReason
7 | 7 |
8 |   | -from tests.testutils import cli, create_element_size
  | 8 | +from tests.testutils import cli, create_element_size, wait_for_cache_granularity
9 | 9 |
10 | 10 |
11 | 11 | DATA_DIR = os.path.join(
... | ... | @@ -108,6 +108,8 @@ def test_expiry_order(cli, datafiles, tmpdir):
108 | 108 | res = cli.run(project=project, args=['build', 'target2.bst'])
109 | 109 | res.assert_success()
110 | 110 |
    | 111 | + wait_for_cache_granularity()
    | 112 | +
111 | 113 | # Now extract dep.bst
112 | 114 | res = cli.run(project=project, args=['checkout', 'dep.bst', checkout])
113 | 115 | res.assert_success()
... | ... | @@ -3,7 +3,7 @@ import pytest
3 | 3 |
4 | 4 | from buildstream._exceptions import ErrorDomain
5 | 5 | from tests.testutils import cli, create_artifact_share, create_element_size
6 |   | -from tests.testutils import generate_junction
  | 6 | +from tests.testutils import generate_junction, wait_for_cache_granularity
7 | 7 | from . import configure_project
8 | 8 |
9 | 9 |
... | ... | @@ -327,6 +327,8 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
327 | 327 | # Ensure element1 is cached locally
328 | 328 | assert cli.get_element_state(project, 'element1.bst') == 'cached'
329 | 329 |
    | 330 | + wait_for_cache_granularity()
    | 331 | +
330 | 332 | # Create and build the element3 (of 5 MB)
331 | 333 | create_element_size('element3.bst', project, element_path, [], int(5e6))
332 | 334 | result = cli.run(project=project, args=['build', 'element3.bst'])
... | ... | @@ -3,7 +3,7 @@ import pytest
3 | 3 | import shutil
4 | 4 | import subprocess
5 | 5 | from ruamel.yaml.comments import CommentedSet
6 |   | -from tests.testutils import cli, create_repo, ALL_REPO_KINDS
  | 6 | +from tests.testutils import cli, create_repo, ALL_REPO_KINDS, wait_for_cache_granularity
7 | 7 |
8 | 8 | from buildstream import _yaml
9 | 9 | from buildstream._exceptions import ErrorDomain, LoadError, LoadErrorReason
... | ... | @@ -466,6 +466,8 @@ def test_detect_modifications(cli, tmpdir, datafiles, modification, strict):
466 | 466 | assert cli.get_element_state(project, element_name) == 'cached'
467 | 467 | assert cli.get_element_key(project, element_name) != "{:?<64}".format('')
468 | 468 |
    | 469 | + wait_for_cache_granularity()
    | 470 | +
469 | 471 | # Modify the workspace in various different ways, ensuring we
470 | 472 | # properly detect the changes.
471 | 473 | #
... | ... | @@ -3,3 +3,4 @@ from .repo import create_repo, ALL_REPO_KINDS
3 | 3 | from .artifactshare import create_artifact_share
4 | 4 | from .element_generators import create_element_size
5 | 5 | from .junction import generate_junction
  | 6 | +from .runner_integration import wait_for_cache_granularity
  | 1 | +import time
  | 2 | +
  | 3 | +
  | 4 | +def wait_for_cache_granularity():
  | 5 | +    # This isn't called very often so has minimal impact on test runtime.
  | 6 | +    # If this changes it may be worth while adding a more sophisticated approach.
  | 7 | +    """
  | 8 | +    Mitigate the coarse granularity of the gitlab runners mtime
  | 9 | +
  | 10 | +    This function waits for the mtime to increment so that the cache can sort by mtime and
  | 11 | +    get the most recent results.
  | 12 | +    """
  | 13 | +    time.sleep(1.1)