Valentin David pushed to branch valentindavid/make_cache_dir at BuildStream / buildstream
Commits:
-
0098a900
by Tristan Van Berkom at 2019-01-23T13:57:53Z
-
9be31641
by Tristan Van Berkom at 2019-01-23T13:57:53Z
-
3a6e953f
by Tristan Van Berkom at 2019-01-24T01:59:13Z
-
68339b19
by Tristan Van Berkom at 2019-01-24T05:01:55Z
-
6ecc2b0a
by Tristan Van Berkom at 2019-01-24T05:01:55Z
-
1140aed5
by Tristan Van Berkom at 2019-01-24T05:01:55Z
-
b7ea8b74
by Tristan Van Berkom at 2019-01-24T05:01:55Z
-
c2c004f9
by Tristan Van Berkom at 2019-01-24T05:01:55Z
-
21b2958b
by Tristan Van Berkom at 2019-01-24T05:01:55Z
-
83fcaa9f
by Tristan Van Berkom at 2019-01-24T05:01:56Z
-
46eb3018
by Tristan Van Berkom at 2019-01-24T05:01:56Z
-
341b131b
by Tristan Van Berkom at 2019-01-24T06:13:57Z
-
ef2b4648
by Tristan Van Berkom at 2019-01-24T06:14:20Z
-
40c18174
by Tristan Van Berkom at 2019-01-24T07:00:50Z
-
6a556d8c
by Valentin David at 2019-01-24T13:19:34Z
12 changed files:
- buildstream/_artifactcache.py
- conftest.py
- tests/artifactcache/cache_size.py
- tests/artifactcache/expiry.py
- tests/elements/filter.py
- tests/frontend/pull.py
- tests/frontend/push.py
- tests/frontend/track.py
- tests/frontend/workspace.py
- − tests/internals/utils.py
- tests/sources/remote.py
- tests/testutils/runcli.py
Changes:
... | ... | @@ -882,16 +882,16 @@ class ArtifactCache(): |
882 | 882 |
else:
|
883 | 883 |
available = utils._pretty_size(available_space)
|
884 | 884 |
|
885 |
- raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
886 |
- ("Your system does not have enough available " +
|
|
887 |
- "space to support the cache quota specified.\n" +
|
|
888 |
- "\nYou have specified a quota of {quota} total disk space.\n" +
|
|
889 |
- "- The filesystem containing {local_cache_path} only " +
|
|
890 |
- "has: {available_size} available.")
|
|
891 |
- .format(
|
|
892 |
- quota=self.context.config_cache_quota,
|
|
893 |
- local_cache_path=self.context.artifactdir,
|
|
894 |
- available_size=available))
|
|
885 |
+ raise ArtifactError("Your system does not have enough available " +
|
|
886 |
+ "space to support the cache quota specified.",
|
|
887 |
+ detail=("You have specified a quota of {quota} total disk space.\n" +
|
|
888 |
+ "The filesystem containing {local_cache_path} only " +
|
|
889 |
+ "has {available_size} available.")
|
|
890 |
+ .format(
|
|
891 |
+ quota=self.context.config_cache_quota,
|
|
892 |
+ local_cache_path=self.context.artifactdir,
|
|
893 |
+ available_size=available),
|
|
894 |
+ reason='insufficient-storage-for-quota')
|
|
895 | 895 |
|
896 | 896 |
# Place a slight headroom (2e9 (2GB) on the cache_quota) into
|
897 | 897 |
# cache_quota to try and avoid exceptions.
|
... | ... | @@ -54,6 +54,7 @@ class IntegrationCache(): |
54 | 54 |
|
55 | 55 |
def __init__(self, cache):
|
56 | 56 |
cache = os.path.abspath(cache)
|
57 |
+ os.makedirs(cache, exist_ok=True)
|
|
57 | 58 |
|
58 | 59 |
# Use the same sources every time
|
59 | 60 |
self.sources = os.path.join(cache, 'sources')
|
1 | 1 |
import os
|
2 | 2 |
import pytest
|
3 |
+from unittest import mock
|
|
3 | 4 |
|
4 | 5 |
from buildstream import _yaml
|
5 | 6 |
from buildstream._artifactcache import CACHE_SIZE_FILE
|
7 |
+from buildstream._exceptions import ErrorDomain
|
|
6 | 8 |
|
7 | 9 |
from tests.testutils import cli, create_element_size
|
8 | 10 |
|
... | ... | @@ -60,3 +62,29 @@ def test_cache_size_write(cli, tmpdir): |
60 | 62 |
with open(sizefile, "r") as f:
|
61 | 63 |
size_data = f.read()
|
62 | 64 |
size = int(size_data)
|
65 |
+ |
|
66 |
+ |
|
67 |
+def test_quota_over_1024T(cli, tmpdir):
|
|
68 |
+ KiB = 1024
|
|
69 |
+ MiB = (KiB * 1024)
|
|
70 |
+ GiB = (MiB * 1024)
|
|
71 |
+ TiB = (GiB * 1024)
|
|
72 |
+ |
|
73 |
+ cli.configure({
|
|
74 |
+ 'cache': {
|
|
75 |
+ 'quota': 2048 * TiB
|
|
76 |
+ }
|
|
77 |
+ })
|
|
78 |
+ project = tmpdir.join("main")
|
|
79 |
+ os.makedirs(str(project))
|
|
80 |
+ _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
|
|
81 |
+ |
|
82 |
+ volume_space_patch = mock.patch(
|
|
83 |
+ "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
|
|
84 |
+ autospec=True,
|
|
85 |
+ return_value=(1025 * TiB, 1025 * TiB)
|
|
86 |
+ )
|
|
87 |
+ |
|
88 |
+ with volume_space_patch:
|
|
89 |
+ result = cli.run(project, args=["build", "file.bst"])
|
|
90 |
+ result.assert_main_error(ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')
|
... | ... | @@ -66,8 +66,9 @@ def test_artifact_expires(cli, datafiles, tmpdir): |
66 | 66 |
res.assert_success()
|
67 | 67 |
|
68 | 68 |
# Check that the correct element remains in the cache
|
69 |
- assert cli.get_element_state(project, 'target.bst') != 'cached'
|
|
70 |
- assert cli.get_element_state(project, 'target2.bst') == 'cached'
|
|
69 |
+ states = cli.get_element_states(project, ['target.bst', 'target2.bst'])
|
|
70 |
+ assert states['target.bst'] != 'cached'
|
|
71 |
+ assert states['target2.bst'] == 'cached'
|
|
71 | 72 |
|
72 | 73 |
|
73 | 74 |
# Ensure that we don't end up deleting the whole cache (or worse) if
|
... | ... | @@ -144,9 +145,11 @@ def test_expiry_order(cli, datafiles, tmpdir): |
144 | 145 |
# have been removed.
|
145 | 146 |
# Note that buildstream will reduce the cache to 50% of the
|
146 | 147 |
# original size - we therefore remove multiple elements.
|
147 |
- |
|
148 |
- assert (tuple(cli.get_element_state(project, element) for element in
|
|
149 |
- ('unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst')) ==
|
|
148 |
+ check_elements = [
|
|
149 |
+ 'unrelated.bst', 'target.bst', 'target2.bst', 'dep.bst', 'expire.bst'
|
|
150 |
+ ]
|
|
151 |
+ states = cli.get_element_states(project, check_elements)
|
|
152 |
+ assert (tuple(states[element] for element in check_elements) ==
|
|
150 | 153 |
('buildable', 'buildable', 'buildable', 'cached', 'cached', ))
|
151 | 154 |
|
152 | 155 |
|
... | ... | @@ -176,8 +179,9 @@ def test_keep_dependencies(cli, datafiles, tmpdir): |
176 | 179 |
res.assert_success()
|
177 | 180 |
|
178 | 181 |
# Check that the correct element remains in the cache
|
179 |
- assert cli.get_element_state(project, 'dependency.bst') == 'cached'
|
|
180 |
- assert cli.get_element_state(project, 'unrelated.bst') == 'cached'
|
|
182 |
+ states = cli.get_element_states(project, ['dependency.bst', 'unrelated.bst'])
|
|
183 |
+ assert states['dependency.bst'] == 'cached'
|
|
184 |
+ assert states['unrelated.bst'] == 'cached'
|
|
181 | 185 |
|
182 | 186 |
# We try to build an element which depends on the LRU artifact,
|
183 | 187 |
# and could therefore fail if we didn't make sure dependencies
|
... | ... | @@ -192,9 +196,10 @@ def test_keep_dependencies(cli, datafiles, tmpdir): |
192 | 196 |
res = cli.run(project=project, args=['build', 'target.bst'])
|
193 | 197 |
res.assert_success()
|
194 | 198 |
|
195 |
- assert cli.get_element_state(project, 'unrelated.bst') != 'cached'
|
|
196 |
- assert cli.get_element_state(project, 'dependency.bst') == 'cached'
|
|
197 |
- assert cli.get_element_state(project, 'target.bst') == 'cached'
|
|
199 |
+ states = cli.get_element_states(project, ['target.bst', 'unrelated.bst'])
|
|
200 |
+ assert states['target.bst'] == 'cached'
|
|
201 |
+ assert states['dependency.bst'] == 'cached'
|
|
202 |
+ assert states['unrelated.bst'] != 'cached'
|
|
198 | 203 |
|
199 | 204 |
|
200 | 205 |
# Assert that we never delete a dependency required for a build tree
|
... | ... | @@ -239,11 +244,11 @@ def test_never_delete_required(cli, datafiles, tmpdir): |
239 | 244 |
# life there may potentially be N-builders cached artifacts
|
240 | 245 |
# which exceed the quota
|
241 | 246 |
#
|
242 |
- assert cli.get_element_state(project, 'dep1.bst') == 'cached'
|
|
243 |
- assert cli.get_element_state(project, 'dep2.bst') == 'cached'
|
|
244 |
- |
|
245 |
- assert cli.get_element_state(project, 'dep3.bst') != 'cached'
|
|
246 |
- assert cli.get_element_state(project, 'target.bst') != 'cached'
|
|
247 |
+ states = cli.get_element_states(project, ['target.bst'])
|
|
248 |
+ assert states['dep1.bst'] == 'cached'
|
|
249 |
+ assert states['dep2.bst'] == 'cached'
|
|
250 |
+ assert states['dep3.bst'] != 'cached'
|
|
251 |
+ assert states['target.bst'] != 'cached'
|
|
247 | 252 |
|
248 | 253 |
|
249 | 254 |
# Assert that we never delete a dependency required for a build tree,
|
... | ... | @@ -275,10 +280,11 @@ def test_never_delete_required_track(cli, datafiles, tmpdir): |
275 | 280 |
res.assert_success()
|
276 | 281 |
|
277 | 282 |
# They should all be cached
|
278 |
- assert cli.get_element_state(project, 'dep1.bst') == 'cached'
|
|
279 |
- assert cli.get_element_state(project, 'dep2.bst') == 'cached'
|
|
280 |
- assert cli.get_element_state(project, 'dep3.bst') == 'cached'
|
|
281 |
- assert cli.get_element_state(project, 'target.bst') == 'cached'
|
|
283 |
+ states = cli.get_element_states(project, ['target.bst'])
|
|
284 |
+ assert states['dep1.bst'] == 'cached'
|
|
285 |
+ assert states['dep2.bst'] == 'cached'
|
|
286 |
+ assert states['dep3.bst'] == 'cached'
|
|
287 |
+ assert states['target.bst'] == 'cached'
|
|
282 | 288 |
|
283 | 289 |
# Now increase the size of all the elements
|
284 | 290 |
#
|
... | ... | @@ -296,28 +302,37 @@ def test_never_delete_required_track(cli, datafiles, tmpdir): |
296 | 302 |
|
297 | 303 |
# Expect the same result that we did in test_never_delete_required()
|
298 | 304 |
#
|
299 |
- assert cli.get_element_state(project, 'dep1.bst') == 'cached'
|
|
300 |
- assert cli.get_element_state(project, 'dep2.bst') == 'cached'
|
|
301 |
- assert cli.get_element_state(project, 'dep3.bst') != 'cached'
|
|
302 |
- assert cli.get_element_state(project, 'target.bst') != 'cached'
|
|
305 |
+ states = cli.get_element_states(project, ['target.bst'])
|
|
306 |
+ assert states['dep1.bst'] == 'cached'
|
|
307 |
+ assert states['dep2.bst'] == 'cached'
|
|
308 |
+ assert states['dep3.bst'] != 'cached'
|
|
309 |
+ assert states['target.bst'] != 'cached'
|
|
303 | 310 |
|
304 | 311 |
|
305 | 312 |
# Ensure that only valid cache quotas make it through the loading
|
306 | 313 |
# process.
|
307 |
-@pytest.mark.parametrize("quota,success", [
|
|
308 |
- ("1", True),
|
|
309 |
- ("1K", True),
|
|
310 |
- ("50%", True),
|
|
311 |
- ("infinity", True),
|
|
312 |
- ("0", True),
|
|
313 |
- ("-1", False),
|
|
314 |
- ("pony", False),
|
|
315 |
- ("7K", False),
|
|
316 |
- ("70%", False),
|
|
317 |
- ("200%", False)
|
|
314 |
+#
|
|
315 |
+# This test virtualizes the condition to assume a storage volume
|
|
316 |
+# has 10K total disk space, and 6K of it is already in use (not
|
|
317 |
+# including any space used by the artifact cache).
|
|
318 |
+#
|
|
319 |
+@pytest.mark.parametrize("quota,err_domain,err_reason", [
|
|
320 |
+ # Valid configurations
|
|
321 |
+ ("1", 'success', None),
|
|
322 |
+ ("1K", 'success', None),
|
|
323 |
+ ("50%", 'success', None),
|
|
324 |
+ ("infinity", 'success', None),
|
|
325 |
+ ("0", 'success', None),
|
|
326 |
+ # Invalid configurations
|
|
327 |
+ ("-1", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
|
|
328 |
+ ("pony", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
|
|
329 |
+ ("200%", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
|
|
330 |
+ # Not enough space for these caches
|
|
331 |
+ ("7K", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota'),
|
|
332 |
+ ("70%", ErrorDomain.ARTIFACT, 'insufficient-storage-for-quota')
|
|
318 | 333 |
])
|
319 | 334 |
@pytest.mark.datafiles(DATA_DIR)
|
320 |
-def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
|
|
335 |
+def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, err_domain, err_reason):
|
|
321 | 336 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
322 | 337 |
os.makedirs(os.path.join(project, 'elements'))
|
323 | 338 |
|
... | ... | @@ -356,10 +371,10 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success): |
356 | 371 |
with volume_space_patch, cache_size_patch:
|
357 | 372 |
res = cli.run(project=project, args=['workspace', 'list'])
|
358 | 373 |
|
359 |
- if success:
|
|
374 |
+ if err_domain == 'success':
|
|
360 | 375 |
res.assert_success()
|
361 | 376 |
else:
|
362 |
- res.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
|
|
377 |
+ res.assert_main_error(err_domain, err_reason)
|
|
363 | 378 |
|
364 | 379 |
|
365 | 380 |
@pytest.mark.datafiles(DATA_DIR)
|
... | ... | @@ -389,8 +389,9 @@ def test_filter_track_multi(datafiles, cli, tmpdir): |
389 | 389 |
_yaml.dump(filter2_config, filter2_file)
|
390 | 390 |
|
391 | 391 |
# Assert that a fetch is needed
|
392 |
- assert cli.get_element_state(project, input_name) == 'no reference'
|
|
393 |
- assert cli.get_element_state(project, input2_name) == 'no reference'
|
|
392 |
+ states = cli.get_element_states(project, [input_name, input2_name])
|
|
393 |
+ assert states[input_name] == 'no reference'
|
|
394 |
+ assert states[input2_name] == 'no reference'
|
|
394 | 395 |
|
395 | 396 |
# Now try to track it
|
396 | 397 |
result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
|
... | ... | @@ -450,8 +451,9 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir): |
450 | 451 |
_yaml.dump(filter2_config, filter2_file)
|
451 | 452 |
|
452 | 453 |
# Assert that a fetch is needed
|
453 |
- assert cli.get_element_state(project, input_name) == 'no reference'
|
|
454 |
- assert cli.get_element_state(project, input2_name) == 'no reference'
|
|
454 |
+ states = cli.get_element_states(project, [input_name, input2_name])
|
|
455 |
+ assert states[input_name] == 'no reference'
|
|
456 |
+ assert states[input2_name] == 'no reference'
|
|
455 | 457 |
|
456 | 458 |
# Now try to track it
|
457 | 459 |
result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst", "--except", input_name])
|
... | ... | @@ -66,16 +66,16 @@ def test_push_pull_all(cli, tmpdir, datafiles): |
66 | 66 |
shutil.rmtree(artifacts)
|
67 | 67 |
|
68 | 68 |
# Assert that nothing is cached locally anymore
|
69 |
- for element_name in all_elements:
|
|
70 |
- assert cli.get_element_state(project, element_name) != 'cached'
|
|
69 |
+ states = cli.get_element_states(project, all_elements)
|
|
70 |
+ assert not any(states[e] == 'cached' for e in all_elements)
|
|
71 | 71 |
|
72 | 72 |
# Now try bst pull
|
73 | 73 |
result = cli.run(project=project, args=['artifact', 'pull', '--deps', 'all', 'target.bst'])
|
74 | 74 |
result.assert_success()
|
75 | 75 |
|
76 | 76 |
# And assert that it's again in the local cache, without having built
|
77 |
- for element_name in all_elements:
|
|
78 |
- assert cli.get_element_state(project, element_name) == 'cached'
|
|
77 |
+ states = cli.get_element_states(project, all_elements)
|
|
78 |
+ assert not any(states[e] != 'cached' for e in all_elements)
|
|
79 | 79 |
|
80 | 80 |
|
81 | 81 |
# Tests that:
|
... | ... | @@ -250,9 +250,10 @@ def test_artifact_expires(cli, datafiles, tmpdir): |
250 | 250 |
result.assert_success()
|
251 | 251 |
|
252 | 252 |
# check that element's 1 and 2 are cached both locally and remotely
|
253 |
- assert cli.get_element_state(project, 'element1.bst') == 'cached'
|
|
253 |
+ states = cli.get_element_states(project, ['element1.bst', 'element2.bst'])
|
|
254 |
+ assert states['element1.bst'] == 'cached'
|
|
255 |
+ assert states['element2.bst'] == 'cached'
|
|
254 | 256 |
assert_shared(cli, share, project, 'element1.bst')
|
255 |
- assert cli.get_element_state(project, 'element2.bst') == 'cached'
|
|
256 | 257 |
assert_shared(cli, share, project, 'element2.bst')
|
257 | 258 |
|
258 | 259 |
# Create and build another element of 5 MB (This will exceed the free disk space available)
|
... | ... | @@ -298,11 +299,12 @@ def test_artifact_too_large(cli, datafiles, tmpdir): |
298 | 299 |
result.assert_success()
|
299 | 300 |
|
300 | 301 |
# Ensure that the small artifact is still in the share
|
301 |
- assert cli.get_element_state(project, 'small_element.bst') == 'cached'
|
|
302 |
+ states = cli.get_element_states(project, ['small_element.bst', 'large_element.bst'])
|
|
303 |
+ assert states['small_element.bst'] == 'cached'
|
|
302 | 304 |
assert_shared(cli, share, project, 'small_element.bst')
|
303 | 305 |
|
304 | 306 |
# Ensure that the artifact is cached locally but NOT remotely
|
305 |
- assert cli.get_element_state(project, 'large_element.bst') == 'cached'
|
|
307 |
+ assert states['large_element.bst'] == 'cached'
|
|
306 | 308 |
assert_not_shared(cli, share, project, 'large_element.bst')
|
307 | 309 |
|
308 | 310 |
|
... | ... | @@ -334,8 +336,9 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir): |
334 | 336 |
result.assert_success()
|
335 | 337 |
|
336 | 338 |
# Ensure they are cached locally
|
337 |
- assert cli.get_element_state(project, 'element1.bst') == 'cached'
|
|
338 |
- assert cli.get_element_state(project, 'element2.bst') == 'cached'
|
|
339 |
+ states = cli.get_element_states(project, ['element1.bst', 'element2.bst'])
|
|
340 |
+ assert states['element1.bst'] == 'cached'
|
|
341 |
+ assert states['element2.bst'] == 'cached'
|
|
339 | 342 |
|
340 | 343 |
# Ensure that they have been pushed to the cache
|
341 | 344 |
assert_shared(cli, share, project, 'element1.bst')
|
... | ... | @@ -123,7 +123,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount): |
123 | 123 |
last_element_name = element_name
|
124 | 124 |
|
125 | 125 |
# Assert that a fetch is needed
|
126 |
- states = cli.get_element_states(project, last_element_name)
|
|
126 |
+ states = cli.get_element_states(project, [last_element_name])
|
|
127 | 127 |
for element_name in element_names:
|
128 | 128 |
assert states[element_name] == 'no reference'
|
129 | 129 |
|
... | ... | @@ -143,7 +143,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind, amount): |
143 | 143 |
result.assert_success()
|
144 | 144 |
|
145 | 145 |
# Assert that the base is buildable and the rest are waiting
|
146 |
- states = cli.get_element_states(project, last_element_name)
|
|
146 |
+ states = cli.get_element_states(project, [last_element_name])
|
|
147 | 147 |
for element_name in element_names:
|
148 | 148 |
if element_name == element_names[0]:
|
149 | 149 |
assert states[element_name] == 'buildable'
|
... | ... | @@ -171,8 +171,9 @@ def test_track_single(cli, tmpdir, datafiles): |
171 | 171 |
dep_name=element_dep_name)
|
172 | 172 |
|
173 | 173 |
# Assert that tracking is needed for both elements
|
174 |
- assert cli.get_element_state(project, element_dep_name) == 'no reference'
|
|
175 |
- assert cli.get_element_state(project, element_target_name) == 'no reference'
|
|
174 |
+ states = cli.get_element_states(project, [element_target_name])
|
|
175 |
+ assert states[element_dep_name] == 'no reference'
|
|
176 |
+ assert states[element_target_name] == 'no reference'
|
|
176 | 177 |
|
177 | 178 |
# Now first try to track only one element
|
178 | 179 |
result = cli.run(project=project, args=[
|
... | ... | @@ -187,8 +188,9 @@ def test_track_single(cli, tmpdir, datafiles): |
187 | 188 |
result.assert_success()
|
188 | 189 |
|
189 | 190 |
# Assert that the dependency is waiting and the target has still never been tracked
|
190 |
- assert cli.get_element_state(project, element_dep_name) == 'no reference'
|
|
191 |
- assert cli.get_element_state(project, element_target_name) == 'waiting'
|
|
191 |
+ states = cli.get_element_states(project, [element_target_name])
|
|
192 |
+ assert states[element_dep_name] == 'no reference'
|
|
193 |
+ assert states[element_target_name] == 'waiting'
|
|
192 | 194 |
|
193 | 195 |
|
194 | 196 |
@pytest.mark.datafiles(DATA_DIR)
|
... | ... | @@ -212,8 +214,9 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind): |
212 | 214 |
dep_name=element_dep_name)
|
213 | 215 |
|
214 | 216 |
# Assert that a fetch is needed
|
215 |
- assert cli.get_element_state(project, element_dep_name) == 'no reference'
|
|
216 |
- assert cli.get_element_state(project, element_target_name) == 'no reference'
|
|
217 |
+ states = cli.get_element_states(project, [element_target_name])
|
|
218 |
+ assert states[element_dep_name] == 'no reference'
|
|
219 |
+ assert states[element_target_name] == 'no reference'
|
|
217 | 220 |
|
218 | 221 |
# Now first try to track it
|
219 | 222 |
result = cli.run(project=project, args=[
|
... | ... | @@ -231,8 +234,9 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind): |
231 | 234 |
result.assert_success()
|
232 | 235 |
|
233 | 236 |
# Assert that the dependency is buildable and the target is waiting
|
234 |
- assert cli.get_element_state(project, element_dep_name) == 'no reference'
|
|
235 |
- assert cli.get_element_state(project, element_target_name) == 'waiting'
|
|
237 |
+ states = cli.get_element_states(project, [element_target_name])
|
|
238 |
+ assert states[element_dep_name] == 'no reference'
|
|
239 |
+ assert states[element_target_name] == 'waiting'
|
|
236 | 240 |
|
237 | 241 |
|
238 | 242 |
@pytest.mark.datafiles(os.path.join(TOP_DIR))
|
... | ... | @@ -672,21 +676,20 @@ def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind): |
672 | 676 |
|
673 | 677 |
|
674 | 678 |
@pytest.mark.datafiles(DATA_DIR)
|
675 |
-@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
|
|
676 |
-def test_track_error_cannot_write_file(cli, tmpdir, datafiles, kind):
|
|
679 |
+def test_track_error_cannot_write_file(cli, tmpdir, datafiles):
|
|
677 | 680 |
if os.geteuid() == 0:
|
678 | 681 |
pytest.skip("This is not testable with root permissions")
|
679 | 682 |
|
680 | 683 |
project = str(datafiles)
|
681 | 684 |
dev_files_path = os.path.join(project, 'files', 'dev-files')
|
682 | 685 |
element_path = os.path.join(project, 'elements')
|
683 |
- element_name = 'track-test-{}.bst'.format(kind)
|
|
686 |
+ element_name = 'track-test.bst'
|
|
684 | 687 |
|
685 | 688 |
configure_project(project, {
|
686 | 689 |
'ref-storage': 'inline'
|
687 | 690 |
})
|
688 | 691 |
|
689 |
- repo = create_repo(kind, str(tmpdir))
|
|
692 |
+ repo = create_repo('git', str(tmpdir))
|
|
690 | 693 |
ref = repo.create(dev_files_path)
|
691 | 694 |
|
692 | 695 |
element_full_path = os.path.join(element_path, element_name)
|
... | ... | @@ -107,15 +107,17 @@ class WorkspaceCreater(): |
107 | 107 |
element_name, element_path, workspace_dir = \
|
108 | 108 |
self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
|
109 | 109 |
element_attrs)
|
110 |
- |
|
111 |
- # Assert that there is no reference, a track & fetch is needed
|
|
112 |
- state = self.cli.get_element_state(self.project_path, element_name)
|
|
113 |
- if track:
|
|
114 |
- assert state == 'no reference'
|
|
115 |
- else:
|
|
116 |
- assert state == 'fetch needed'
|
|
117 | 110 |
element_tuples.append((element_name, workspace_dir))
|
118 | 111 |
|
112 |
+ # Assert that there is no reference, a track & fetch is needed
|
|
113 |
+ states = self.cli.get_element_states(self.project_path, [
|
|
114 |
+ e for e, _ in element_tuples
|
|
115 |
+ ])
|
|
116 |
+ if track:
|
|
117 |
+ assert not any(states[e] != 'no reference' for e, _ in element_tuples)
|
|
118 |
+ else:
|
|
119 |
+ assert not any(states[e] != 'fetch needed' for e, _ in element_tuples)
|
|
120 |
+ |
|
119 | 121 |
return element_tuples
|
120 | 122 |
|
121 | 123 |
def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
|
... | ... | @@ -140,12 +142,14 @@ class WorkspaceCreater(): |
140 | 142 |
|
141 | 143 |
result.assert_success()
|
142 | 144 |
|
143 |
- for element_name, workspace_dir in element_tuples:
|
|
144 |
- # Assert that we are now buildable because the source is
|
|
145 |
- # now cached.
|
|
146 |
- assert self.cli.get_element_state(self.project_path, element_name) == 'buildable'
|
|
145 |
+ # Assert that we are now buildable because the source is now cached.
|
|
146 |
+ states = self.cli.get_element_states(self.project_path, [
|
|
147 |
+ e for e, _ in element_tuples
|
|
148 |
+ ])
|
|
149 |
+ assert not any(states[e] != 'buildable' for e, _ in element_tuples)
|
|
147 | 150 |
|
148 |
- # Check that the executable hello file is found in the workspace
|
|
151 |
+ # Check that the executable hello file is found in each workspace
|
|
152 |
+ for element_name, workspace_dir in element_tuples:
|
|
149 | 153 |
filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
|
150 | 154 |
assert os.path.exists(filename)
|
151 | 155 |
|
1 |
-import os
|
|
2 |
-from unittest import mock
|
|
3 |
- |
|
4 |
-from buildstream import _yaml
|
|
5 |
- |
|
6 |
-from ..testutils.runcli import cli
|
|
7 |
- |
|
8 |
- |
|
9 |
-KiB = 1024
|
|
10 |
-MiB = (KiB * 1024)
|
|
11 |
-GiB = (MiB * 1024)
|
|
12 |
-TiB = (GiB * 1024)
|
|
13 |
- |
|
14 |
- |
|
15 |
-def test_parse_size_over_1024T(cli, tmpdir):
|
|
16 |
- cli.configure({
|
|
17 |
- 'cache': {
|
|
18 |
- 'quota': 2048 * TiB
|
|
19 |
- }
|
|
20 |
- })
|
|
21 |
- project = tmpdir.join("main")
|
|
22 |
- os.makedirs(str(project))
|
|
23 |
- _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
|
|
24 |
- |
|
25 |
- volume_space_patch = mock.patch(
|
|
26 |
- "buildstream._artifactcache.ArtifactCache._get_volume_space_info_for",
|
|
27 |
- autospec=True,
|
|
28 |
- return_value=(1025 * TiB, 1025 * TiB)
|
|
29 |
- )
|
|
30 |
- |
|
31 |
- with volume_space_patch:
|
|
32 |
- result = cli.run(project, args=["build", "file.bst"])
|
|
33 |
- failure_msg = 'Your system does not have enough available space to support the cache quota specified.'
|
|
34 |
- assert failure_msg in result.stderr
|
... | ... | @@ -136,18 +136,25 @@ def test_unique_key(cli, tmpdir, datafiles): |
136 | 136 |
'''
|
137 | 137 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
138 | 138 |
generate_project(project, tmpdir)
|
139 |
- assert cli.get_element_state(project, 'target.bst') == "fetch needed"
|
|
140 |
- assert cli.get_element_state(project, 'target-custom.bst') == "fetch needed"
|
|
141 |
- assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
|
|
139 |
+ states = cli.get_element_states(project, [
|
|
140 |
+ 'target.bst', 'target-custom.bst', 'target-custom-executable.bst'
|
|
141 |
+ ])
|
|
142 |
+ assert states['target.bst'] == "fetch needed"
|
|
143 |
+ assert states['target-custom.bst'] == "fetch needed"
|
|
144 |
+ assert states['target-custom-executable.bst'] == "fetch needed"
|
|
145 |
+ |
|
142 | 146 |
# Try to fetch it
|
143 | 147 |
result = cli.run(project=project, args=[
|
144 | 148 |
'source', 'fetch', 'target.bst'
|
145 | 149 |
])
|
146 | 150 |
|
147 | 151 |
# We should download the file only once
|
148 |
- assert cli.get_element_state(project, 'target.bst') == 'buildable'
|
|
149 |
- assert cli.get_element_state(project, 'target-custom.bst') == 'buildable'
|
|
150 |
- assert cli.get_element_state(project, 'target-custom-executable.bst') == 'buildable'
|
|
152 |
+ states = cli.get_element_states(project, [
|
|
153 |
+ 'target.bst', 'target-custom.bst', 'target-custom-executable.bst'
|
|
154 |
+ ])
|
|
155 |
+ assert states['target.bst'] == 'buildable'
|
|
156 |
+ assert states['target-custom.bst'] == 'buildable'
|
|
157 |
+ assert states['target-custom-executable.bst'] == 'buildable'
|
|
151 | 158 |
|
152 | 159 |
# But the cache key is different because the 'filename' is different.
|
153 | 160 |
assert cli.get_element_key(project, 'target.bst') != \
|
... | ... | @@ -398,13 +398,12 @@ class Cli(): |
398 | 398 |
#
|
399 | 399 |
# Returns a dictionary with the element names as keys
|
400 | 400 |
#
|
401 |
- def get_element_states(self, project, target, deps='all'):
|
|
401 |
+ def get_element_states(self, project, targets, deps='all'):
|
|
402 | 402 |
result = self.run(project=project, silent=True, args=[
|
403 | 403 |
'show',
|
404 | 404 |
'--deps', deps,
|
405 | 405 |
'--format', '%{name}||%{state}',
|
406 |
- target
|
|
407 |
- ])
|
|
406 |
+ ] + targets)
|
|
408 | 407 |
result.assert_success()
|
409 | 408 |
lines = result.output.splitlines()
|
410 | 409 |
states = {}
|