James Ennis pushed to branch jennis/correct_HACKING at BuildStream / buildstream
Commits:
- 91d62d2f by Chandan Singh at 2018-08-22T18:38:01Z
- cd0775eb by Chandan Singh at 2018-08-22T23:44:36Z
- b0d1aa83 by Tristan Van Berkom at 2018-08-23T05:54:05Z
- 2215859c by Valentin David at 2018-08-23T06:40:19Z
- e37ac3bc by Tristan Van Berkom at 2018-08-23T07:45:49Z
- 2ac654a8 by Valentin David at 2018-08-23T08:02:50Z
- 176aa09f by Tristan Van Berkom at 2018-08-23T09:10:51Z
- f1e3f10f by Mathieu Bridon at 2018-08-23T09:11:47Z
- bea01531 by Tristan Van Berkom at 2018-08-23T09:14:40Z
- da19bcf1 by Tristan Van Berkom at 2018-08-23T09:14:40Z
- 8216ff8d by Tristan Van Berkom at 2018-08-23T10:13:23Z
- ef1da7ba by James Ennis at 2018-08-23T14:26:43Z
10 changed files:
- .gitlab-ci.yml
- HACKING.rst
- MANIFEST.in
- README.rst
- buildstream/_artifactcache/cascache.py
- buildstream/_frontend/linuxapp.py
- buildstream/_pipeline.py
- setup.py
- tests/artifactcache/expiry.py
- tests/frontend/workspace.py
Changes:
.gitlab-ci.yml
==============

@@ -10,6 +10,16 @@ stages:
   - test
   - post

+# Avoid running all the tests post merge on
+# master or on any release branch.
+#
+.tests-condition-template: &tests-condition
+  only:
+  - branches
+  except:
+  - master
+  - /bst-1\..*/
+
 #####################################################
 # Prepare stage #
 #####################################################
@@ -81,20 +91,26 @@ source_dist:
 tests-debian-9:
   image: buildstream/testsuite-debian:9-master-114-4cab18e3
   <<: *linux-tests
+  <<: *tests-condition

 tests-fedora-27:
   image: buildstream/testsuite-fedora:27-master-114-4cab18e3
   <<: *linux-tests
+  <<: *tests-condition

 tests-fedora-28:
   image: buildstream/testsuite-fedora:28-master-114-4cab18e3
   <<: *linux-tests
+  <<: *tests-condition

 tests-ubuntu-18.04:
   image: buildstream/testsuite-ubuntu:18.04-master-114-4cab18e3
   <<: *linux-tests
+  <<: *tests-condition

 tests-unix:
+  <<: *tests-condition
+
   # Use fedora here, to a) run a test on fedora and b) ensure that we
   # can get rid of ostree - this is not possible with debian-8
   image: buildstream/testsuite-fedora:27-master-114-4cab18e3
@@ -133,6 +149,15 @@ tests-unix:
 # Note: We still do not enforce a consistent installation of python3-sphinx,
 # as it will significantly grow the backing image.
 docs:
+
+  # Here we build the docs for every pre-merge CI, but avoid
+  # the job on post-merge to stable branches, because we only
+  # ever publish them from master
+  only:
+  - branches
+  except:
+  - /bst-1\..*/
+
   stage: test
   script:
   - export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
@@ -157,6 +182,8 @@ docs:
 # as an output of radon, with some conversion
 #
 codequality:
+  <<: *tests-condition
+
   image: docker:stable
   stage: post
   variables:
@@ -175,6 +202,8 @@ codequality:
   paths: [codeclimate.json]

 analysis:
+  <<: *tests-condition
+
   stage: post
   script:
   - |
@@ -203,6 +232,8 @@ analysis:
 # Collate coverage reports
 #
 coverage:
+  <<: *tests-condition
+
   stage: post
   coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
   script:
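Note on the CI change above: the `&tests-condition` anchor and the `<<: *tests-condition` merge keys are plain YAML features rather than GitLab-specific syntax; merging the anchor into a job is equivalent to pasting its `only`/`except` keys into that job, which is why the docs job, whose except list deliberately omits master, spells its rules out by hand instead. A minimal sketch of how the merge expands, assuming PyYAML is available (GitLab's own parser resolves anchors and merge keys the same way):

    import yaml  # assumes PyYAML is installed

    snippet = r"""
    .tests-condition-template: &tests-condition
      only:
      - branches
      except:
      - master
      - /bst-1\..*/

    tests-debian-9:
      image: buildstream/testsuite-debian:9-master-114-4cab18e3
      <<: *tests-condition
    """

    config = yaml.safe_load(snippet)

    # The merge key pulls the anchored mapping's keys into the job definition.
    print(config['tests-debian-9']['only'])                 # ['branches']
    print('master' in config['tests-debian-9']['except'])   # True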
HACKING.rst
===========

@@ -465,12 +465,12 @@ If you want to run a specific test or a group of tests, you
 can specify a prefix to match. E.g. if you want to run all of
 the frontend tests you can do::

-  ./setup.py test --addopts '-k tests/frontend/'
+  ./setup.py test --addopts 'tests/frontend/'

 Specific tests can be chosen by using the :: delimeter after the test module.
 If you wanted to run the test_build_track test within frontend/buildtrack.py you could do::

-  ./setup.py test --adopts '-k tests/frontend/buildtrack.py::test_build_track'
+  ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'

 We also have a set of slow integration tests that are disabled by
 default - you will notice most of them marked with SKIP in the pytest
MANIFEST.in
===========

@@ -23,4 +23,4 @@ recursive-include tests *.expected
 recursive-include buildstream/_protos *.proto

 # Requirements files
-dev-requirements.txt
+include dev-requirements.txt
README.rst
==========

@@ -1,10 +1,5 @@
 About
 -----
-.. image:: https://gitlab.com/BuildStream/buildstream/badges/master/pipeline.svg
-   :target: https://gitlab.com/BuildStream/buildstream/commits/master
-
-.. image:: https://gitlab.com/BuildStream/buildstream/badges/master/coverage.svg?job=coverage
-   :target: https://gitlab.com/BuildStream/buildstream/commits/master


 What is BuildStream?
buildstream/_artifactcache/cascache.py
======================================

@@ -30,6 +30,8 @@ from urllib.parse import urlparse

 import grpc

+from .. import _yaml
+
 from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
 from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
@@ -526,6 +528,25 @@ class CASCache(ArtifactCache):
     #
     def remove(self, ref, *, defer_prune=False):

+        # Remove extract if not used by other ref
+        tree = self.resolve_ref(ref)
+        ref_name, ref_hash = os.path.split(ref)
+        extract = os.path.join(self.extractdir, ref_name, tree.hash)
+        keys_file = os.path.join(extract, 'meta', 'keys.yaml')
+        if os.path.exists(keys_file):
+            keys_meta = _yaml.load(keys_file)
+            keys = [keys_meta['strong'], keys_meta['weak']]
+            remove_extract = True
+            for other_hash in keys:
+                if other_hash == ref_hash:
+                    continue
+                remove_extract = False
+                break
+
+            if remove_extract:
+                utils._force_rmtree(extract)
+
+        # Remove cache ref
         refpath = self._refpath(ref)
         if not os.path.exists(refpath):
             raise ArtifactError("Could not find artifact for ref '{}'".format(ref))
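The new logic in remove() relies on the layout of an extracted artifact: the extract directory is shared between an element's strong and weak cache keys, which are recorded in its meta/keys.yaml, so it may only be deleted once no key other than the one being removed still refers to it. A minimal standalone sketch of that decision, with illustrative names only (not the BuildStream API; load_yaml stands in for the internal _yaml.load used above):

    import os

    def should_remove_extract(extract_dir, ref_hash, load_yaml):
        # Mirror of the check in remove() above: the shared extract directory
        # is only deleted when no key other than `ref_hash` points at it.
        keys_file = os.path.join(extract_dir, 'meta', 'keys.yaml')
        if not os.path.exists(keys_file):
            # Nothing was ever extracted for this artifact
            return False

        keys_meta = load_yaml(keys_file)
        keys = [keys_meta['strong'], keys_meta['weak']]

        # Any key that is not the one being removed still needs the extract
        return all(key == ref_hash for key in keys)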
buildstream/_frontend/linuxapp.py
=================================

@@ -28,9 +28,9 @@ from .app import App
 #
 def _osc_777_supported():

-    term = os.environ['TERM']
+    term = os.environ.get('TERM')

-    if term.startswith('xterm') or term.startswith('vte'):
+    if term and (term.startswith('xterm') or term.startswith('vte')):

         # Since vte version 4600, upstream silently ignores
         # the OSC 777 without printing garbage to the terminal.
@@ -39,10 +39,10 @@ def _osc_777_supported():
         # will trigger a desktop notification and bring attention
         # to the terminal.
         #
-        vte_version = os.environ['VTE_VERSION']
+        vte_version = os.environ.get('VTE_VERSION')
         try:
             vte_version_int = int(vte_version)
-        except ValueError:
+        except (ValueError, TypeError):
             return False

         if vte_version_int >= 4600:
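The widened except clause above follows directly from the switch to os.environ.get(): a missing VTE_VERSION now yields None instead of raising KeyError, and int(None) raises TypeError rather than ValueError. A small illustration of the three cases (hypothetical helper, not BuildStream code):

    def parse_vte_version(value):
        # Same pattern as the try/except in _osc_777_supported(): anything
        # that cannot be parsed counts as "OSC 777 not supported".
        try:
            return int(value)
        except (ValueError, TypeError):
            return None

    assert parse_vte_version("4600") == 4600     # normal case
    assert parse_vte_version("garbage") is None  # ValueError: malformed value
    assert parse_vte_version(None) is None       # TypeError: variable not set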
buildstream/_pipeline.py
========================

@@ -355,10 +355,14 @@ class Pipeline():
     #
     def assert_consistent(self, elements):
         inconsistent = []
+        inconsistent_workspaced = []
         with self._context.timed_activity("Checking sources"):
             for element in elements:
                 if element._get_consistency() == Consistency.INCONSISTENT:
-                    inconsistent.append(element)
+                    if element._get_workspace():
+                        inconsistent_workspaced.append(element)
+                    else:
+                        inconsistent.append(element)

         if inconsistent:
             detail = "Exact versions are missing for the following elements:\n\n"
@@ -372,6 +376,13 @@ class Pipeline():

             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")

+        if inconsistent_workspaced:
+            detail = "Some workspaces do not exist but are not closed\n" + \
+                     "Try closing them with `bst workspace close`\n\n"
+            for element in inconsistent_workspaced:
+                detail += "  " + element._get_full_name() + "\n"
+            raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
+
     #############################################################
     # Private Methods #
     #############################################################
setup.py
========

@@ -273,7 +273,7 @@ setup(name='BuildStream',
       'ruamel.yaml < 0.15.52',
       'pluginbase',
       'Click',
-      'blessings',
+      'blessings >= 1.6',
       'jinja2 >= 2.10',
       'protobuf >= 3.5',
       'grpcio >= 1.10',
tests/artifactcache/expiry.py
=============================

@@ -268,3 +268,38 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
         res.assert_success()
     else:
         res.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_extract_expiry(cli, datafiles, tmpdir):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    element_path = 'elements'
+
+    cli.configure({
+        'cache': {
+            'quota': 10000000,
+        }
+    })
+
+    create_element_size('target.bst', project, element_path, [], 6000000)
+    res = cli.run(project=project, args=['build', 'target.bst'])
+    res.assert_success()
+    assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+    # Force creating extract
+    res = cli.run(project=project, args=['checkout', 'target.bst', os.path.join(str(tmpdir), 'checkout')])
+    res.assert_success()
+
+    extractdir = os.path.join(project, 'cache', 'artifacts', 'extract', 'test', 'target')
+    extracts = os.listdir(extractdir)
+    assert(len(extracts) == 1)
+    extract = os.path.join(extractdir, extracts[0])
+
+    # Remove target.bst from artifact cache
+    create_element_size('target2.bst', project, element_path, [], 6000000)
+    res = cli.run(project=project, args=['build', 'target2.bst'])
+    res.assert_success()
+    assert cli.get_element_state(project, 'target.bst') != 'cached'
+
+    # Now the extract should be removed.
+    assert not os.path.exists(extract)
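The sizes in test_extract_expiry carry the whole argument: with a 10,000,000-byte quota, two artifacts of roughly 6,000,000 bytes each cannot coexist, so building target2.bst must expire target.bst, and with the remove() change above its extract directory has to disappear as well. The arithmetic, spelled out:

    quota = 10000000          # bytes, from cli.configure() in the test above
    artifact_size = 6000000   # approximate size of each generated element

    assert artifact_size <= quota      # the first artifact fits and is cached
    assert 2 * artifact_size > quota   # both cannot fit, so the older one is expired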
tests/frontend/workspace.py
===========================

@@ -767,3 +767,16 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
     # Check that workspace config is converted correctly if necessary
     loaded_config = _yaml.node_sanitize(_yaml.load(workspace_config_path))
     assert loaded_config == parse_dict_as_yaml(expected)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", repo_kinds)
+def test_inconsitent_pipeline_message(cli, tmpdir, datafiles, kind):
+    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
+
+    shutil.rmtree(workspace)
+
+    result = cli.run(project=project, args=[
+        'build', element_name
+    ])
+    result.assert_main_error(ErrorDomain.PIPELINE, "inconsistent-pipeline-workspaced")