Valentin David pushed to branch valentindavid/447-stack-trace-checkout at BuildStream / buildstream
Commits:
- 064abe66 by Sam Thursfield at 2018-08-13T13:50:08Z
- 6a2f3b59 by Valentin David at 2018-08-13T15:25:42Z
- 002749b5 by Jürg Billeter at 2018-08-13T15:39:22Z
- 60df233d by Valentin David at 2018-08-13T17:44:03Z
- 60c817c0 by Phillip Smyth at 2018-08-13T19:04:16Z
- 2315fff0 by Valentin David at 2018-08-13T20:04:28Z
- b3fac258 by Francisco Redondo Marchena at 2018-08-13T20:23:50Z
- 41d97b7e by Valentin David at 2018-08-13T21:18:08Z
- 83710050 by Martin Blanchard at 2018-08-13T21:37:28Z
- d6714dc2 by Valentin David at 2018-08-13T22:51:26Z
- b82d6793 by Phil Dawson at 2018-08-14T05:33:23Z
- 9e2b66cc by Phil Dawson at 2018-08-14T05:33:23Z
- cbdeba7b by Tristan Van Berkom at 2018-08-14T06:45:42Z
- 3088aca2 by Martin Blanchard at 2018-08-14T07:12:45Z
- 7a9cd5ff by Martin Blanchard at 2018-08-14T07:12:45Z
15 changed files:
- buildstream/_artifactcache/cascache.py
- buildstream/_frontend/app.py
- buildstream/_frontend/cli.py
- buildstream/_frontend/complete.py
- buildstream/_stream.py
- buildstream/element.py
- tests/completions/completions.py
- + tests/completions/sub-folders/base/unwanted.bst
- + tests/completions/sub-folders/elements/base.bst
- + tests/completions/sub-folders/elements/base/wanted.bst
- + tests/completions/sub-folders/elements/hello.bst
- + tests/completions/sub-folders/project.conf
- tests/frontend/buildcheckout.py
- tests/frontend/show.py
- tests/frontend/workspace.py
Changes:
buildstream/_artifactcache/cascache.py:
@@ -845,6 +845,9 @@ class _CASRemote():
 
 
 def _grouper(iterable, n):
-    # pylint: disable=stop-iteration-return
     while True:
-        yield itertools.chain([next(iterable)], itertools.islice(iterable, n - 1))
+        try:
+            current = next(iterable)
+        except StopIteration:
+            return
+        yield itertools.chain([current], itertools.islice(iterable, n - 1))
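
For context, PEP 479 is why the pylint pragma could be dropped: once generator_stop semantics apply (Python 3.7, or earlier with the __future__ import), a StopIteration escaping a generator body is turned into a RuntimeError instead of silently ending iteration. A minimal standalone sketch of the fixed generator, using the same names as the diff above:

    import itertools

    def _grouper(iterable, n):
        # Lazily yield n-sized chunks, ending cleanly when the iterator is
        # exhausted instead of letting StopIteration escape the generator.
        while True:
            try:
                current = next(iterable)
            except StopIteration:
                return
            yield itertools.chain([current], itertools.islice(iterable, n - 1))

    # Example: chunks of 3 over 0..6 -> [0, 1, 2], [3, 4, 5], [6]
    for chunk in _grouper(iter(range(7)), 3):
        print(list(chunk))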
buildstream/_frontend/app.py:
@@ -275,6 +275,10 @@ class App():
 
             # Exit with the error
             self._error_exit(e)
+        except RecursionError:
+            click.echo("RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
+                       err=True)
+            sys.exit(-1)
 
         else:
             # No exceptions occurred, print session time and summary
buildstream/_frontend/cli.py:
@@ -827,4 +827,5 @@ def source_bundle(app, element, force, directory,
         app.stream.source_bundle(element, directory,
                                  track_first=track_,
                                  force=force,
-                                 compression=compression)
+                                 compression=compression,
+                                 except_targets=except_)
buildstream/_frontend/complete.py:
@@ -68,9 +68,10 @@ def complete_path(path_type, incomplete, base_directory='.'):
         # If there was nothing on the left of the last separator,
         # we are completing files in the filesystem root
         base_path = os.path.join(base_directory, base_path)
-
-    elif os.path.isdir(incomplete):
-        base_path = incomplete
+    else:
+        incomplete_base_path = os.path.join(base_directory, incomplete)
+        if os.path.isdir(incomplete_base_path):
+            base_path = incomplete_base_path
 
     try:
         if base_path:
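
In effect, what the user has typed is now resolved against base_directory before checking for a directory on disk. A rough sketch of the changed branch in isolation (the variable values here are hypothetical, purely to illustrate):

    import os

    base_directory = 'elements'   # hypothetical: the project's element-path
    incomplete = 'base/'          # hypothetical: what the user has typed so far

    # Previously os.path.isdir(incomplete) was tested as-is, which only worked
    # relative to the current working directory; joining onto base_directory
    # first lets 'base/' be found inside the element path.
    incomplete_base_path = os.path.join(base_directory, incomplete)
    if os.path.isdir(incomplete_base_path):
        base_path = incomplete_base_path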
buildstream/_stream.py:
@@ -437,7 +437,7 @@ class Stream():
 
         except BstError as e:
             raise StreamError("Error while staging dependencies into a sandbox"
-                              ": '{}'".format(e), reason=e.reason) from e
+                              ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
 
     # workspace_open
     #
@@ -478,7 +478,7 @@ class Stream():
 
         # Check for workspace config
         workspace = workspaces.get_workspace(target._get_full_name())
-        if workspace:
+        if workspace and not force:
             raise StreamError("Workspace '{}' is already defined at: {}"
                               .format(target.name, workspace.path))
 
@@ -497,6 +497,10 @@ class Stream():
                                   "fetch the latest version of the " +
                                   "source.")
 
+        if workspace:
+            workspaces.delete_workspace(target._get_full_name())
+            workspaces.save_config()
+            shutil.rmtree(directory)
         try:
             os.makedirs(directory, exist_ok=True)
         except OSError as e:
@@ -654,7 +658,8 @@ class Stream():
     def source_bundle(self, target, directory, *,
                       track_first=False,
                       force=False,
-                      compression="gz"):
+                      compression="gz",
+                      except_targets=()):
 
         if track_first:
             track_targets = (target,)
@@ -663,6 +668,7 @@ class Stream():
 
         elements, track_elements = self._load((target,), track_targets,
                                               selection=PipelineSelection.ALL,
+                                              except_targets=except_targets,
                                               track_selection=PipelineSelection.ALL,
                                               fetch_subprojects=True)
 
buildstream/element.py:
@@ -194,6 +194,9 @@ class Element(Plugin):
 
     def __init__(self, context, project, artifacts, meta, plugin_conf):
 
+        self.__cache_key_dict = None            # Dict for cache key calculation
+        self.__cache_key = None                 # Our cached cache key
+
         super().__init__(meta.name, context, project, meta.provenance, "element")
 
         self.__is_junction = meta.kind == "junction"
@@ -212,8 +215,6 @@ class Element(Plugin):
         self.__runtime_dependencies = []        # Direct runtime dependency Elements
         self.__build_dependencies = []          # Direct build dependency Elements
         self.__sources = []                     # List of Sources
-        self.__cache_key_dict = None            # Dict for cache key calculation
-        self.__cache_key = None                 # Our cached cache key
         self.__weak_cache_key = None            # Our cached weak cache key
         self.__strict_cache_key = None          # Our cached cache key for strict builds
         self.__artifacts = artifacts            # Artifact cache
@@ -612,6 +613,12 @@ class Element(Plugin):
             dep.stage_artifact(sandbox)
         """
 
+        if not self._cached():
+            detail = "No artifacts have been cached yet for that element\n" + \
+                     "Try building the element first with `bst build`\n"
+            raise ElementError("No artifacts to stage",
+                               detail=detail, reason="uncached-checkout-attempt")
+
         if update_mtimes is None:
             update_mtimes = []
 
tests/completions/completions.py:
@@ -212,6 +212,10 @@ def test_option_directory(datafiles, cli, cmd, word_idx, expected, subdir):
     # Also try multi arguments together
     ('no-element-path', 'bst --directory ../ checkout t ', 4, ['target.bst '], 'files'),
     ('no-element-path', 'bst --directory ../ checkout target.bst ', 5, ['bin-files/', 'dev-files/'], 'files'),
+
+    # When the element-path has sub-folders
+    ('sub-folders', 'bst show base', 2, ['base/wanted.bst '], None),
+    ('sub-folders', 'bst show base/', 2, ['base/wanted.bst '], None),
 ])
 def test_argument_element(datafiles, cli, project, cmd, word_idx, expected, subdir):
     cwd = os.path.join(str(datafiles), project)
tests/completions/sub-folders/base/unwanted.bst (new file):
+kind: autotools
+description: |
+
+  Not auto-completed element

tests/completions/sub-folders/elements/base.bst (new file):
+kind: stack
+description: Base stack
+
+depends:
+- base/wanted.bst

tests/completions/sub-folders/elements/base/wanted.bst (new file):
+kind: autotools
+description: |
+
+  Auto-completed element

tests/completions/sub-folders/elements/hello.bst (new file):
+kind: autotools
+description: |
+
+  Hello world

tests/completions/sub-folders/project.conf (new file):
+# Project config for frontend build test
+name: test
+
+element-path: elements
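
Taken together with the changed-files list above, the new completion test data forms the layout below; only elements under the configured element-path are completion candidates, which is why unwanted.bst should never be offered:

    sub-folders/
    ├── project.conf              # element-path: elements
    ├── base/
    │   └── unwanted.bst          # outside the element path
    └── elements/
        ├── base.bst
        ├── hello.bst
        └── base/
            └── wanted.bst        # completed as "base/wanted.bst"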
tests/frontend/buildcheckout.py:
@@ -96,6 +96,16 @@ def test_build_checkout_deps(datafiles, cli, deps):
         assert not os.path.exists(filename)
 
 
+@pytest.mark.datafiles(DATA_DIR)
+def test_build_checkout_unbuilt(datafiles, cli):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'checkout')
+
+    # Check that checking out an unbuilt element fails nicely
+    result = cli.run(project=project, args=['checkout', 'target.bst', checkout])
+    result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
+
+
 @pytest.mark.datafiles(DATA_DIR)
 def test_build_checkout_tarball(datafiles, cli):
     project = os.path.join(datafiles.dirname, datafiles.basename)
tests/frontend/show.py:
@@ -1,6 +1,8 @@
 import os
-import pytest
+import sys
+import shutil
 import itertools
+import pytest
 from tests.testutils import cli, generate_junction
 
 from buildstream import _yaml
@@ -232,3 +234,58 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name):
 
     results = result.output.strip().splitlines()
     assert 'junction.bst:import-etc.bst-buildable' in results
+
+
+###############################################################
+#                   Testing recursion depth                   #
+###############################################################
+@pytest.mark.parametrize("dependency_depth", [100, 500, 1200])
+def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
+    project_name = "recursion-test"
+    path = str(tmpdir)
+    project_path = os.path.join(path, project_name)
+
+    def setup_test():
+        """
+        Creates a bst project with dependency_depth + 1 elements, each of which
+        depends on the previous element to be created. Each element created
+        is of type import and has an empty source file.
+        """
+        os.mkdir(project_path)
+
+        result = cli.run(project=project_path, silent=True,
+                         args=['init', '--project-name', project_name])
+        result.assert_success()
+
+        sourcefiles_path = os.path.join(project_path, "files")
+        os.mkdir(sourcefiles_path)
+
+        element_path = os.path.join(project_path, "elements")
+        for i in range(0, dependency_depth + 1):
+            element = {
+                'kind': 'import',
+                'sources': [{'kind': 'local',
+                             'path': 'files/source{}'.format(str(i))}],
+                'depends': ['element{}.bst'.format(str(i - 1))]
+            }
+            if i == 0:
+                del element['depends']
+            _yaml.dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))
+
+            source = os.path.join(sourcefiles_path, "source{}".format(str(i)))
+            open(source, 'x').close()
+            assert os.path.exists(source)
+
+    setup_test()
+    result = cli.run(project=project_path, silent=True,
+                     args=['show', "element{}.bst".format(str(dependency_depth))])
+
+    recursion_limit = sys.getrecursionlimit()
+    if dependency_depth <= recursion_limit:
+        result.assert_success()
+    else:
+        # Assert the exception is thrown and handled
+        assert not result.unhandled_exception
+        assert result.exit_code == -1
+
+    shutil.rmtree(project_path)
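
The parametrized depths are chosen around CPython's default recursion limit: 100 and 500 should stay under it, while 1200 is expected to hit the RecursionError path that app.py now reports cleanly. A quick sanity check of that assumption (the default limit is typically 1000 but interpreter-configurable, hence the runtime sys.getrecursionlimit() comparison in the test):

    import sys

    print(sys.getrecursionlimit())   # typically 1000 on CPython
    # depth 100 or 500  -> `bst show` is expected to succeed
    # depth 1200        -> handled RecursionError, exit code -1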
tests/frontend/workspace.py:
@@ -123,6 +123,58 @@ def test_open_force(cli, tmpdir, datafiles, kind):
     result.assert_success()
 
 
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", repo_kinds)
+def test_open_force_open(cli, tmpdir, datafiles, kind):
+    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
+
+    # Assert the workspace dir exists
+    assert os.path.exists(workspace)
+
+    # Now open the workspace again with --force, this should happily succeed
+    result = cli.run(project=project, args=[
+        'workspace', 'open', '--force', element_name, workspace
+    ])
+    result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", repo_kinds)
+def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
+    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False, "-alpha")
+
+    # Assert the workspace dir exists
+    assert os.path.exists(workspace)
+
+    hello_path = os.path.join(workspace, 'usr', 'bin', 'hello')
+    hello1_path = os.path.join(workspace, 'usr', 'bin', 'hello1')
+
+    tmpdir = os.path.join(str(tmpdir), "-beta")
+    shutil.move(hello_path, hello1_path)
+    element_name2, project2, workspace2 = open_workspace(cli, tmpdir, datafiles, kind, False, "-beta")
+
+    # Assert the workspace dir exists
+    assert os.path.exists(workspace2)
+
+    # Assert that workspace 1 contains the modified file
+    assert os.path.exists(hello1_path)
+
+    # Assert that workspace 2 contains the unmodified file
+    assert os.path.exists(os.path.join(workspace2, 'usr', 'bin', 'hello'))
+
+    # Now open the workspace again with --force, this should happily succeed
+    result = cli.run(project=project, args=[
+        'workspace', 'open', '--force', element_name2, workspace
+    ])
+
+    # Assert that the file in workspace 1 has been replaced
+    # with the file from workspace 2
+    assert os.path.exists(hello_path)
+    assert not os.path.exists(hello1_path)
+
+    result.assert_success()
+
+
 @pytest.mark.datafiles(DATA_DIR)
 @pytest.mark.parametrize("kind", repo_kinds)
 def test_close(cli, tmpdir, datafiles, kind):