Abderrahim Kitouni pushed to branch abderrahim/artifact-cache-junction at BuildStream / buildstream
Commits:

- 3590ca8c by Abderrahim Kitouni at 2019-01-28T14:59:19Z
- 39b952dc by Abderrahim Kitouni at 2019-01-28T15:19:04Z
- 80b36d0c by Javier Jardón at 2019-01-28T17:54:52Z
- a1ab48da by Valentin David at 2019-01-28T21:30:26Z
- 2fcb4491 by Jürg Billeter at 2019-01-28T22:36:22Z
- 1c05a092 by Valentin David at 2019-01-29T05:58:17Z
- 785da59c by Jürg Billeter at 2019-01-29T06:49:10Z
- ddef91ea by Valentin David at 2019-01-29T07:23:35Z
- 6a4c8611 by Jürg Billeter at 2019-01-29T08:18:45Z
- 86c8e414 by Angelos Evripiotis at 2019-01-29T10:39:29Z
- aae35e13 by Angelos Evripiotis at 2019-01-29T12:08:07Z
- 03111d39 by Dor Askayo at 2019-01-30T10:35:06Z
- 7256bb0c by James Ennis at 2019-01-30T11:34:04Z
- 36746730 by Chandan Singh at 2019-01-31T10:50:05Z
- fa4a21ce by Chandan Singh at 2019-01-31T12:15:43Z
- dd791373 by Chandan Singh at 2019-01-31T14:32:44Z
- 96c0fbd6 by Chandan Singh at 2019-01-31T15:39:19Z
- d25e2795 by Benjamin Schubert at 2019-01-31T17:06:23Z
- 2d0eebbf by Benjamin Schubert at 2019-01-31T17:06:23Z
- 583bd97d by Benjamin Schubert at 2019-02-01T10:26:37Z
- 51cec3da by Phil Dawson at 2019-02-01T14:25:44Z
- 2b38aabe by Phil Dawson at 2019-02-01T15:33:00Z
- 6a661be6 by Abderrahim Kitouni at 2019-02-01T16:15:03Z
- 1c20cab4 by Abderrahim Kitouni at 2019-02-01T16:15:11Z
24 changed files:
- NEWS
- buildstream/_artifactcache.py
- buildstream/_cas/casserver.py
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_loader/loadelement.py
- buildstream/_loader/loader.py
- buildstream/_project.py
- buildstream/_scheduler/scheduler.py
- buildstream/data/userconfig.yaml
- buildstream/plugins/elements/filter.py
- conftest.py
- requirements/requirements.in
- tests/cachekey/cachekey.py
- + tests/cachekey/project/elements/key-stability/aaa.bst
- + tests/cachekey/project/elements/key-stability/t1.bst
- + tests/cachekey/project/elements/key-stability/t2.bst
- + tests/cachekey/project/elements/key-stability/top-level.bst
- + tests/cachekey/project/elements/key-stability/zzz.bst
- tests/elements/filter.py
- + tests/elements/filter/basic/elements/input-with-deps.bst
- + tests/elements/filter/basic/elements/output-include-with-indirect-deps.bst
- tests/testutils/site.py
- tox.ini
Changes:
--- a/NEWS
+++ b/NEWS
@@ -50,6 +50,11 @@ buildstream 1.3.1
     an error message and a hint instead, to avoid bothering folks that just
     made a mistake.
 
+  o BREAKING CHANGE: The unconditional 'Are you sure?' prompts have been
+    removed. These would always ask you if you were sure when running
+    'bst workspace close --remove-dir' or 'bst workspace reset'. They got in
+    the way too often.
+
   o Failed builds are included in the cache as well.
     `bst checkout` will provide anything in `%{install-root}`.
     A build including cached fails will cause any dependant elements
@@ -87,12 +92,6 @@ buildstream 1.3.1
     instead of just a specially-formatted build-root with a `root` and `scratch`
     subdirectory.
 
-  o The buildstream.conf file learned new
-    'prompt.really-workspace-close-remove-dir' and
-    'prompt.really-workspace-reset-hard' options. These allow users to suppress
-    certain confirmation prompts, e.g. double-checking that the user meant to
-    run the command as typed.
-
   o Due to the element `build tree` being cached in the respective artifact their
     size in some cases has significantly increased. In *most* cases the build trees
     are not utilised when building targets, as such by default bst 'pull' & 'build'
--- a/buildstream/_artifactcache.py
+++ b/buildstream/_artifactcache.py
@@ -467,7 +467,7 @@ class ArtifactCache():
     #    on_failure (callable): Called if we fail to contact one of the caches.
     #
     def initialize_remotes(self, *, on_failure=None):
-        remote_specs = self.global_remote_specs
+        remote_specs = list(self.global_remote_specs)
 
         for project in self.project_remote_specs:
             remote_specs += self.project_remote_specs[project]
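The one-line change above fixes a classic Python aliasing bug: binding a name to an existing list and then using += mutates that list in place. A minimal standalone sketch (illustrative values, not BuildStream code):

# Sketch of the aliasing bug fixed above: '+=' on a list mutates it in
# place, so without the copy every call would append project specs onto
# the shared global list.
global_specs = ["https://cache.example.com/artifacts"]    # hypothetical specs
project_specs = ["https://project.example.com/artifacts"]

def initialize_remotes_buggy():
    remote_specs = global_specs          # alias, not a copy
    remote_specs += project_specs        # mutates global_specs too!
    return remote_specs

def initialize_remotes_fixed():
    remote_specs = list(global_specs)    # shallow copy, original untouched
    remote_specs += project_specs
    return remote_specs

initialize_remotes_buggy()
assert len(global_specs) == 2            # the global list was corrupted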
@@ -1046,8 +1046,5 @@ class ArtifactCache():
 #    A list of ArtifactCacheSpec instances describing the remote artifact caches.
 #
 def _configured_remote_artifact_cache_specs(context, project):
-    project_overrides = context.get_overrides(project.name)
-    project_extra_specs = ArtifactCache.specs_from_config_node(project_overrides)
-
     return list(utils._deduplicate(
-        project_extra_specs + project.artifact_cache_specs + context.artifact_cache_specs))
+        project.artifact_cache_specs + context.artifact_cache_specs))
--- a/buildstream/_cas/casserver.py
+++ b/buildstream/_cas/casserver.py
@@ -324,7 +324,7 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddres
             blob_response.digest.size_bytes = digest.size_bytes
 
             if len(blob_request.data) != digest.size_bytes:
-                blob_response.status.code = grpc.StatusCode.FAILED_PRECONDITION
+                blob_response.status.code = code_pb2.FAILED_PRECONDITION
                 continue
 
             try:
@@ -335,10 +335,10 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddres
                     out.flush()
                     server_digest = self.cas.add_object(path=out.name)
                     if server_digest.hash != digest.hash:
-                        blob_response.status.code = grpc.StatusCode.FAILED_PRECONDITION
+                        blob_response.status.code = code_pb2.FAILED_PRECONDITION
 
             except ArtifactTooLargeException:
-                blob_response.status.code = grpc.StatusCode.RESOURCE_EXHAUSTED
+                blob_response.status.code = code_pb2.RESOURCE_EXHAUSTED
 
         return response
 
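For context on why code_pb2 is the right module here (a sketch, assuming googleapis-common-protos is installed): the protobuf google.rpc.Status.code field is a plain int32, while grpc.StatusCode is a Python enum wrapping a (code, name) tuple, so assigning the enum to the proto field fails; code_pb2 members are bare integers.

# Sketch (not project code): why the servicer must use code_pb2 rather
# than grpc.StatusCode when filling in a google.rpc.Status message.
import grpc
from google.rpc import code_pb2, status_pb2

status = status_pb2.Status()
status.code = code_pb2.FAILED_PRECONDITION            # OK: a plain int (9)

try:
    status.code = grpc.StatusCode.FAILED_PRECONDITION  # enum, not an int
except TypeError as err:
    print("rejected as expected:", err)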
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -121,18 +121,10 @@ class Context():
         # Whether or not to attempt to pull build trees globally
         self.pull_buildtrees = None
 
-        # Boolean, whether we double-check with the user that they meant to
-        # remove a workspace directory.
-        self.prompt_workspace_close_remove_dir = None
-
         # Boolean, whether we double-check with the user that they meant to
         # close the workspace when they're using it to access the project.
         self.prompt_workspace_close_project_inaccessible = None
 
-        # Boolean, whether we double-check with the user that they meant to do
-        # a hard reset of a workspace, potentially losing changes.
-        self.prompt_workspace_reset_hard = None
-
         # Whether elements must be rebuilt when their dependencies have changed
         self._strict_build_plan = None
 
@@ -260,16 +252,10 @@ class Context():
         prompt = _yaml.node_get(
             defaults, Mapping, 'prompt')
         _yaml.node_validate(prompt, [
-            'really-workspace-close-remove-dir',
             'really-workspace-close-project-inaccessible',
-            'really-workspace-reset-hard',
         ])
-        self.prompt_workspace_close_remove_dir = _node_get_option_str(
-            prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
         self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
             prompt, 'really-workspace-close-project-inaccessible', ['ask', 'yes']) == 'ask'
-        self.prompt_workspace_reset_hard = _node_get_option_str(
-            prompt, 'really-workspace-reset-hard', ['ask', 'yes']) == 'ask'
 
         # Load per-projects overrides
         self._project_overrides = _yaml.node_get(defaults, Mapping, 'projects', default_value={})
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -841,11 +841,6 @@ def workspace_close(app, remove_dir, all_, elements):
     if nonexisting:
         raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
 
-    if app.interactive and remove_dir and app.context.prompt_workspace_close_remove_dir:
-        if not click.confirm('This will remove all your changes, are you sure?'):
-            click.echo('Aborting', err=True)
-            sys.exit(-1)
-
     for element_name in elements:
         app.stream.workspace_close(element_name, remove_dir=remove_dir)
 
@@ -879,11 +874,6 @@ def workspace_reset(app, soft, track_, all_, elements):
     if all_ and not app.stream.workspace_exists():
         raise AppError("No open workspaces to reset")
 
-    if app.interactive and not soft and app.context.prompt_workspace_reset_hard:
-        if not click.confirm('This will remove all your changes, are you sure?'):
-            click.echo('Aborting', err=True)
-            sys.exit(-1)
-
     if all_:
         elements = tuple(element_name for element_name, _ in app.context.get_workspaces().list())
 
--- a/buildstream/_loader/loadelement.py
+++ b/buildstream/_loader/loadelement.py
@@ -39,6 +39,20 @@ from .types import Symbol, Dependency
 #    loader (Loader): The Loader object for this element
 #
 class LoadElement():
+    # Dependency():
+    #
+    # A link from a LoadElement to its dependencies.
+    #
+    # Keeps a link to one of the current Element's dependencies, together with
+    # its dependency type.
+    #
+    # Args:
+    #    element (LoadElement): a LoadElement on which there is a dependency
+    #    dep_type (str): the type of dependency this dependency link is
+    class Dependency:
+        def __init__(self, element, dep_type):
+            self.element = element
+            self.dep_type = dep_type
 
     def __init__(self, node, filename, loader):
 
@@ -74,8 +88,11 @@ class LoadElement():
             'build-depends', 'runtime-depends',
         ])
 
-        # Extract the Dependencies
-        self.deps = _extract_depends_from_node(self.node)
+        self.dependencies = []
+
+    @property
+    def junction(self):
+        return self._loader.project.junction
 
     # depends():
     #
@@ -101,8 +118,8 @@ class LoadElement():
             return
 
         self._dep_cache = {}
-        for dep in self.deps:
-            elt = self._loader.get_element_for_dep(dep)
+        for dep in self.dependencies:
+            elt = dep.element
 
             # Ensure the cache of the element we depend on
             elt._ensure_depends_cache()
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -19,7 +19,6 @@
 
 import os
 from functools import cmp_to_key
-from collections import namedtuple
 from collections.abc import Mapping
 import tempfile
 import shutil
@@ -32,8 +31,8 @@ from .._profile import Topics, profile_start, profile_end
 from .._includes import Includes
 from .._yamlcache import YamlCache
 
-from .types import Symbol, Dependency
-from .loadelement import LoadElement
+from .types import Symbol
+from .loadelement import LoadElement, _extract_depends_from_node
 from . import MetaElement
 from . import MetaSource
 from ..types import CoreWarnings
@@ -112,7 +111,7 @@ class Loader():
 
         # First pass, recursively load files and populate our table of LoadElements
         #
-        deps = []
+        target_elements = []
 
         # XXX This will need to be changed to the context's top-level project if this method
         # is ever used for subprojects
@@ -122,10 +121,10 @@ class Loader():
         with YamlCache.open(self._context, cache_file) as yaml_cache:
             for target in targets:
                 profile_start(Topics.LOAD_PROJECT, target)
-                junction, name, loader = self._parse_name(target, rewritable, ticker,
-                                                          fetch_subprojects=fetch_subprojects)
-                loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
-                deps.append(Dependency(name, junction=junction))
+                _junction, name, loader = self._parse_name(target, rewritable, ticker,
+                                                           fetch_subprojects=fetch_subprojects)
+                element = loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
+                target_elements.append(element)
                 profile_end(Topics.LOAD_PROJECT, target)
 
         #
@@ -134,29 +133,29 @@ class Loader():
 
         # Set up a dummy element that depends on all top-level targets
         # to resolve potential circular dependencies between them
-        DummyTarget = namedtuple('DummyTarget', ['name', 'full_name', 'deps'])
-
-        dummy = DummyTarget(name='', full_name='', deps=deps)
-        self._elements[''] = dummy
+        dummy_target = LoadElement("", "", self)
+        dummy_target.dependencies.extend(
+            LoadElement.Dependency(element, Symbol.RUNTIME)
+            for element in target_elements
+        )
 
         profile_key = "_".join(t for t in targets)
         profile_start(Topics.CIRCULAR_CHECK, profile_key)
-        self._check_circular_deps('')
+        self._check_circular_deps(dummy_target)
         profile_end(Topics.CIRCULAR_CHECK, profile_key)
 
         ret = []
         #
         # Sort direct dependencies of elements by their dependency ordering
         #
-        for target in targets:
-            profile_start(Topics.SORT_DEPENDENCIES, target)
-            junction, name, loader = self._parse_name(target, rewritable, ticker,
-                                                      fetch_subprojects=fetch_subprojects)
-            loader._sort_dependencies(name)
-            profile_end(Topics.SORT_DEPENDENCIES, target)
+        for element in target_elements:
+            loader = element._loader
+            profile_start(Topics.SORT_DEPENDENCIES, element.name)
+            loader._sort_dependencies(element)
+            profile_end(Topics.SORT_DEPENDENCIES, element.name)
             # Finally, wrap what we have into LoadElements and return the target
             #
-            ret.append(loader._collect_element(name))
+            ret.append(loader._collect_element(element))
 
         return ret
 
184 | 183 |
if os.path.exists(self._tempdir):
|
185 | 184 |
shutil.rmtree(self._tempdir)
|
186 | 185 |
|
187 |
- # get_element_for_dep():
|
|
188 |
- #
|
|
189 |
- # Gets a cached LoadElement by Dependency object
|
|
190 |
- #
|
|
191 |
- # This is used by LoadElement
|
|
192 |
- #
|
|
193 |
- # Args:
|
|
194 |
- # dep (Dependency): The dependency to search for
|
|
195 |
- #
|
|
196 |
- # Returns:
|
|
197 |
- # (LoadElement): The cached LoadElement
|
|
198 |
- #
|
|
199 |
- def get_element_for_dep(self, dep):
|
|
200 |
- loader = self._get_loader_for_dep(dep)
|
|
201 |
- return loader._elements[dep.name]
|
|
202 |
- |
|
203 | 186 |
###########################################
|
204 | 187 |
# Private Methods #
|
205 | 188 |
###########################################
|
@@ -272,8 +255,10 @@ class Loader():
 
         self._elements[filename] = element
 
+        dependencies = _extract_depends_from_node(node)
+
         # Load all dependency files for the new LoadElement
-        for dep in element.deps:
+        for dep in dependencies:
             if dep.junction:
                 self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
                 loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
@@ -288,7 +273,9 @@ class Loader():
                                 "{}: Cannot depend on junction"
                                 .format(dep.provenance))
 
-        deps_names = [dep.name for dep in element.deps]
+            element.dependencies.append(LoadElement.Dependency(dep_element, dep.dep_type))
+
+        deps_names = [dep.name for dep in dependencies]
         self._warn_invalid_elements(deps_names)
 
         return element
@@ -299,12 +286,12 @@ class Loader():
     # dependencies already resolved.
     #
     # Args:
-    #    element_name (str): The element-path relative element name to check
+    #    element (str): The element to check
     #
     # Raises:
     #    (LoadError): In case there was a circular dependency error
     #
-    def _check_circular_deps(self, element_name, check_elements=None, validated=None, sequence=None):
+    def _check_circular_deps(self, element, check_elements=None, validated=None, sequence=None):
 
         if check_elements is None:
             check_elements = {}
313 | 300 |
if sequence is None:
|
314 | 301 |
sequence = []
|
315 | 302 |
|
316 |
- element = self._elements[element_name]
|
|
317 |
- |
|
318 |
- # element name must be unique across projects
|
|
319 |
- # to be usable as key for the check_elements and validated dicts
|
|
320 |
- element_name = element.full_name
|
|
321 |
- |
|
322 | 303 |
# Skip already validated branches
|
323 |
- if validated.get(element_name) is not None:
|
|
304 |
+ if validated.get(element) is not None:
|
|
324 | 305 |
return
|
325 | 306 |
|
326 |
- if check_elements.get(element_name) is not None:
|
|
307 |
+ if check_elements.get(element) is not None:
|
|
327 | 308 |
# Create `chain`, the loop of element dependencies from this
|
328 | 309 |
# element back to itself, by trimming everything before this
|
329 | 310 |
# element from the sequence under consideration.
|
330 |
- chain = sequence[sequence.index(element_name):]
|
|
331 |
- chain.append(element_name)
|
|
311 |
+ chain = sequence[sequence.index(element.full_name):]
|
|
312 |
+ chain.append(element.full_name)
|
|
332 | 313 |
raise LoadError(LoadErrorReason.CIRCULAR_DEPENDENCY,
|
333 | 314 |
("Circular dependency detected at element: {}\n" +
|
334 | 315 |
"Dependency chain: {}")
|
335 |
- .format(element.name, " -> ".join(chain)))
|
|
316 |
+ .format(element.full_name, " -> ".join(chain)))
|
|
336 | 317 |
|
337 | 318 |
# Push / Check each dependency / Pop
|
338 |
- check_elements[element_name] = True
|
|
339 |
- sequence.append(element_name)
|
|
340 |
- for dep in element.deps:
|
|
341 |
- loader = self._get_loader_for_dep(dep)
|
|
342 |
- loader._check_circular_deps(dep.name, check_elements, validated, sequence)
|
|
343 |
- del check_elements[element_name]
|
|
319 |
+ check_elements[element] = True
|
|
320 |
+ sequence.append(element.full_name)
|
|
321 |
+ for dep in element.dependencies:
|
|
322 |
+ dep.element._loader._check_circular_deps(dep.element, check_elements, validated, sequence)
|
|
323 |
+ del check_elements[element]
|
|
344 | 324 |
sequence.pop()
|
345 | 325 |
|
346 | 326 |
# Eliminate duplicate paths
|
347 |
- validated[element_name] = True
|
|
327 |
+ validated[element] = True
|
|
348 | 328 |
|
349 | 329 |
# _sort_dependencies():
|
350 | 330 |
#
|
... | ... | @@ -357,28 +337,21 @@ class Loader(): |
357 | 337 |
# sorts throughout the build process.
|
358 | 338 |
#
|
359 | 339 |
# Args:
|
360 |
- # element_name (str): The element-path relative element name to sort
|
|
340 |
+ # element (LoadElement): The element to sort
|
|
361 | 341 |
#
|
362 |
- def _sort_dependencies(self, element_name, visited=None):
|
|
342 |
+ def _sort_dependencies(self, element, visited=None):
|
|
363 | 343 |
if visited is None:
|
364 |
- visited = {}
|
|
344 |
+ visited = set()
|
|
365 | 345 |
|
366 |
- element = self._elements[element_name]
|
|
367 |
- |
|
368 |
- # element name must be unique across projects
|
|
369 |
- # to be usable as key for the visited dict
|
|
370 |
- element_name = element.full_name
|
|
371 |
- |
|
372 |
- if visited.get(element_name) is not None:
|
|
346 |
+ if element in visited:
|
|
373 | 347 |
return
|
374 | 348 |
|
375 |
- for dep in element.deps:
|
|
376 |
- loader = self._get_loader_for_dep(dep)
|
|
377 |
- loader._sort_dependencies(dep.name, visited=visited)
|
|
349 |
+ for dep in element.dependencies:
|
|
350 |
+ dep.element._loader._sort_dependencies(dep.element, visited=visited)
|
|
378 | 351 |
|
379 | 352 |
def dependency_cmp(dep_a, dep_b):
|
380 |
- element_a = self.get_element_for_dep(dep_a)
|
|
381 |
- element_b = self.get_element_for_dep(dep_b)
|
|
353 |
+ element_a = dep_a.element
|
|
354 |
+ element_b = dep_b.element
|
|
382 | 355 |
|
383 | 356 |
# Sort on inter element dependency first
|
384 | 357 |
if element_a.depends(element_b):
|
... | ... | @@ -395,21 +368,21 @@ class Loader(): |
395 | 368 |
return -1
|
396 | 369 |
|
397 | 370 |
# All things being equal, string comparison.
|
398 |
- if dep_a.name > dep_b.name:
|
|
371 |
+ if element_a.name > element_b.name:
|
|
399 | 372 |
return 1
|
400 |
- elif dep_a.name < dep_b.name:
|
|
373 |
+ elif element_a.name < element_b.name:
|
|
401 | 374 |
return -1
|
402 | 375 |
|
403 | 376 |
# Sort local elements before junction elements
|
404 | 377 |
# and use string comparison between junction elements
|
405 |
- if dep_a.junction and dep_b.junction:
|
|
406 |
- if dep_a.junction > dep_b.junction:
|
|
378 |
+ if element_a.junction and element_b.junction:
|
|
379 |
+ if element_a.junction > element_b.junction:
|
|
407 | 380 |
return 1
|
408 |
- elif dep_a.junction < dep_b.junction:
|
|
381 |
+ elif element_a.junction < element_b.junction:
|
|
409 | 382 |
return -1
|
410 |
- elif dep_a.junction:
|
|
383 |
+ elif element_a.junction:
|
|
411 | 384 |
return -1
|
412 |
- elif dep_b.junction:
|
|
385 |
+ elif element_b.junction:
|
|
413 | 386 |
return 1
|
414 | 387 |
|
415 | 388 |
# This wont ever happen
|
... | ... | @@ -418,26 +391,23 @@ class Loader(): |
418 | 391 |
# Now dependency sort, we ensure that if any direct dependency
|
419 | 392 |
# directly or indirectly depends on another direct dependency,
|
420 | 393 |
# it is found later in the list.
|
421 |
- element.deps.sort(key=cmp_to_key(dependency_cmp))
|
|
394 |
+ element.dependencies.sort(key=cmp_to_key(dependency_cmp))
|
|
422 | 395 |
|
423 |
- visited[element_name] = True
|
|
396 |
+ visited.add(element)
|
|
424 | 397 |
|
425 | 398 |
# _collect_element()
|
426 | 399 |
#
|
427 | 400 |
# Collect the toplevel elements we have
|
428 | 401 |
#
|
429 | 402 |
# Args:
|
430 |
- # element_name (str): The element-path relative element name to sort
|
|
403 |
+ # element (LoadElement): The element for which to load a MetaElement
|
|
431 | 404 |
#
|
432 | 405 |
# Returns:
|
433 | 406 |
# (MetaElement): A recursively loaded MetaElement
|
434 | 407 |
#
|
435 |
- def _collect_element(self, element_name):
|
|
436 |
- |
|
437 |
- element = self._elements[element_name]
|
|
438 |
- |
|
408 |
+ def _collect_element(self, element):
|
|
439 | 409 |
# Return the already built one, if we already built it
|
440 |
- meta_element = self._meta_elements.get(element_name)
|
|
410 |
+ meta_element = self._meta_elements.get(element.name)
|
|
441 | 411 |
if meta_element:
|
442 | 412 |
return meta_element
|
443 | 413 |
|
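The sort still goes through functools.cmp_to_key because the ordering mixes a graph relation (does A depend on B?) with name and junction tie-breakers, which is awkward to express as a single key function. A self-contained refresher on the mechanism (example names only):

# cmp_to_key turns a three-way comparator (-1/0/1) into a sort key.
from functools import cmp_to_key

def name_cmp(a, b):
    return (a > b) - (a < b)       # the idiom behind the tie-breakers above

deps = ["zzz.bst", "aaa.bst", "t1.bst"]
print(sorted(deps, key=cmp_to_key(name_cmp)))   # ['aaa.bst', 't1.bst', 'zzz.bst']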
@@ -461,10 +431,10 @@ class Loader():
                 del source[Symbol.DIRECTORY]
 
             index = sources.index(source)
-            meta_source = MetaSource(element_name, index, element_kind, kind, source, directory)
+            meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
             meta_sources.append(meta_source)
 
-        meta_element = MetaElement(self.project, element_name, element_kind,
+        meta_element = MetaElement(self.project, element.name, element_kind,
                                    elt_provenance, meta_sources,
                                    _yaml.node_get(node, Mapping, Symbol.CONFIG, default_value={}),
                                    _yaml.node_get(node, Mapping, Symbol.VARIABLES, default_value={}),
475 | 445 |
element_kind == 'junction')
|
476 | 446 |
|
477 | 447 |
# Cache it now, make sure it's already there before recursing
|
478 |
- self._meta_elements[element_name] = meta_element
|
|
448 |
+ self._meta_elements[element.name] = meta_element
|
|
479 | 449 |
|
480 | 450 |
# Descend
|
481 |
- for dep in element.deps:
|
|
482 |
- loader = self._get_loader_for_dep(dep)
|
|
483 |
- meta_dep = loader._collect_element(dep.name)
|
|
451 |
+ for dep in element.dependencies:
|
|
452 |
+ loader = dep.element._loader
|
|
453 |
+ meta_dep = loader._collect_element(dep.element)
|
|
484 | 454 |
if dep.dep_type != 'runtime':
|
485 | 455 |
meta_element.build_dependencies.append(meta_dep)
|
486 | 456 |
if dep.dep_type != 'build':
|
... | ... | @@ -539,7 +509,7 @@ class Loader(): |
539 | 509 |
return None
|
540 | 510 |
|
541 | 511 |
# meta junction element
|
542 |
- meta_element = self._collect_element(filename)
|
|
512 |
+ meta_element = self._collect_element(self._elements[filename])
|
|
543 | 513 |
if meta_element.kind != 'junction':
|
544 | 514 |
raise LoadError(LoadErrorReason.INVALID_DATA,
|
545 | 515 |
"{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
|
@@ -601,23 +571,6 @@ class Loader():
 
         return loader
 
-    # _get_loader_for_dep():
-    #
-    # Gets the appropriate Loader for a Dependency object
-    #
-    # Args:
-    #    dep (Dependency): A Dependency object
-    #
-    # Returns:
-    #    (Loader): The Loader object to use for this Dependency
-    #
-    def _get_loader_for_dep(self, dep):
-        if dep.junction:
-            # junction dependency, delegate to appropriate loader
-            return self._loaders[dep.junction]
-        else:
-            return self
-
     # _parse_name():
     #
     # Get junction and base name of element along with loader for the sub-project
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -549,7 +549,15 @@ class Project():
         #
 
         # Load artifacts pull/push configuration for this project
-        self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
+        project_specs = ArtifactCache.specs_from_config_node(config, self.directory)
+        override_specs = ArtifactCache.specs_from_config_node(
+            self._context.get_overrides(self.name), self.directory)
+
+        self.artifact_cache_specs = override_specs + project_specs
+
+        if self.junction:
+            parent = self.junction._get_project()
+            self.artifact_cache_specs = parent.artifact_cache_specs + self.artifact_cache_specs
 
         # Load remote-execution configuration for this project
         project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
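The resulting precedence can be read off the concatenation order: user overrides beat the project's own config, and a junctioned sub-project inherits its parent's caches ahead of both; _configured_remote_artifact_cache_specs() then deduplicates, keeping first occurrences. A sketch with placeholder URLs:

# Precedence sketch (placeholder URLs, not real config): earlier entries
# take priority, since deduplication keeps the first occurrence.
override_specs = ["https://user-override.example.com"]
project_specs = ["https://project.example.com"]
parent_specs = ["https://parent-project.example.com"]

artifact_cache_specs = override_specs + project_specs
junctioned = True                       # i.e. loaded through a junction
if junctioned:
    artifact_cache_specs = parent_specs + artifact_cache_specs
print(artifact_cache_specs)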
--- a/buildstream/_scheduler/scheduler.py
+++ b/buildstream/_scheduler/scheduler.py
@@ -314,10 +314,10 @@ class Scheduler():
     #    job (Job): The job to spawn
     #
     def _spawn_job(self, job):
-        job.spawn()
         self._active_jobs.append(job)
         if self._job_start_callback:
             self._job_start_callback(job)
+        job.spawn()
 
     # Callback for the cache size job
     def _cache_size_job_complete(self, status, cache_size):
--- a/buildstream/data/userconfig.yaml
+++ b/buildstream/data/userconfig.yaml
@@ -112,14 +112,6 @@ logging:
 #
 prompt:
 
-  # Whether to really proceed with 'bst workspace close --remove-dir' removing
-  # a workspace directory, potentially losing changes.
-  #
-  #  ask - Ask the user if they are sure.
-  #  yes - Always remove, without asking.
-  #
-  really-workspace-close-remove-dir: ask
-
   # Whether to really proceed with 'bst workspace close' when doing so would
   # stop them from running bst commands in this workspace.
   #
@@ -127,11 +119,3 @@ prompt:
   #  yes - Always close, without asking.
   #
   really-workspace-close-project-inaccessible: ask
-
-  # Whether to really proceed with 'bst workspace reset' doing a hard reset of
-  # a workspace, potentially losing changes.
-  #
-  #  ask - Ask the user if they are sure.
-  #  yes - Always hard reset, without asking.
-  #
-  really-workspace-reset-hard: ask
--- a/buildstream/plugins/elements/filter.py
+++ b/buildstream/plugins/elements/filter.py
@@ -47,6 +47,8 @@ from buildstream import Element, ElementError, Scope
 class FilterElement(Element):
     # pylint: disable=attribute-defined-outside-init
 
+    BST_ARTIFACT_VERSION = 1
+
     # The filter element's output is its dependencies, so
     # we must rebuild if the dependencies change even when
     # not in strict build plans.
102 | 104 |
|
103 | 105 |
def assemble(self, sandbox):
|
104 | 106 |
with self.timed_activity("Staging artifact", silent_nested=True):
|
105 |
- for dep in self.dependencies(Scope.BUILD):
|
|
107 |
+ for dep in self.dependencies(Scope.BUILD, recurse=False):
|
|
106 | 108 |
dep.stage_artifact(sandbox, include=self.include,
|
107 | 109 |
exclude=self.exclude, orphans=self.include_orphans)
|
108 | 110 |
return ""
|
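To see why recurse=False matters, a toy model of the dependency walk (not the real Element API): with the default recursive walk the filter staged its entire build dependency tree, while direct-only iteration stages just the immediate dependency, which is exactly what the new test below asserts.

# Toy dependency walk illustrating recursive vs. direct-only iteration.
deps = {"output": ["input-with-deps"], "input-with-deps": ["input"]}

def dependencies(name, recurse=True):
    for dep in deps.get(name, ()):
        yield dep
        if recurse:
            yield from dependencies(dep, recurse)

print(list(dependencies("output")))                 # ['input-with-deps', 'input']
print(list(dependencies("output", recurse=False)))  # ['input-with-deps']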
--- a/conftest.py
+++ b/conftest.py
@@ -54,6 +54,7 @@ class IntegrationCache():
 
     def __init__(self, cache):
         cache = os.path.abspath(cache)
+        os.makedirs(cache, exist_ok=True)
 
         # Use the same sources every time
         self.sources = os.path.join(cache, 'sources')
--- a/requirements/requirements.in
+++ b/requirements/requirements.in
@@ -1,8 +1,8 @@
-Click
+Click >= 7.0
 grpcio >= 1.10
 Jinja2 >= 2.10
 pluginbase
-protobuf >= 3.5
+protobuf >= 3.6
 psutil
 # According to ruamel.yaml's PyPI page, we are suppose to use
 # "<=0.15" in production until 0.15 becomes API stable.
--- a/tests/cachekey/cachekey.py
+++ b/tests/cachekey/cachekey.py
@@ -214,3 +214,41 @@ def test_cache_key_fatal_warnings(cli, tmpdir, first_warnings, second_warnings,
     second_keys = run_get_cache_key("second", second_warnings)
 
     assert compare_cache_keys(first_keys, second_keys) == identical_keys
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_keys_stable_over_targets(cli, datafiles):
+    root_element = 'elements/key-stability/top-level.bst'
+    target1 = 'elements/key-stability/t1.bst'
+    target2 = 'elements/key-stability/t2.bst'
+
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    full_graph_result = cli.run(project=project, args=[
+        'show',
+        '--format', '%{name}::%{full-key}',
+        root_element
+    ])
+    full_graph_result.assert_success()
+    all_cache_keys = parse_output_keys(full_graph_result.output)
+
+    ordering1_result = cli.run(project=project, args=[
+        'show',
+        '--format', '%{name}::%{full-key}',
+        target1,
+        target2
+    ])
+    ordering1_result.assert_success()
+    ordering1_cache_keys = parse_output_keys(ordering1_result.output)
+
+    ordering2_result = cli.run(project=project, args=[
+        'show',
+        '--format', '%{name}::%{full-key}',
+        target2,
+        target1
+    ])
+    ordering2_result.assert_success()
+    ordering2_cache_keys = parse_output_keys(ordering2_result.output)
+
+    for element in ordering1_cache_keys:
+        assert ordering1_cache_keys[element] == ordering2_cache_keys[element]
+        assert ordering1_cache_keys[element] == all_cache_keys[element]
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/aaa.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/aaa.bst
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/t1.bst
@@ -0,0 +1,6 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/t1.bst
+depends:
+- elements/key-stability/zzz.bst
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/t2.bst
@@ -0,0 +1,7 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/t2.bst
+depends:
+- elements/key-stability/aaa.bst
+- elements/key-stability/zzz.bst
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/top-level.bst
@@ -0,0 +1,7 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/top-level.bst
+depends:
+- elements/key-stability/t1.bst
+- elements/key-stability/t2.bst
--- /dev/null
+++ b/tests/cachekey/project/elements/key-stability/zzz.bst
@@ -0,0 +1,4 @@
+kind: import
+sources:
+- kind: local
+  path: elements/key-stability/zzz.bst
--- a/tests/elements/filter.py
+++ b/tests/elements/filter.py
@@ -464,3 +464,23 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
     assert "ref" not in new_input["sources"][0]
     new_input2 = _yaml.load(input2_file)
     assert new_input2["sources"][0]["ref"] == ref
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'basic'))
+def test_filter_include_with_indirect_deps(datafiles, cli, tmpdir):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    result = cli.run(project=project, args=[
+        'build', 'output-include-with-indirect-deps.bst'])
+    result.assert_success()
+
+    checkout = os.path.join(tmpdir.dirname, tmpdir.basename, 'checkout')
+    result = cli.run(project=project, args=[
+        'artifact', 'checkout', 'output-include-with-indirect-deps.bst', '--directory', checkout])
+    result.assert_success()
+
+    # direct dependencies should be staged and filtered
+    assert os.path.exists(os.path.join(checkout, "baz"))
+
+    # indirect dependencies shouldn't be staged and filtered
+    assert not os.path.exists(os.path.join(checkout, "foo"))
+    assert not os.path.exists(os.path.join(checkout, "bar"))
--- /dev/null
+++ b/tests/elements/filter/basic/elements/input-with-deps.bst
@@ -0,0 +1,14 @@
+kind: import
+
+depends:
+- filename: input.bst
+
+sources:
+- kind: local
+  path: files
+
+public:
+  bst:
+    split-rules:
+      baz:
+      - /baz
--- /dev/null
+++ b/tests/elements/filter/basic/elements/output-include-with-indirect-deps.bst
@@ -0,0 +1,5 @@
+kind: filter
+
+depends:
+- filename: input-with-deps.bst
+  type: build
--- a/tests/testutils/site.py
+++ b/tests/testutils/site.py
@@ -18,7 +18,7 @@ try:
     utils.get_host_tool('git')
     HAVE_GIT = True
     out = str(subprocess.check_output(['git', '--version']), "utf-8")
-    version = tuple(int(x) for x in out.split(' ', 2)[2].split('.'))
+    version = tuple(int(x) for x in out.split(' ')[2].split('.'))
     HAVE_OLD_GIT = version < (1, 8, 5)
 except ProgramNotFoundError:
     HAVE_GIT = False
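The parsing fix handles `git --version` output that carries a suffix after the version number. With maxsplit=2 the third field keeps the whole remainder, so int() chokes on it; an unbounded split isolates the version token itself. A quick illustration (the Apple-style output string is hypothetical):

# Before/after behaviour of the version parsing on suffixed output.
out = "git version 2.20.1 (Apple Git-117)\n"

print(repr(out.split(' ', 2)[2]))   # '2.20.1 (Apple Git-117)\n' -> int() fails
version = tuple(int(x) for x in out.split(' ')[2].split('.'))
print(version)                      # (2, 20, 1)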
--- a/tox.ini
+++ b/tox.ini
@@ -88,5 +88,5 @@ whitelist_externals =
 commands =
     python3 setup.py --command-packages=click_man.commands man_pages
 deps =
-    click-man
+    click-man >= 0.3.0
     -rrequirements/requirements.txt