Valentin David pushed to branch valentindavid/sysroot_dependencies at BuildStream / buildstream
Commits:
- 24167e64 by Valentin David at 2019-01-02T15:03:47Z
29 changed files:
- buildstream/_loader/loadelement.py
- buildstream/_loader/loader.py
- buildstream/buildelement.py
- buildstream/element.py
- buildstream/plugins/elements/compose.py
- doc/source/format_declaring.rst
- tests/loader/dependencies.py
- + tests/sysroot_depends/project/elements/a.bst
- + tests/sysroot_depends/project/elements/b.bst
- + tests/sysroot_depends/project/elements/base.bst
- + tests/sysroot_depends/project/elements/base/base-alpine.bst
- + tests/sysroot_depends/project/elements/compose-integration.bst
- + tests/sysroot_depends/project/elements/compose-layers-with-sysroot.bst
- + tests/sysroot_depends/project/elements/compose-layers.bst
- + tests/sysroot_depends/project/elements/integration.bst
- + tests/sysroot_depends/project/elements/layer1-files.bst
- + tests/sysroot_depends/project/elements/layer1.bst
- + tests/sysroot_depends/project/elements/layer2-files.bst
- + tests/sysroot_depends/project/elements/layer2.bst
- + tests/sysroot_depends/project/elements/manual-integration-runtime.bst
- + tests/sysroot_depends/project/elements/manual-integration.bst
- + tests/sysroot_depends/project/elements/target-variable.bst
- + tests/sysroot_depends/project/elements/target.bst
- + tests/sysroot_depends/project/files/a/a.txt
- + tests/sysroot_depends/project/files/b/b.txt
- + tests/sysroot_depends/project/files/layer1/1
- + tests/sysroot_depends/project/files/layer2/2
- + tests/sysroot_depends/project/project.conf
- + tests/sysroot_depends/sysroot_depends.py
Changes:
buildstream/_loader/loadelement.py
@@ -72,10 +72,24 @@ class LoadElement():
             'variables', 'environment', 'environment-nocache',
             'config', 'public', 'description',
             'build-depends', 'runtime-depends',
+            'sysroots',
         ])

+        self.deps = []
+        sysroots = _yaml.node_get(node, list, 'sysroots', default_value=[])
+        for sysroot in sysroots:
+            _yaml.node_validate(sysroot, ['path', 'depends', 'build-depends'])
+            path = _yaml.node_get(sysroot, str, 'path')
+            for dep in _extract_depends_from_node(sysroot):
+                if dep.dep_type == Symbol.RUNTIME:
+                    raise LoadError(LoadErrorReason.INVALID_DATA,
+                                    "{}: Sysroot'ed dependencies can not be of type 'runtime'"
+                                    .format(dep.provenance))
+                self.deps.append((path, dep))
+
         # Extract the Dependencies
-        self.deps = _extract_depends_from_node(self.node)
+        for dep in _extract_depends_from_node(self.node):
+            self.deps.append(('/', dep))

     # depends():
     #
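For orientation, the loop added above turns every entry under `sysroots` into a `(path, Dependency)` pair, while plain dependencies keep the path `'/'`. A minimal, self-contained sketch of that mapping (plain dicts and strings stand in for BuildStream's YAML nodes and Dependency objects; it is not the loader code itself):

    # Illustrative sketch only; the real loader code is shown in the hunk above.
    element_node = {
        'build-depends': ['base.bst'],
        'sysroots': [
            {'path': '/sysroot', 'build-depends': ['integration.bst']},
        ],
    }

    deps = []
    for sysroot in element_node.get('sysroots', []):
        path = sysroot['path']
        for dep in sysroot.get('depends', []) + sysroot.get('build-depends', []):
            deps.append((path, dep))      # sysroot'ed dependency
    for dep in element_node.get('build-depends', []):
        deps.append(('/', dep))           # ordinary dependency, staged at '/'

    print(deps)  # [('/sysroot', 'integration.bst'), ('/', 'base.bst')]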
@@ -101,7 +115,7 @@ class LoadElement():
             return

         self._dep_cache = {}
-        for dep in self.deps:
+        for _, dep in self.deps:
             elt = self._loader.get_element_for_dep(dep)

             # Ensure the cache of the element we depend on
buildstream/_loader/loader.py
@@ -125,7 +125,7 @@ class Loader():
             junction, name, loader = self._parse_name(target, rewritable, ticker,
                                                       fetch_subprojects=fetch_subprojects)
             loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
-            deps.append(Dependency(name, junction=junction))
+            deps.append(('/', Dependency(name, junction=junction)))
             profile_end(Topics.LOAD_PROJECT, target)

         #
@@ -273,7 +273,7 @@ class Loader():
         self._elements[filename] = element

         # Load all dependency files for the new LoadElement
-        for dep in element.deps:
+        for _, dep in element.deps:
             if dep.junction:
                 self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
                 loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
@@ -288,7 +288,7 @@ class Loader():
                                     "{}: Cannot depend on junction"
                                     .format(dep.provenance))

-        deps_names = [dep.name for dep in element.deps]
+        deps_names = [dep.name for _, dep in element.deps]
         self._warn_invalid_elements(deps_names)

         return element
@@ -337,7 +337,7 @@ class Loader():
         # Push / Check each dependency / Pop
         check_elements[element_name] = True
         sequence.append(element_name)
-        for dep in element.deps:
+        for _, dep in element.deps:
             loader = self._get_loader_for_dep(dep)
             loader._check_circular_deps(dep.name, check_elements, validated, sequence)
         del check_elements[element_name]
@@ -372,14 +372,21 @@ class Loader():
         if visited.get(element_name) is not None:
             return

-        for dep in element.deps:
+        for _, dep in element.deps:
             loader = self._get_loader_for_dep(dep)
             loader._sort_dependencies(dep.name, visited=visited)

-        def dependency_cmp(dep_a, dep_b):
+        def dependency_cmp(sdep_a, sdep_b):
+            sysroot_a, dep_a = sdep_a
+            sysroot_b, dep_b = sdep_b
             element_a = self.get_element_for_dep(dep_a)
             element_b = self.get_element_for_dep(dep_b)

+            if sysroot_a < sysroot_b:
+                return -1
+            if sysroot_b < sysroot_a:
+                return 1
+
             # Sort on inter element dependency first
             if element_a.depends(element_b):
                 return 1
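The comparator change above sorts by sysroot path before falling back to the existing inter-element ordering, which keeps all dependencies staged into the same sysroot grouped together in staging order. A small, self-contained illustration of just the sysroot comparison (element names are hypothetical):

    from functools import cmp_to_key

    def dependency_cmp(sdep_a, sdep_b):
        # Compare (sysroot, dependency) pairs by sysroot path only; the real
        # implementation then falls back to inter-element dependency ordering.
        sysroot_a, _dep_a = sdep_a
        sysroot_b, _dep_b = sdep_b
        if sysroot_a < sysroot_b:
            return -1
        if sysroot_b < sysroot_a:
            return 1
        return 0

    deps = [('/sysroot', 'b.bst'), ('/', 'a.bst'), ('/sysroot', 'integration.bst')]
    print(sorted(deps, key=cmp_to_key(dependency_cmp)))
    # [('/', 'a.bst'), ('/sysroot', 'b.bst'), ('/sysroot', 'integration.bst')]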
@@ -478,11 +485,11 @@ class Loader():
         self._meta_elements[element_name] = meta_element

         # Descend
-        for dep in element.deps:
+        for sysroot, dep in element.deps:
             loader = self._get_loader_for_dep(dep)
             meta_dep = loader._collect_element(dep.name)
             if dep.dep_type != 'runtime':
-                meta_element.build_dependencies.append(meta_dep)
+                meta_element.build_dependencies.append((sysroot, meta_dep))
             if dep.dep_type != 'build':
                 meta_element.dependencies.append(meta_dep)

buildstream/buildelement.py
@@ -224,8 +224,9 @@ class BuildElement(Element):
         # Run any integration commands provided by the dependencies
         # once they are all staged and ready
         with sandbox.batch(SandboxFlags.NONE, label="Integrating sandbox"):
-            for dep in self.dependencies(Scope.BUILD):
-                dep.integrate(sandbox)
+            for sysroot, dep in self.dependencies(Scope.BUILD, with_sysroot=True):
+                if sysroot == '/':
+                    dep.integrate(sandbox)

         # Stage sources in the build root
         self.stage_sources(sandbox, self.get_variable('build-root'))
buildstream/element.py
@@ -379,7 +379,8 @@ class Element(Plugin):
         for source in self.__sources:
             yield source

-    def dependencies(self, scope, *, recurse=True, visited=None, recursed=False):
+    def dependencies(self, scope, *, recurse=True, visited=None, recursed=False,
+                     with_sysroot=False, sysroot='/'):
         """dependencies(scope, *, recurse=True)

         A generator function which yields the dependencies of the given element.
@@ -390,9 +391,17 @@ class Element(Plugin):
         dependencies in the given `scope` will be traversed, and the element itself
         will be omitted.

+        If `with_sysroot` is true, then the generator will yield
+        tuples with the sysroot path along with the Element. It is
+        possible to get multiple times the same Element with
+        `with_sysroot` if the sysroot is different. `with_sysroot` is
+        not usable with `Scope.ALL`.
+
         Args:
            scope (:class:`.Scope`): The scope to iterate in
            recurse (bool): Whether to recurse
+           with_sysroot (bool): whether to return tuples with sysroot path
+           sysroot (str): Initial sysroot. Should not be set externally.

         Yields:
            (:class:`.Element`): The dependencies in `scope`, in deterministic staging order
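As a rough, self-contained illustration of the contract documented above (not BuildStream code): with `with_sysroot=True` the generator yields `(sysroot, element)` pairs, and the same element can appear once per distinct sysroot.

    def dependencies(build_deps, *, with_sysroot=False):
        # build_deps: (sysroot, element) pairs as stored by this commit; the
        # elements are just strings here for illustration.
        for sysroot, element in build_deps:
            yield (sysroot, element) if with_sysroot else element

    build_deps = [('/', 'base.bst'), ('/sysroot', 'integration.bst')]
    print(list(dependencies(build_deps)))
    # ['base.bst', 'integration.bst']
    print(list(dependencies(build_deps, with_sysroot=True)))
    # [('/', 'base.bst'), ('/sysroot', 'integration.bst')]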
@@ -404,40 +413,58 @@ class Element(Plugin):

         scope_set = set((Scope.BUILD, Scope.RUN)) if scope == Scope.ALL else set((scope,))

-        if full_name in visited and scope_set.issubset(visited[full_name]):
+        if scope == Scope.ALL:
+            assert not with_sysroot
+
+        if with_sysroot:
+            key = (sysroot, full_name)
+        else:
+            key = full_name
+
+        if key in visited and scope_set.issubset(visited[key]):
             return

         should_yield = False
-        if full_name not in visited:
-            visited[full_name] = scope_set
+        if key not in visited:
+            visited[key] = scope_set
             should_yield = True
         else:
-            visited[full_name] |= scope_set
+            visited[key] |= scope_set

         if recurse or not recursed:
             if scope == Scope.ALL:
-                for dep in self.__build_dependencies:
+                build_deps = []
+                for dep_sysroot, dep in self.__build_dependencies:
                     yield from dep.dependencies(Scope.ALL, recurse=recurse,
-                                                visited=visited, recursed=True)
+                                                visited=visited, recursed=True,
+                                                with_sysroot=False)
+                    build_deps.append(dep)

                 for dep in self.__runtime_dependencies:
-                    if dep not in self.__build_dependencies:
+                    if dep not in build_deps:
                         yield from dep.dependencies(Scope.ALL, recurse=recurse,
-                                                    visited=visited, recursed=True)
+                                                    visited=visited, recursed=True,
+                                                    with_sysroot=False)

             elif scope == Scope.BUILD:
-                for dep in self.__build_dependencies:
+                for dep_sysroot, dep in self.__build_dependencies:
+                    new_sysroot = self._subst_string(dep_sysroot) if not recursed else sysroot
                     yield from dep.dependencies(Scope.RUN, recurse=recurse,
-                                                visited=visited, recursed=True)
+                                                visited=visited, recursed=True,
+                                                sysroot=new_sysroot, with_sysroot=with_sysroot)

             elif scope == Scope.RUN:
                 for dep in self.__runtime_dependencies:
                     yield from dep.dependencies(Scope.RUN, recurse=recurse,
-                                                visited=visited, recursed=True)
+                                                visited=visited, recursed=True,
+                                                sysroot=sysroot, with_sysroot=with_sysroot)

         # Yeild self only at the end, after anything needed has been traversed
         if should_yield and (recurse or recursed) and scope != Scope.BUILD:
-            yield self
+            if with_sysroot:
+                yield sysroot, self
+            else:
+                yield self

     def search(self, scope, name):
         """Search for a dependency by name
@@ -636,7 +663,7 @@ class Element(Plugin):
         vbasedir = sandbox.get_virtual_directory()
         vstagedir = vbasedir \
             if path is None \
-            else vbasedir.descend(path.lstrip(os.sep).split(os.sep))
+            else vbasedir.descend(path.lstrip(os.sep).split(os.sep), create=True)

         files = list(self.__compute_splits(include, exclude, orphans))

@@ -654,7 +681,8 @@ class Element(Plugin):
         return link_result.combine(copy_result)

     def stage_dependency_artifacts(self, sandbox, scope, *, path=None,
-                                   include=None, exclude=None, orphans=True):
+                                   include=None, exclude=None, orphans=True,
+                                   build=True):
         """Stage element dependencies in scope

         This is primarily a convenience wrapper around
@@ -669,6 +697,7 @@ class Element(Plugin):
            include (list): An optional list of domains to include files from
            exclude (list): An optional list of domains to exclude files from
            orphans (bool): Whether to include files not spoken for by split domains
+           build (bool): Whether to stage artifacts for a build respecting sysroots

         Raises:
            (:class:`.ElementError`): If any of the dependencies in `scope` have not
@@ -684,7 +713,14 @@ class Element(Plugin):
             if self.__can_build_incrementally() and workspace.last_successful:
                 old_dep_keys = self.__get_artifact_metadata_dependencies(workspace.last_successful)

-        for dep in self.dependencies(scope):
+        def deps():
+            if build:
+                yield from self.dependencies(scope, with_sysroot=True)
+            else:
+                for dep in self.dependencies(scope, with_sysroot=False):
+                    yield '/', dep
+
+        for sysroot, dep in deps():
             # If we are workspaced, and we therefore perform an
             # incremental build, we must ensure that we update the mtimes
             # of any files created by our dependencies since the last
@@ -709,8 +745,13 @@ class Element(Plugin):
                     if utils._is_main_process():
                         self._get_context().get_workspaces().save_config()

+            if build:
+                sub_path = os.path.join(path, os.path.relpath(sysroot, '/')) if path else sysroot
+            else:
+                sub_path = path
+
             result = dep.stage_artifact(sandbox,
-                                        path=path,
+                                        path=sub_path,
                                         include=include,
                                         exclude=exclude,
                                         orphans=orphans,
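The `sub_path` computation above places each dependency's artifact under its sysroot, relative to the optional staging `path`. A self-contained check of that expression (the example paths are made up for illustration):

    import os

    def sub_path(path, sysroot, build=True):
        # Mirrors the expression added above: for builds, nest the sysroot
        # under the staging path (if any); otherwise keep the caller's path.
        if build:
            return os.path.join(path, os.path.relpath(sysroot, '/')) if path else sysroot
        return path

    print(sub_path(None, '/'))                                       # '/'
    print(sub_path(None, '/sysroot'))                                # '/sysroot'
    print(sub_path('/buildstream-build', '/sysroot'))                # '/buildstream-build/sysroot'
    print(sub_path('/buildstream-build', '/sysroot', build=False))   # '/buildstream-build'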
@@ -927,9 +968,9 @@ class Element(Plugin):
         for meta_dep in meta.dependencies:
             dependency = Element._new_from_meta(meta_dep)
             element.__runtime_dependencies.append(dependency)
-        for meta_dep in meta.build_dependencies:
+        for sysroot, meta_dep in meta.build_dependencies:
             dependency = Element._new_from_meta(meta_dep)
-            element.__build_dependencies.append(dependency)
+            element.__build_dependencies.append((sysroot, dependency))

         return element

@@ -1109,14 +1150,11 @@ class Element(Plugin):
             # Weak cache key includes names of direct build dependencies
             # but does not include keys of dependencies.
             if self.BST_STRICT_REBUILD:
-                dependencies = [
-                    e._get_cache_key(strength=_KeyStrength.WEAK)
-                    for e in self.dependencies(Scope.BUILD)
-                ]
+                dependencies = [(sysroot, e._get_cache_key(strength=_KeyStrength.WEAK))
+                                for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
             else:
-                dependencies = [
-                    e.name for e in self.dependencies(Scope.BUILD, recurse=False)
-                ]
+                dependencies = [(sysroot, e.name)
+                                for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]

             self.__weak_cache_key = self.__calculate_cache_key(dependencies)

@@ -1144,9 +1182,8 @@ class Element(Plugin):
                 return

             if self.__strict_cache_key is None:
-                dependencies = [
-                    e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
-                ]
+                dependencies = [(sysroot, e.__strict_cache_key)
+                                for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
                 self.__strict_cache_key = self.__calculate_cache_key(dependencies)

                 if self.__strict_cache_key is None:
@@ -1186,10 +1223,8 @@ class Element(Plugin):
                 strong_key, _ = self.__get_artifact_metadata_keys()
                 self.__cache_key = strong_key
             elif self.__assemble_scheduled or self.__assemble_done:
-                # Artifact will or has been built, not downloaded
-                dependencies = [
-                    e._get_cache_key() for e in self.dependencies(Scope.BUILD)
-                ]
+                dependencies = [(sysroot, e._get_cache_key())
+                                for sysroot, e in self.dependencies(Scope.BUILD, with_sysroot=True)]
                 self.__cache_key = self.__calculate_cache_key(dependencies)

                 if self.__cache_key is None:
@@ -1354,7 +1389,7 @@ class Element(Plugin):
         else:
             # Stage deps in the sandbox root
             with self.timed_activity("Staging dependencies", silent_nested=True):
-                self.stage_dependency_artifacts(sandbox, scope)
+                self.stage_dependency_artifacts(sandbox, scope, build=False)

             # Run any integration commands provided by the dependencies
             # once they are all staged and ready
@@ -2080,8 +2115,11 @@ class Element(Plugin):
     #
     def __calculate_cache_key(self, dependencies):
         # No cache keys for dependencies which have no cache keys
-        if None in dependencies:
-            return None
+        for dep in dependencies:
+            if dep[1] is None:
+                return None
+        # Do not break cache keys
+        dependencies = [(sysroot, key) if sysroot != '/' else key for sysroot, key in dependencies]

         # Generate dict that is used as base for all cache keys
         if self.__cache_key_dict is None:
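The normalization above keeps the pre-existing cache-key shape for dependencies staged at `'/'`, so elements that do not use sysroots keep their old cache keys, while sysroot'ed dependencies contribute `(sysroot, key)` pairs. A self-contained illustration (the keys are made up):

    dependencies = [('/', 'abc123'), ('/sysroot', 'def456')]

    # Sysroot'ed entries keep the (sysroot, key) pair; '/' entries collapse
    # back to the bare key so existing cache keys are not invalidated.
    normalized = [(sysroot, key) if sysroot != '/' else key
                  for sysroot, key in dependencies]
    print(normalized)  # ['abc123', ('/sysroot', 'def456')]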
buildstream/plugins/elements/compose.py
@@ -123,8 +123,9 @@ class ComposeElement(Element):
                 vbasedir.mark_unmodified()

         with sandbox.batch(0):
-            for dep in self.dependencies(Scope.BUILD):
-                dep.integrate(sandbox)
+            for sysroot, dep in self.dependencies(Scope.BUILD, with_sysroot=True):
+                if sysroot == '/':
+                    dep.integrate(sandbox)

         if require_split:
             # Calculate added, modified and removed files
doc/source/format_declaring.rst
@@ -159,6 +159,57 @@ See :ref:`format_dependencies` for more information on the dependency model.

 The ``runtime-depends`` configuration is available since :ref:`format version 14 <project_format_version>`

+Sysroot'ed dependencies
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Sysroot'ed dependencies are intended for bootstrapping base systems or
+cross-compiling.
+
+.. code:: yaml
+
+   # Specify some sysroot'ed dependencies
+   sysroots:
+   - path: /sysroot
+     depends:
+     - element1.bst
+     - element2.bst
+
+During a build, or when initializing a build shell, sysroot'ed build
+dependencies are staged in the given sysroot path instead of '/',
+together with the runtime dependencies of those sysroot'ed build
+dependencies.
+
+It is possible to end up with indirect runtime dependencies in
+different sysroots if they are staged from build dependencies with
+different sysroots. In that case they are staged multiple times, once
+per sysroot.
+
+Sysroot paths only apply to build dependencies. It is not possible to
+declare sysroot'ed runtime dependencies, either with ``type: runtime``
+or with ``runtime-depends``. ``all`` dependencies are allowed, but the
+sysroot only applies to their build aspect, not their runtime aspect.
+
+For example:
+
+.. code:: yaml
+
+   sysroots:
+   - path: /sysroot
+     depends:
+     - element.bst
+
+is equivalent to:
+
+.. code:: yaml
+
+   runtime-depends:
+   - element.bst
+   sysroots:
+   - path: /sysroot
+     build-depends:
+     - element.bst
+
+:ref:`Integration commands <public_integration>` are never executed for
+sysroot'ed dependencies.

 .. _format_sources:

tests/loader/dependencies.py
@@ -135,7 +135,7 @@ def test_build_dependency(datafiles):

     assert(len(element.build_dependencies) == 1)
     firstdep = element.build_dependencies[0]
-    assert(isinstance(firstdep, MetaElement))
+    assert(isinstance(firstdep[1], MetaElement))

     assert(len(element.dependencies) == 0)

@@ -170,7 +170,7 @@ def test_build_runtime_dependency(datafiles):
     firstdep = element.dependencies[0]
     assert(isinstance(firstdep, MetaElement))
     firstbuilddep = element.build_dependencies[0]
-    assert(firstdep == firstbuilddep)
+    assert(firstdep == firstbuilddep[1])


 @pytest.mark.datafiles(DATA_DIR)
@@ -187,7 +187,7 @@ def test_all_dependency(datafiles):
     firstdep = element.dependencies[0]
     assert(isinstance(firstdep, MetaElement))
     firstbuilddep = element.build_dependencies[0]
-    assert(firstdep == firstbuilddep)
+    assert(firstdep == firstbuilddep[1])


 @pytest.mark.datafiles(DATA_DIR)
tests/sysroot_depends/project/elements/a.bst (new file):
kind: import
sources:
  - kind: local
    path: files/a

tests/sysroot_depends/project/elements/b.bst (new file):
kind: import
sources:
  - kind: local
    path: files/b

tests/sysroot_depends/project/elements/base.bst (new file):
kind: stack
depends:
- base/base-alpine.bst

tests/sysroot_depends/project/elements/base/base-alpine.bst (new file):
kind: import

description: |
  Alpine Linux base for tests

  Generated using the `tests/integration-tests/base/generate-base.sh` script.

sources:
  - kind: tar
    url: alpine:integration-tests-base.v1.x86_64.tar.xz
    base-dir: ''
    ref: 3eb559250ba82b64a68d86d0636a6b127aa5f6d25d3601a79f79214dc9703639

tests/sysroot_depends/project/elements/compose-integration.bst (new file):
kind: compose

sysroots:
- path: /sysroot
  build-depends:
  - integration.bst

tests/sysroot_depends/project/elements/compose-layers-with-sysroot.bst (new file):
kind: compose

sysroots:
- path: /other-sysroot
  build-depends:
  - layer2.bst

tests/sysroot_depends/project/elements/compose-layers.bst (new file):
kind: compose

build-depends:
- layer2.bst

tests/sysroot_depends/project/elements/integration.bst (new file):
kind: manual

depends:
- base.bst

config:
  install-commands:
  - echo 0 >"%{install-root}/integrated.txt"

public:
  bst:
    integration-commands:
    - echo 1 >/integrated.txt

tests/sysroot_depends/project/elements/layer1-files.bst (new file):
kind: import
sources:
- kind: local
  path: files/layer1

tests/sysroot_depends/project/elements/layer1.bst (new file):
kind: stack

depends:
- layer1-files.bst

tests/sysroot_depends/project/elements/layer2-files.bst (new file):
kind: import
sources:
- kind: local
  path: files/layer2

tests/sysroot_depends/project/elements/layer2.bst (new file):
kind: manual

depends:
- layer2-files.bst

build-depends:
- base.bst

sysroots:
- path: /sysroot
  depends:
  - layer1.bst

config:
  install-commands:
  - mkdir -p "%{install-root}"
  - |
    for file in /*; do
      if test -f "${file}"; then
        cp "${file}" "%{install-root}"
      fi
    done

tests/sysroot_depends/project/elements/manual-integration-runtime.bst (new file):
kind: manual

depends:
- base.bst

sysroots:
- path: /sysroot
  depends:
  - integration.bst

config:
  install-commands:
  - mkdir -p "%{install-root}"
  - echo dummy >"%{install-root}/dummy.txt"

tests/sysroot_depends/project/elements/manual-integration.bst (new file):
kind: manual

build-depends:
- base.bst

sysroots:
- path: /sysroot
  build-depends:
  - integration.bst

config:
  install-commands:
  - mkdir -p "%{install-root}/sysroot"
  - if test -f /sysroot/integrated.txt; then cp /sysroot/integrated.txt "%{install-root}/sysroot"; fi
  - if test -f /integrated.txt; then cp /integrated.txt "%{install-root}"; fi

tests/sysroot_depends/project/elements/target-variable.bst (new file):
kind: compose

build-depends:
- a.bst

variables:
  mydir: test

sysroots:
- path: "/path/%{mydir}"
  build-depends:
  - b.bst

tests/sysroot_depends/project/elements/target.bst (new file):
kind: compose

build-depends:
- a.bst

sysroots:
- path: /sysroot
  build-depends:
  - b.bst

tests/sysroot_depends/project/files/a/a.txt (new file):
test

tests/sysroot_depends/project/files/b/b.txt (new file):
test

tests/sysroot_depends/project/files/layer1/1 (new file):
1

tests/sysroot_depends/project/files/layer2/2 (new file):
2

tests/sysroot_depends/project/project.conf (new file):
name: test
element-path: elements
aliases:
  alpine: https://bst-integration-test-images.ams3.cdn.digitaloceanspaces.com/
options:
  linux:
    type: bool
    description: Whether to expect a linux platform
    default: True
tests/sysroot_depends/sysroot_depends.py (new file):
import os
import pytest
from tests.testutils import cli_integration as cli
from tests.testutils.site import IS_LINUX, HAVE_BWRAP


# Project directory
DATA_DIR = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
    "project",
)


@pytest.mark.datafiles(DATA_DIR)
def test_sysroot_dependency_smoke_test(datafiles, cli, tmpdir):
    "Test simple sysroot use case without integration"

    project = str(datafiles)
    checkout = os.path.join(str(tmpdir), 'checkout')

    result = cli.run(project=project,
                     args=['build', 'target.bst'])
    result.assert_success()

    result = cli.run(project=project,
                     args=['checkout', 'target.bst', checkout])
    result.assert_success()
    assert os.path.exists(os.path.join(checkout, 'a.txt'))
    assert os.path.exists(os.path.join(checkout, 'sysroot', 'b.txt'))


@pytest.mark.integration
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
@pytest.mark.datafiles(DATA_DIR)
def test_skip_integration_commands_compose(datafiles, cli, tmpdir):
    "Integration commands are not run on sysroots"

    project = str(datafiles)
    checkout = os.path.join(str(tmpdir), 'checkout')

    result = cli.run(project=project,
                     args=['build', 'compose-integration.bst'])
    result.assert_success()

    result = cli.run(project=project,
                     args=['checkout', 'compose-integration.bst', checkout])
    result.assert_success()

    integrated = os.path.join(checkout, 'sysroot', 'integrated.txt')
    assert os.path.exists(integrated)
    with open(integrated, 'r') as f:
        assert f.read() == '0\n'


@pytest.mark.integration
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
@pytest.mark.datafiles(DATA_DIR)
def test_skip_integration_commands_build_element(datafiles, cli, tmpdir):
    "Integration commands are not run on sysroots"

    project = str(datafiles)
    checkout = os.path.join(str(tmpdir), 'checkout')

    result = cli.run(project=project,
                     args=['build', 'manual-integration.bst'])
    result.assert_success()

    result = cli.run(project=project,
                     args=['checkout', 'manual-integration.bst', checkout])
    result.assert_success()

    sysroot_integrated = os.path.join(checkout, 'sysroot', 'integrated.txt')
    integrated = os.path.join(checkout, 'integrated.txt')
    assert os.path.exists(sysroot_integrated)
    with open(sysroot_integrated, 'r') as f:
        assert f.read() == '0\n'
    # We need to make sure that integration command has not been run on / either.
    assert not os.path.exists(integrated)


@pytest.mark.integration
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
@pytest.mark.datafiles(DATA_DIR)
def test_sysroot_only_for_build(cli, tmpdir, datafiles):
    project = str(datafiles)
    checkout = os.path.join(str(tmpdir), 'checkout')

    result = cli.run(project=project,
                     args=['build', 'compose-layers.bst'])
    result.assert_success()

    result = cli.run(project=project,
                     args=['checkout', 'compose-layers.bst', checkout])

    result.assert_success()
    assert os.path.exists(os.path.join(checkout, '1'))
    assert os.path.exists(os.path.join(checkout, '2'))
    assert not os.path.exists(os.path.join(checkout, 'sysroot', '1'))


@pytest.mark.integration
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
@pytest.mark.datafiles(DATA_DIR)
def test_sysroot_only_for_build_with_sysroot(cli, tmpdir, datafiles):
    project = str(datafiles)
    checkout = os.path.join(str(tmpdir), 'checkout')

    result = cli.run(project=project,
                     args=['build', 'compose-layers-with-sysroot.bst'])
    result.assert_success()

    result = cli.run(project=project,
                     args=['checkout', 'compose-layers-with-sysroot.bst', checkout])

    result.assert_success()
    assert os.path.exists(os.path.join(checkout, 'other-sysroot', '1'))
    assert os.path.exists(os.path.join(checkout, 'other-sysroot', '2'))
    assert not os.path.exists(os.path.join(checkout, 'sysroot', '1'))


@pytest.mark.integration
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
@pytest.mark.datafiles(DATA_DIR)
def test_shell_no_sysroot(cli, tmpdir, datafiles):
    "bst shell does not have sysroots and dependencies are integrated"

    project = str(datafiles)

    result = cli.run(project=project,
                     args=['build', 'base.bst', 'manual-integration-runtime.bst'])
    result.assert_success()

    result = cli.run(project=project,
                     args=['shell', 'manual-integration-runtime.bst', '--', 'cat', '/integrated.txt'])
    result.assert_success()
    assert result.output == '1\n'

    result = cli.run(project=project,
                     args=['shell', 'manual-integration-runtime.bst', '--', 'ls', '/sysroot/integrated.txt'])
    assert result.exit_code != 0
    assert result.output == ''


@pytest.mark.integration
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
@pytest.mark.datafiles(DATA_DIR)
def test_shell_build_sysroot(cli, tmpdir, datafiles):
    "Build shell should stage build dependencies sysroot'ed non integrated"

    project = str(datafiles)

    result = cli.run(project=project,
                     args=['build', 'base.bst', 'integration.bst'])
    result.assert_success()

    result = cli.run(project=project,
                     args=['shell', '-b', 'manual-integration.bst', '--', 'cat', '/sysroot/integrated.txt'])
    result.assert_success()
    assert result.output == '0\n'


@pytest.mark.integration
@pytest.mark.datafiles(DATA_DIR)
def test_show_dependencies_only_once(cli, tmpdir, datafiles):
    """Dependencies should not show up in status several times when they
    are staged with multiple sysroots"""

    project = str(datafiles)

    result = cli.run(project=project,
                     args=['show', '--format', '%{name}', 'manual-integration.bst'])
    result.assert_success()
    pipeline = result.output.splitlines()
    assert pipeline == ['base/base-alpine.bst',
                        'base.bst',
                        'integration.bst',
                        'manual-integration.bst']


@pytest.mark.datafiles(DATA_DIR)
def test_sysroot_path_subst_variable(datafiles, cli, tmpdir):
    "Test that variables are expanded in sysroot path"

    project = str(datafiles)
    checkout = os.path.join(str(tmpdir), 'checkout')

    result = cli.run(project=project,
                     args=['build', 'target-variable.bst'])
    result.assert_success()

    result = cli.run(project=project,
                     args=['checkout', 'target-variable.bst', checkout])
    result.assert_success()

    assert os.path.exists(os.path.join(checkout, 'a.txt'))
    assert os.path.exists(os.path.join(checkout, 'path/test', 'b.txt'))