Chandan Singh pushed to branch chandan/source-checkout at BuildStream / buildstream
Commits:
- 1c24979a by Angelos Evripiotis at 2018-10-19T21:17:01Z
- 43a2eee3 by Angelos Evripiotis at 2018-10-19T21:48:59Z
- 12719f0d by Jürg Billeter at 2018-10-22T17:05:41Z
- a7a28d14 by Jürg Billeter at 2018-10-22T17:05:41Z
- be29e0f5 by Jürg Billeter at 2018-10-22T17:53:26Z
- b74aca1a by Jürg Billeter at 2018-10-23T09:22:19Z
- c7dda150 by Jürg Billeter at 2018-10-23T09:48:00Z
- 74c115b9 by Angelos Evripiotis at 2018-10-23T10:07:31Z
- ecb58b42 by Phil Dawson at 2018-10-23T10:33:47Z
- db0611b9 by Chandan Singh at 2018-10-23T11:30:05Z
- e0b99945 by Chandan Singh at 2018-10-23T11:30:05Z
23 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_frontend/cli.py
- buildstream/_pipeline.py
- buildstream/_scheduler/queues/queue.py
- buildstream/_stream.py
- buildstream/_yaml.py
- buildstream/buildelement.py
- buildstream/plugin.py
- buildstream/plugins/elements/autotools.yaml
- buildstream/plugins/elements/cmake.yaml
- buildstream/plugins/elements/make.yaml
- buildstream/plugins/elements/manual.yaml
- buildstream/plugins/elements/meson.yaml
- buildstream/plugins/elements/qmake.yaml
- buildstream/plugins/sources/deb.py
- buildstream/plugins/sources/git.py
- buildstream/plugins/sources/tar.py
- tests/completions/completions.py
- + tests/frontend/project/elements/checkout-deps.bst
- + tests/frontend/project/files/etc-files/etc/buildstream/config
- + tests/frontend/source_checkout.py
- tests/testutils/artifactshare.py
Changes:
=====================================
buildstream/_artifactcache/artifactcache.py
=====================================
@@ -228,7 +228,7 @@ class ArtifactCache():
         self._required_elements.update(elements)

         # For the cache keys which were resolved so far, we bump
-        # the atime of them.
+        # the mtime of them.
         #
         # This is just in case we have concurrent instances of
         # BuildStream running with the same artifact cache, it will
@@ -240,7 +240,7 @@ class ArtifactCache():
            for key in (strong_key, weak_key):
                if key:
                    try:
-                        self.update_atime(key)
+                        self.update_mtime(element, key)
                    except ArtifactError:
                        pass

@@ -391,15 +391,16 @@ class ArtifactCache():
     def preflight(self):
         pass

-    # update_atime()
+    # update_mtime()
     #
-    # Update the atime of an artifact.
+    # Update the mtime of an artifact.
     #
     # Args:
+    #     element (Element): The Element to update
     #     key (str): The key of the artifact.
     #
-    def update_atime(self, key):
-        raise ImplError("Cache '{kind}' does not implement contains()"
+    def update_mtime(self, element, key):
+        raise ImplError("Cache '{kind}' does not implement update_mtime()"
                         .format(kind=type(self).__name__))

     # initialize_remotes():
=====================================
buildstream/_artifactcache/cascache.py
=====================================
@@ -538,8 +538,9 @@ class CASCache(ArtifactCache):
         except FileNotFoundError as e:
             raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e

-    def update_atime(self, ref):
+    def update_mtime(self, element, key):
         try:
+            ref = self.get_artifact_fullname(element, key)
             os.utime(self._refpath(ref))
         except FileNotFoundError as e:
             raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
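A note on the mechanics here: when called without an explicit times argument,
os.utime() sets both the atime and the mtime of the path to the current time,
so bumping a ref is a single call. A minimal standalone sketch (the temporary
file stands in for a ref file and is purely illustrative):

    import os
    import tempfile

    with tempfile.NamedTemporaryFile() as f:
        # No times argument: atime and mtime are both set to "now", which is
        # what keeps a recently used artifact ref out of LRU expiry.
        os.utime(f.name)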
=====================================
buildstream/_frontend/cli.py
=====================================
@@ -662,6 +662,33 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
                             tar=tar)


+##################################################################
+#                  Source Checkout Command                       #
+##################################################################
+@cli.command(name='source-checkout', short_help='Checkout sources for an element')
+@click.option('--except', 'except_', multiple=True,
+              type=click.Path(readable=False),
+              help="Except certain dependencies")
+@click.option('--deps', '-d', default='none',
+              type=click.Choice(['build', 'none', 'run', 'all']),
+              help='The dependencies whose sources to checkout (default: none)')
+@click.option('--fetch', default=False, is_flag=True,
+              help='Fetch elements if they are not fetched')
+@click.argument('element',
+                type=click.Path(readable=False))
+@click.argument('location', type=click.Path())
+@click.pass_obj
+def source_checkout(app, element, location, deps, fetch, except_):
+    """Checkout sources of an element to the specified location
+    """
+    with app.initialized():
+        app.stream.source_checkout(element,
+                                   location=location,
+                                   deps=deps,
+                                   fetch=fetch,
+                                   except_targets=except_)
+
+
 ##################################################################
 #                    Workspace Command                           #
 ##################################################################
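For illustration, the new command composes with its options roughly like this
(element and path names are hypothetical):

    bst source-checkout --deps run --except base.bst hello.bst path/to/checkout
    bst source-checkout --fetch hello.bst path/to/checkout

The first form stages the sources of hello.bst plus its runtime dependencies,
excluding base.bst; the second stages only the target's own sources, fetching
them first if they are not already cached.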
=====================================
buildstream/_pipeline.py
=====================================
@@ -370,7 +370,7 @@ class Pipeline():
                 detail += "  Element: {} is inconsistent\n".format(element._get_full_name())
                 for source in element.sources():
                     if source._get_consistency() == Consistency.INCONSISTENT:
-                        detail += "    Source {} is missing ref\n".format(source)
+                        detail += "    {} is missing ref\n".format(source)
                 detail += '\n'
             detail += "Try tracking these elements first with `bst track`\n"

@@ -383,6 +383,33 @@ class Pipeline():
                 detail += "  " + element._get_full_name() + "\n"
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")

+    # assert_sources_cached()
+    #
+    # Asserts that sources for the given list of elements are cached.
+    #
+    # Args:
+    #    elements (list): The list of elements
+    #
+    def assert_sources_cached(self, elements):
+        uncached = []
+        with self._context.timed_activity("Checking sources"):
+            for element in elements:
+                if element._get_consistency() != Consistency.CACHED:
+                    uncached.append(element)
+
+        if uncached:
+            detail = "Sources are not cached for the following elements:\n\n"
+            for element in uncached:
+                detail += "  Sources for element: {} are not cached\n".format(element._get_full_name())
+                for source in element.sources():
+                    if source._get_consistency() != Consistency.CACHED:
+                        detail += "    {}\n".format(source)
+                detail += '\n'
+            detail += "Try fetching these elements first with `bst fetch`,\n" + \
+                      "or run this command with `--fetch` option\n"
+
+            raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
+
     #############################################################
     #                     Private Methods                       #
     #############################################################
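For a sense of the user-facing output, the detail string assembled above
renders along these lines when one element is uncached (element name
hypothetical; each uncached source is listed using its own string form):

    Sources are not cached for the following elements:

      Sources for element: hello.bst are not cached
        ...one line per uncached source...

    Try fetching these elements first with `bst fetch`,
    or run this command with `--fetch` option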
=====================================
buildstream/_scheduler/queues/queue.py
=====================================
@@ -208,7 +208,7 @@ class Queue():
     # This will have different results for elements depending
     # on the Queue.status() implementation.
     #
-    #   o Elements which are QueueStatus.WAIT will not be effected
+    #   o Elements which are QueueStatus.WAIT will not be affected
     #
     #   o Elements which are QueueStatus.SKIP will move directly
     #     to the dequeue pool
=====================================
buildstream/_stream.py
=====================================
@@ -379,27 +379,7 @@ class Stream():
         elements, _ = self._load((target,), (), fetch_subprojects=True)
         target = elements[0]

-        if not tar:
-            try:
-                os.makedirs(location, exist_ok=True)
-            except OSError as e:
-                raise StreamError("Failed to create checkout directory: '{}'"
-                                  .format(e)) from e
-
-        if not tar:
-            if not os.access(location, os.W_OK):
-                raise StreamError("Checkout directory '{}' not writable"
-                                  .format(location))
-            if not force and os.listdir(location):
-                raise StreamError("Checkout directory '{}' not empty"
-                                  .format(location))
-        elif os.path.exists(location) and location != '-':
-            if not os.access(location, os.W_OK):
-                raise StreamError("Output file '{}' not writable"
-                                  .format(location))
-            if not force and os.path.exists(location):
-                raise StreamError("Output file '{}' already exists"
-                                  .format(location))
+        self.__check_location_writable(location, force=force, tar=tar)

         # Stage deps into a temporary sandbox first
         try:
@@ -436,6 +416,42 @@ class Stream():
             raise StreamError("Error while staging dependencies into a sandbox"
                               ": '{}'".format(e), detail=e.detail, reason=e.reason) from e

+    # source_checkout()
+    #
+    # Checkout sources of the target element to the specified location
+    #
+    # Args:
+    #    target (str): The target element whose sources to checkout
+    #    location (str): Location to checkout the sources to
+    #    deps (str): The dependencies to checkout
+    #    fetch (bool): Whether to fetch missing sources
+    #    except_targets (list): List of targets to except from staging
+    #
+    def source_checkout(self, target, *,
+                        location=None,
+                        deps='none',
+                        fetch=False,
+                        except_targets=()):
+
+        self.__check_location_writable(location)
+
+        elements, _ = self._load((target,), (),
+                                 selection=deps,
+                                 except_targets=except_targets,
+                                 fetch_subprojects=True)
+
+        # Assert all sources are cached
+        if fetch:
+            self._fetch(elements)
+        self._pipeline.assert_sources_cached(elements)
+
+        # Stage all sources determined by scope
+        try:
+            self._write_element_sources(location, elements)
+        except BstError as e:
+            raise StreamError("Error while writing sources"
+                              ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
+
     # workspace_open
     #
     # Open a project workspace
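As a rough sketch of how this new Stream API is driven, mirroring the CLI
wiring above (element name, path and option values are hypothetical; stream is
an initialized Stream instance):

    stream.source_checkout('hello.bst',
                           location='path/to/checkout',
                           deps='all',
                           fetch=True,
                           except_targets=('base.bst',))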
@@ -719,7 +735,7 @@ class Stream():
             if self._write_element_script(source_directory, element)
         ]

-        self._write_element_sources(tempdir, elements)
+        self._write_element_sources(os.path.join(tempdir, "source"), elements)
         self._write_build_script(tempdir, elements)
         self._collect_sources(tempdir, tar_location,
                               target.normal_name, compression)
@@ -1082,11 +1098,10 @@ class Stream():
     # Write all source elements to the given directory
     def _write_element_sources(self, directory, elements):
         for element in elements:
-            source_dir = os.path.join(directory, "source")
-            element_source_dir = os.path.join(source_dir, element.normal_name)
-            os.makedirs(element_source_dir)
-
-            element._stage_sources_at(element_source_dir)
+            element_source_dir = os.path.join(directory, element.normal_name)
+            if list(element.sources()):
+                os.makedirs(element_source_dir)
+                element._stage_sources_at(element_source_dir)

     # Write a master build script to the sandbox
     def _write_build_script(self, directory, elements):
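With this change the caller chooses the parent directory and each element's
sources are staged into a subdirectory named after the element (its
normal_name), while elements that declare no sources no longer produce empty
directories. A hypothetical layout for two staged elements:

    path/to/checkout/
    ├── hello/           (sources of hello.bst)
    └── import-dev/      (sources of import-dev.bst)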
@@ -1115,3 +1130,29 @@ class Stream():

         with tarfile.open(tar_name, permissions) as tar:
             tar.add(directory, arcname=element_name)
+
+    #############################################################
+    #                     Private Methods                       #
+    #############################################################
+
+    # Check if given location is writable
+    def __check_location_writable(self, location, force=False, tar=False):
+        if not tar:
+            try:
+                os.makedirs(location, exist_ok=True)
+            except OSError as e:
+                raise StreamError("Failed to create checkout directory: '{}'"
+                                  .format(e)) from e
+            if not os.access(location, os.W_OK):
+                raise StreamError("Checkout directory '{}' not writable"
+                                  .format(location))
+            if not force and os.listdir(location):
+                raise StreamError("Checkout directory '{}' not empty"
+                                  .format(location))
+        elif os.path.exists(location) and location != '-':
+            if not os.access(location, os.W_OK):
+                raise StreamError("Output file '{}' not writable"
+                                  .format(location))
+            if not force and os.path.exists(location):
+                raise StreamError("Output file '{}' already exists"
+                                  .format(location))
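The two branches mirror the two call sites seen earlier in this diff:
directory checkouts (tar=False) get the create/writable/empty checks, while
tarball outputs only validate the target file ('-' is special-cased,
presumably because it names standard output rather than a file):

    self.__check_location_writable(location)                          # source_checkout(): directory
    self.__check_location_writable(location, force=force, tar=tar)    # checkout(): directory or tarball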
=====================================
buildstream/_yaml.py
=====================================
@@ -972,7 +972,7 @@ def node_validate(node, valid_keys):
 #
 # The purpose of this is to create a virtual copy-on-write
 # copy of a dictionary, so that mutating it in any way does
-# not effect the underlying dictionaries.
+# not affect the underlying dictionaries.
 #
 # collections.ChainMap covers this already mostly, but fails
 # to record internal state so as to hide keys which have been
=====================================
buildstream/buildelement.py
=====================================
@@ -176,7 +176,7 @@ class BuildElement(Element):

         # Specifying notparallel for a given element effects the
         # cache key, while having the side effect of setting max-jobs to 1,
-        # which is normally automatically resolved and does not effect
+        # which is normally automatically resolved and does not affect
         # the cache key.
         if self.get_variable('notparallel'):
             dictionary['notparallel'] = True
=====================================
buildstream/plugin.py
=====================================
@@ -266,7 +266,7 @@ class Plugin():
         such as an sha256 sum of a tarball content.

         Elements and Sources should implement this by collecting any configurations
-        which could possibly effect the output and return a dictionary of these settings.
+        which could possibly affect the output and return a dictionary of these settings.

         For Sources, this is guaranteed to only be called if
         :func:`Source.get_consistency() <buildstream.source.Source.get_consistency>`
=====================================
buildstream/plugins/elements/autotools.yaml
=====================================
@@ -123,7 +123,7 @@ environment:
   V: 1

 # And dont consider MAKEFLAGS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - MAKEFLAGS
 - V
=====================================
buildstream/plugins/elements/cmake.yaml
=====================================
@@ -66,7 +66,7 @@ environment:
   V: 1

 # And dont consider JOBS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - JOBS
 - V
=====================================
buildstream/plugins/elements/make.yaml
=====================================
@@ -36,7 +36,7 @@ environment:
   V: 1

 # And dont consider MAKEFLAGS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - MAKEFLAGS
 - V
=====================================
buildstream/plugins/elements/manual.yaml
=====================================
@@ -35,7 +35,7 @@ environment:
   V: 1

 # And dont consider MAKEFLAGS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - MAKEFLAGS
 - V
=====================================
buildstream/plugins/elements/meson.yaml
=====================================
@@ -74,6 +74,6 @@ environment:
     %{max-jobs}

 # And dont consider NINJAJOBS as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - NINJAJOBS
=====================================
buildstream/plugins/elements/qmake.yaml
=====================================
@@ -44,7 +44,7 @@ environment:
   V: 1

 # And dont consider MAKEFLAGS or V as something which may
-# effect build output.
+# affect build output.
 environment-nocache:
 - MAKEFLAGS
 - V
=====================================
buildstream/plugins/sources/deb.py
=====================================
@@ -50,7 +50,7 @@ deb - stage files from .deb packages
 """

 import tarfile
-from contextlib import contextmanager, ExitStack
+from contextlib import contextmanager
 import arpy  # pylint: disable=import-error

 from .tar import TarSource
@@ -69,8 +69,7 @@ class DebSource(TarSource):

     @contextmanager
     def _get_tar(self):
-        with ExitStack() as context:
-            deb_file = context.enter_context(open(self._get_mirror_file(), 'rb'))
+        with open(self._get_mirror_file(), 'rb') as deb_file:
             arpy_archive = arpy.Archive(fileobj=deb_file)
             arpy_archive.read_all_headers()
             data_tar_arpy = [v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k][0]
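The simplification works because ExitStack only earns its keep when the set of
context managers is dynamic; with exactly one, a plain with statement is
equivalent. A minimal sketch of the two spellings (file name hypothetical):

    from contextlib import ExitStack

    with ExitStack() as stack:
        f = stack.enter_context(open('archive.deb', 'rb'))
        ...  # f is closed when the stack unwinds

    # ...is just an indirect spelling of:

    with open('archive.deb', 'rb') as f:
        ...  # f is closed when the block exits

The same rewrite is applied to tar.py further below.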
=====================================
buildstream/plugins/sources/git.py
=====================================
@@ -415,7 +415,7 @@ class GitSource(Source):
     def get_unique_key(self):
         # Here we want to encode the local name of the repository and
         # the ref, if the user changes the alias to fetch the same sources
-        # from another location, it should not effect the cache key.
+        # from another location, it should not affect the cache key.
         key = [self.original_url, self.mirror.ref]

         # Only modify the cache key with checkout_submodules if it's something
=====================================
buildstream/plugins/sources/tar.py
=====================================
@@ -57,7 +57,7 @@ tar - stage files from tar archives

 import os
 import tarfile
-from contextlib import contextmanager, ExitStack
+from contextlib import contextmanager
 from tempfile import TemporaryFile

 from buildstream import SourceError
@@ -88,8 +88,7 @@ class TarSource(DownloadableFileSource):
     def _run_lzip(self):
         assert self.host_lzip
         with TemporaryFile() as lzip_stdout:
-            with ExitStack() as context:
-                lzip_file = context.enter_context(open(self._get_mirror_file(), 'r'))
+            with open(self._get_mirror_file(), 'r') as lzip_file:
                 self.call([self.host_lzip, '-d'],
                           stdin=lzip_file,
                           stdout=lzip_stdout)
=====================================
tests/completions/completions.py
=====================================
@@ -15,6 +15,7 @@ MAIN_COMMANDS = [
     'push ',
     'shell ',
     'show ',
+    'source-checkout ',
     'source-bundle ',
     'track ',
     'workspace '

=====================================
tests/frontend/project/elements/checkout-deps.bst
=====================================
@@ -0,0 +1,10 @@
+kind: import
+description: It is important for this element to have both build and runtime dependencies
+sources:
+- kind: local
+  path: files/etc-files
+depends:
+- filename: import-dev.bst
+  type: build
+- filename: import-bin.bst
+  type: runtime

=====================================
tests/frontend/project/files/etc-files/etc/buildstream/config
=====================================
@@ -0,0 +1 @@
+config

=====================================
tests/frontend/source_checkout.py
=====================================
@@ -0,0 +1,121 @@
+import os
+import pytest
+
+from tests.testutils import cli
+
+from buildstream import utils, _yaml
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
+
+# Project directory
+DATA_DIR = os.path.join(
+    os.path.dirname(os.path.realpath(__file__)),
+    'project',
+)
+
+
+def generate_remote_import_element(input_path, output_path):
+    return {
+        'kind': 'import',
+        'sources': [
+            {
+                'kind': 'remote',
+                'url': 'file://{}'.format(input_path),
+                'filename': output_path,
+                'ref': utils.sha256sum(input_path),
+            }
+        ]
+    }
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout(datafiles, cli):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout')
+    target = 'checkout-deps.bst'
+
+    result = cli.run(project=project, args=['source-checkout', target, '--deps', 'none', checkout])
+    result.assert_success()
+
+    assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize('deps', [('build'), ('none'), ('run'), ('all')])
+def test_source_checkout_deps(datafiles, cli, deps):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout')
+    target = 'checkout-deps.bst'
+
+    result = cli.run(project=project, args=['source-checkout', target, '--deps', deps, checkout])
+    result.assert_success()
+
+    # Sources of the target
+    if deps == 'build':
+        assert not os.path.exists(os.path.join(checkout, 'checkout-deps'))
+    else:
+        assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+    # Sources of the target's build dependencies
+    if deps in ('build', 'all'):
+        assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
+    else:
+        assert not os.path.exists(os.path.join(checkout, 'import-dev'))
+
+    # Sources of the target's runtime dependencies
+    if deps in ('run', 'all'):
+        assert os.path.exists(os.path.join(checkout, 'import-bin', 'usr', 'bin', 'hello'))
+    else:
+        assert not os.path.exists(os.path.join(checkout, 'import-bin'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout_except(datafiles, cli):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout')
+    target = 'checkout-deps.bst'
+
+    result = cli.run(project=project, args=['source-checkout', target,
+                                            '--deps', 'all',
+                                            '--except', 'import-bin.bst',
+                                            checkout])
+    result.assert_success()
+
+    # Sources for the target should be present
+    assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+    # Sources for import-bin.bst should not be present
+    assert not os.path.exists(os.path.join(checkout, 'import-bin'))
+
+    # Sources for other dependencies should be present
+    assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize('fetch', [(False), (True)])
+def test_source_checkout_fetch(datafiles, cli, fetch):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout')
+    target = 'remote-import-dev.bst'
+    target_path = os.path.join(project, 'elements', target)
+
+    # Create an element with remote source
+    element = generate_remote_import_element(
+        os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'pony.h'),
+        'pony.h')
+    _yaml.dump(element, target_path)
+
+    # Testing --fetch option requires that we do not have the sources
+    # cached already
+    assert cli.get_element_state(project, target) == 'fetch needed'
+
+    args = ['source-checkout']
+    if fetch:
+        args += ['--fetch']
+    args += [target, checkout]
+    result = cli.run(project=project, args=args)
+
+    if fetch:
+        result.assert_success()
+        assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
+    else:
+        result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources')

=====================================
tests/testutils/artifactshare.py
=====================================
@@ -122,9 +122,8 @@ class ArtifactShare():
         # same algo for creating an artifact reference
         #

-        # Chop off the .bst suffix first
-        assert element_name.endswith('.bst')
-        element_name = element_name[:-4]
+        # Replace path separator and chop off the .bst suffix
+        element_name = os.path.splitext(element_name.replace(os.sep, '-'))[0]

         valid_chars = string.digits + string.ascii_letters + '-._'
         element_name = ''.join([