Phil Dawson pushed to branch phil/source-checkout-options at BuildStream / buildstream
Commits:
- 27932739 by Benjamin Schubert at 2018-11-19T09:46:39Z
- ea2de561 by Benjamin Schubert at 2018-11-19T10:19:24Z
- f23b6031 by Benjamin Schubert at 2018-11-19T11:39:51Z
- a6defc0b by Benjamin Schubert at 2018-11-19T11:39:51Z
- 88089d2d by Benjamin Schubert at 2018-11-19T11:39:51Z
- fd9e46be by Benjamin Schubert at 2018-11-19T11:39:51Z
- d32e0b83 by Benjamin Schubert at 2018-11-19T11:39:51Z
- 6f837118 by Benjamin Schubert at 2018-11-19T12:22:40Z
- 30b72244 by Chandan Singh at 2018-11-19T12:35:01Z
- 76c5d2f8 by Chandan Singh at 2018-11-19T12:35:01Z
- 9f629638 by Chandan Singh at 2018-11-19T13:03:36Z
- bc827cc8 by Abderrahim Kitouni at 2018-11-19T14:55:15Z
- 5fbc5f41 by Valentin David at 2018-11-19T15:31:05Z
- 7f75326c by Phil Dawson at 2018-11-19T16:20:29Z
- 2b353ea5 by Phil Dawson at 2018-11-19T16:20:29Z
- b157354f by Phil Dawson at 2018-11-19T16:20:29Z
- 9021316d by Phil Dawson at 2018-11-19T16:20:29Z
18 changed files:
- buildstream/_artifactcache/cascache.py
- buildstream/_frontend/cli.py
- buildstream/_pipeline.py
- buildstream/_stream.py
- buildstream/plugins/elements/cmake.yaml
- buildstream/plugins/sources/git.py
- buildstream/plugins/sources/pip.py
- buildstream/utils.py
- doc/source/using_commands.rst
- tests/completions/completions.py
- tests/format/variables.py
- tests/frontend/buildtrack.py
- tests/frontend/help.py
- + tests/frontend/project/elements/checkout-deps.bst
- + tests/frontend/project/files/etc-files/etc/buildstream/config
- − tests/frontend/source_bundle.py
- + tests/frontend/source_checkout.py
- + tests/utils/movedirectory.py
Changes:
--- a/buildstream/_artifactcache/cascache.py
+++ b/buildstream/_artifactcache/cascache.py
@@ -24,7 +24,6 @@ import os
 import stat
 import tempfile
 import uuid
-import errno
 from urllib.parse import urlparse
 
 import grpc
@@ -140,17 +139,13 @@ class CASCache():
         checkoutdir = os.path.join(tmpdir, ref)
         self._checkout(checkoutdir, tree)
 
-        os.makedirs(os.path.dirname(dest), exist_ok=True)
         try:
-            os.rename(checkoutdir, dest)
+            utils.move_atomic(checkoutdir, dest)
+        except utils.DirectoryExistsError:
+            # Another process beat us to the rename
+            pass
         except OSError as e:
-            # With rename it's possible to get either ENOTEMPTY or EEXIST
-            # in the case that the destination path is a not empty directory.
-            #
-            # If rename fails with these errors, another process beat
-            # us to it so just ignore.
-            if e.errno not in [errno.ENOTEMPTY, errno.EEXIST]:
-                raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
+            raise CASError("Failed to extract directory for ref '{}': {}".format(ref, e)) from e
 
         return originaldest
 
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -664,6 +664,44 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
                             tar=tar)
 
 
+##################################################################
+#                  Source Checkout Command                       #
+##################################################################
+@cli.command(name='source-checkout', short_help='Checkout sources for an element')
+@click.option('--force', '-f', default=False, is_flag=True,
+              help="Allow files to be overwritten")
+@click.option('--except', 'except_', multiple=True,
+              type=click.Path(readable=False),
+              help="Except certain dependencies")
+@click.option('--deps', '-d', default='none',
+              type=click.Choice(['build', 'none', 'run', 'all']),
+              help='The dependencies whose sources to checkout (default: none)')
+@click.option('--fetch', 'fetch_', default=False, is_flag=True,
+              help='Fetch elements if they are not fetched')
+@click.option('--tar', 'tar_', default=False, is_flag=True,
+              help='Create a tarball from the element\'s sources instead of a '
+                   'file tree. If LOCATION is \'-\', the tarball will be dumped '
+                   'to the standard output.')
+@click.option('--include-build-scripts', 'build_scripts', is_flag=True)
+@click.argument('element',
+                type=click.Path(readable=False))
+@click.argument('location', type=click.Path())
+@click.pass_obj
+def source_checkout(app, element, location, force, deps, fetch_, except_,
+                    tar_, build_scripts):
+    """Checkout sources of an element to the specified location
+    """
+    with app.initialized():
+        app.stream.source_checkout(element,
+                                   location=location,
+                                   force=force,
+                                   deps=deps,
+                                   fetch=fetch_,
+                                   except_targets=except_,
+                                   tar=tar_,
+                                   include_build_scripts=build_scripts)
+
+
 ##################################################################
 #                     Workspace Command                          #
 ##################################################################
@@ -800,34 +838,3 @@ def workspace_list(app):
 
     with app.initialized():
         app.stream.workspace_list()
-
-
-##################################################################
-#                    Source Bundle Command                       #
-##################################################################
-@cli.command(name="source-bundle", short_help="Produce a build bundle to be manually executed")
-@click.option('--except', 'except_', multiple=True,
-              type=click.Path(readable=False),
-              help="Elements to except from the tarball")
-@click.option('--compression', default='gz',
-              type=click.Choice(['none', 'gz', 'bz2', 'xz']),
-              help="Compress the tar file using the given algorithm.")
-@click.option('--track', 'track_', default=False, is_flag=True,
-              help="Track new source references before bundling")
-@click.option('--force', '-f', default=False, is_flag=True,
-              help="Overwrite an existing tarball")
-@click.option('--directory', default=os.getcwd(),
-              help="The directory to write the tarball to")
-@click.argument('element',
-                type=click.Path(readable=False))
-@click.pass_obj
-def source_bundle(app, element, force, directory,
-                  track_, compression, except_):
-    """Produce a source bundle to be manually executed
-    """
-    with app.initialized():
-        app.stream.source_bundle(element, directory,
-                                 track_first=track_,
-                                 force=force,
-                                 compression=compression,
-                                 except_targets=except_)
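
Taken together, these two hunks replace `bst source-bundle` with the new `bst source-checkout` command. A rough smoke-test sketch using click's own test runner (the element name and checkout directory are hypothetical, and a BuildStream project is assumed in the current directory):

    from click.testing import CliRunner

    from buildstream._frontend.cli import cli

    runner = CliRunner()
    # Equivalent to: bst source-checkout --fetch --deps run hello.bst srcdir
    result = runner.invoke(cli, ['source-checkout', '--fetch', '--deps', 'run',
                                 'hello.bst', 'srcdir'])
    print(result.exit_code, result.output)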
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -370,7 +370,7 @@ class Pipeline():
                 detail += "  Element: {} is inconsistent\n".format(element._get_full_name())
                 for source in element.sources():
                     if source._get_consistency() == Consistency.INCONSISTENT:
-                        detail += "    Source {} is missing ref\n".format(source)
+                        detail += "    {} is missing ref\n".format(source)
                 detail += '\n'
             detail += "Try tracking these elements first with `bst track`\n"
 
@@ -383,6 +383,33 @@ class Pipeline():
                 detail += "  " + element._get_full_name() + "\n"
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
 
+    # assert_sources_cached()
+    #
+    # Asserts that sources for the given list of elements are cached.
+    #
+    # Args:
+    #    elements (list): The list of elements
+    #
+    def assert_sources_cached(self, elements):
+        uncached = []
+        with self._context.timed_activity("Checking sources"):
+            for element in elements:
+                if element._get_consistency() != Consistency.CACHED:
+                    uncached.append(element)
+
+        if uncached:
+            detail = "Sources are not cached for the following elements:\n\n"
+            for element in uncached:
+                detail += "  Following sources for element: {} are not cached:\n".format(element._get_full_name())
+                for source in element.sources():
+                    if source._get_consistency() != Consistency.CACHED:
+                        detail += "    {}\n".format(source)
+                detail += '\n'
+            detail += "Try fetching these elements first with `bst fetch`,\n" + \
+                      "or run this command with the `--fetch` option\n"
+
+            raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
+
     #############################################################
     #                     Private Methods                       #
     #############################################################
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -379,27 +379,7 @@ class Stream():
         elements, _ = self._load((target,), (), fetch_subprojects=True)
         target = elements[0]
 
-        if not tar:
-            try:
-                os.makedirs(location, exist_ok=True)
-            except OSError as e:
-                raise StreamError("Failed to create checkout directory: '{}'"
-                                  .format(e)) from e
-
-        if not tar:
-            if not os.access(location, os.W_OK):
-                raise StreamError("Checkout directory '{}' not writable"
-                                  .format(location))
-            if not force and os.listdir(location):
-                raise StreamError("Checkout directory '{}' not empty"
-                                  .format(location))
-        elif os.path.exists(location) and location != '-':
-            if not os.access(location, os.W_OK):
-                raise StreamError("Output file '{}' not writable"
-                                  .format(location))
-            if not force and os.path.exists(location):
-                raise StreamError("Output file '{}' already exists"
-                                  .format(location))
+        self._check_location_writable(location, force=force, tar=tar)
 
         # Stage deps into a temporary sandbox first
         try:
443 | 423 |
raise StreamError("Error while staging dependencies into a sandbox"
|
444 | 424 |
": '{}'".format(e), detail=e.detail, reason=e.reason) from e
|
445 | 425 |
|
426 |
+ # source_checkout()
|
|
427 |
+ #
|
|
428 |
+ # Checkout sources of the target element to the specified location
|
|
429 |
+ #
|
|
430 |
+ # Args:
|
|
431 |
+ # target (str): The target element whose sources to checkout
|
|
432 |
+ # location (str): Location to checkout the sources to
|
|
433 |
+ # deps (str): The dependencies to checkout
|
|
434 |
+ # fetch (bool): Whether to fetch missing sources
|
|
435 |
+ # except_targets (list): List of targets to except from staging
|
|
436 |
+ #
|
|
437 |
+ def source_checkout(self, target, *,
|
|
438 |
+ location=None,
|
|
439 |
+ force=False,
|
|
440 |
+ deps='none',
|
|
441 |
+ fetch=False,
|
|
442 |
+ except_targets=(),
|
|
443 |
+ tar=False,
|
|
444 |
+ include_build_scripts=False):
|
|
445 |
+ |
|
446 |
+ self._check_location_writable(location, force=force, tar=tar)
|
|
447 |
+ |
|
448 |
+ elements, _ = self._load((target,), (),
|
|
449 |
+ selection=deps,
|
|
450 |
+ except_targets=except_targets,
|
|
451 |
+ fetch_subprojects=True)
|
|
452 |
+ |
|
453 |
+ # Assert all sources are cached
|
|
454 |
+ if fetch:
|
|
455 |
+ self._fetch(elements)
|
|
456 |
+ self._pipeline.assert_sources_cached(elements)
|
|
457 |
+ |
|
458 |
+ # Stage all sources determined by scope
|
|
459 |
+ try:
|
|
460 |
+ self._source_checkout(elements, location, force, deps, fetch,
|
|
461 |
+ except_targets, tar, include_build_scripts)
|
|
462 |
+ except BstError as e:
|
|
463 |
+ raise StreamError("Error while writing sources"
|
|
464 |
+ ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
|
|
465 |
+ |
|
446 | 466 |
# workspace_open
|
447 | 467 |
#
|
448 | 468 |
# Open a project workspace
|
... | ... | @@ -650,87 +670,6 @@ class Stream(): |
650 | 670 |
'workspaces': workspaces
|
651 | 671 |
})
|
652 | 672 |
|
653 |
- # source_bundle()
|
|
654 |
- #
|
|
655 |
- # Create a host buildable tarball bundle for the given target.
|
|
656 |
- #
|
|
657 |
- # Args:
|
|
658 |
- # target (str): The target element to bundle
|
|
659 |
- # directory (str): The directory to output the tarball
|
|
660 |
- # track_first (bool): Track new source references before bundling
|
|
661 |
- # compression (str): The compression type to use
|
|
662 |
- # force (bool): Overwrite an existing tarball
|
|
663 |
- #
|
|
664 |
- def source_bundle(self, target, directory, *,
|
|
665 |
- track_first=False,
|
|
666 |
- force=False,
|
|
667 |
- compression="gz",
|
|
668 |
- except_targets=()):
|
|
669 |
- |
|
670 |
- if track_first:
|
|
671 |
- track_targets = (target,)
|
|
672 |
- else:
|
|
673 |
- track_targets = ()
|
|
674 |
- |
|
675 |
- elements, track_elements = self._load((target,), track_targets,
|
|
676 |
- selection=PipelineSelection.ALL,
|
|
677 |
- except_targets=except_targets,
|
|
678 |
- track_selection=PipelineSelection.ALL,
|
|
679 |
- fetch_subprojects=True)
|
|
680 |
- |
|
681 |
- # source-bundle only supports one target
|
|
682 |
- target = self.targets[0]
|
|
683 |
- |
|
684 |
- self._message(MessageType.INFO, "Bundling sources for target {}".format(target.name))
|
|
685 |
- |
|
686 |
- # Find the correct filename for the compression algorithm
|
|
687 |
- tar_location = os.path.join(directory, target.normal_name + ".tar")
|
|
688 |
- if compression != "none":
|
|
689 |
- tar_location += "." + compression
|
|
690 |
- |
|
691 |
- # Attempt writing a file to generate a good error message
|
|
692 |
- # early
|
|
693 |
- #
|
|
694 |
- # FIXME: A bit hackish
|
|
695 |
- try:
|
|
696 |
- open(tar_location, mode="x")
|
|
697 |
- os.remove(tar_location)
|
|
698 |
- except IOError as e:
|
|
699 |
- raise StreamError("Cannot write to {0}: {1}"
|
|
700 |
- .format(tar_location, e)) from e
|
|
701 |
- |
|
702 |
- # Fetch and possibly track first
|
|
703 |
- #
|
|
704 |
- self._fetch(elements, track_elements=track_elements)
|
|
705 |
- |
|
706 |
- # We don't use the scheduler for this as it is almost entirely IO
|
|
707 |
- # bound.
|
|
708 |
- |
|
709 |
- # Create a temporary directory to build the source tree in
|
|
710 |
- builddir = self._context.builddir
|
|
711 |
- os.makedirs(builddir, exist_ok=True)
|
|
712 |
- prefix = "{}-".format(target.normal_name)
|
|
713 |
- |
|
714 |
- with TemporaryDirectory(prefix=prefix, dir=builddir) as tempdir:
|
|
715 |
- source_directory = os.path.join(tempdir, 'source')
|
|
716 |
- try:
|
|
717 |
- os.makedirs(source_directory)
|
|
718 |
- except OSError as e:
|
|
719 |
- raise StreamError("Failed to create directory: {}"
|
|
720 |
- .format(e)) from e
|
|
721 |
- |
|
722 |
- # Any elements that don't implement _write_script
|
|
723 |
- # should not be included in the later stages.
|
|
724 |
- elements = [
|
|
725 |
- element for element in elements
|
|
726 |
- if self._write_element_script(source_directory, element)
|
|
727 |
- ]
|
|
728 |
- |
|
729 |
- self._write_element_sources(tempdir, elements)
|
|
730 |
- self._write_build_script(tempdir, elements)
|
|
731 |
- self._collect_sources(tempdir, tar_location,
|
|
732 |
- target.normal_name, compression)
|
|
733 |
- |
|
734 | 673 |
# redirect_element_names()
|
735 | 674 |
#
|
736 | 675 |
# Takes a list of element names and returns a list where elements have been
|
... | ... | @@ -1068,6 +1007,39 @@ class Stream(): |
1068 | 1007 |
self._enqueue_plan(fetch_plan)
|
1069 | 1008 |
self._run()
|
1070 | 1009 |
|
1010 |
+ # _check_location_writable()
|
|
1011 |
+ #
|
|
1012 |
+ # Check if given location is writable.
|
|
1013 |
+ #
|
|
1014 |
+ # Args:
|
|
1015 |
+ # location (str): Destination path
|
|
1016 |
+ # force (bool): Allow files to be overwritten
|
|
1017 |
+ # tar (bool): Whether destination is a tarball
|
|
1018 |
+ #
|
|
1019 |
+ # Raises:
|
|
1020 |
+ # (StreamError): If the destination is not writable
|
|
1021 |
+ #
|
|
1022 |
+ def _check_location_writable(self, location, force=False, tar=False):
|
|
1023 |
+ if not tar:
|
|
1024 |
+ try:
|
|
1025 |
+ os.makedirs(location, exist_ok=True)
|
|
1026 |
+ except OSError as e:
|
|
1027 |
+ raise StreamError("Failed to create destination directory: '{}'"
|
|
1028 |
+ .format(e)) from e
|
|
1029 |
+ if not os.access(location, os.W_OK):
|
|
1030 |
+ raise StreamError("Destination directory '{}' not writable"
|
|
1031 |
+ .format(location))
|
|
1032 |
+ if not force and os.listdir(location):
|
|
1033 |
+ raise StreamError("Destination directory '{}' not empty"
|
|
1034 |
+ .format(location))
|
|
1035 |
+ elif os.path.exists(location) and location != '-':
|
|
1036 |
+ if not os.access(location, os.W_OK):
|
|
1037 |
+ raise StreamError("Output file '{}' not writable"
|
|
1038 |
+ .format(location))
|
|
1039 |
+ if not force and os.path.exists(location):
|
|
1040 |
+ raise StreamError("Output file '{}' already exists"
|
|
1041 |
+ .format(location))
|
|
1042 |
+ |
|
1071 | 1043 |
# Helper function for checkout()
|
1072 | 1044 |
#
|
1073 | 1045 |
def _checkout_hardlinks(self, sandbox_vroot, directory):
|
@@ -1078,6 +1050,23 @@ class Stream():
 
         sandbox_vroot.export_files(directory, can_link=True, can_destroy=True)
 
+    # Helper function for source_checkout()
+    def _source_checkout(self, elements,
+                         location=None,
+                         force=False,
+                         deps='none',
+                         fetch=False,
+                         except_targets=(),
+                         tar=False,
+                         include_build_scripts=False):
+        location = os.path.abspath(location)
+        if tar:
+            self._create_source_tarball(location, elements, include_build_scripts)
+        else:
+            self._write_element_sources(location, elements)
+            if include_build_scripts:
+                self._write_build_scripts(location, elements)
+
     # Write the element build script to the given directory
     def _write_element_script(self, directory, element):
         try:
1089 | 1078 |
# Write all source elements to the given directory
|
1090 | 1079 |
def _write_element_sources(self, directory, elements):
|
1091 | 1080 |
for element in elements:
|
1092 |
- source_dir = os.path.join(directory, "source")
|
|
1093 |
- element_source_dir = os.path.join(source_dir, element.normal_name)
|
|
1094 |
- os.makedirs(element_source_dir)
|
|
1081 |
+ element_source_dir = self._get_element_dirname(directory, element)
|
|
1082 |
+ if list(element.sources()):
|
|
1083 |
+ os.makedirs(element_source_dir)
|
|
1084 |
+ element._stage_sources_at(element_source_dir)
|
|
1085 |
+ |
|
1086 |
+ # Create a tarball containing the sources of each element in elements
|
|
1087 |
+ def _create_source_tarball(self, tar_name, elements, include_build_scripts):
|
|
1088 |
+ # Stage sources into a temporary directory then create a tarball from this
|
|
1089 |
+ with TemporaryDirectory() as tmpdir:
|
|
1090 |
+ self._write_element_sources(tmpdir, elements)
|
|
1091 |
+ if include_build_scripts:
|
|
1092 |
+ self._write_build_scripts(tmpdir, elements)
|
|
1093 |
+ self._create_tar_from_directory(tar_name, tmpdir)
|
|
1094 |
+ |
|
1095 |
+ # Create a tarball from the content of directory
|
|
1096 |
+ def _create_tar_from_directory(self, tar_name, directory):
|
|
1097 |
+ try:
|
|
1098 |
+ with tarfile.open(name=tar_name, mode='w') as tf:
|
|
1099 |
+ for item in os.listdir(str(directory)):
|
|
1100 |
+ file_to_add = os.path.join(directory, item)
|
|
1101 |
+ tf.add(file_to_add, arcname=item)
|
|
1102 |
+ except OSError as e:
|
|
1103 |
+ # If we have a partially constructed tar file, clean up after ourselves
|
|
1104 |
+ if os.path.exists(tar_name):
|
|
1105 |
+ os.remove(tar_name)
|
|
1106 |
+ raise StreamError("Failed to create tar archieve: {}".format(e)) from e
|
|
1095 | 1107 |
|
1096 |
- element._stage_sources_at(element_source_dir)
|
|
1108 |
+ # Write all the build_scripts for elements in the directory location
|
|
1109 |
+ def _write_build_scripts(self, location, elements):
|
|
1110 |
+ for element in elements:
|
|
1111 |
+ self._write_element_script(location, element)
|
|
1112 |
+ self._write_master_build_script(location, elements)
|
|
1097 | 1113 |
|
1098 | 1114 |
# Write a master build script to the sandbox
|
1099 |
- def _write_build_script(self, directory, elements):
|
|
1115 |
+ def _write_master_build_script(self, directory, elements):
|
|
1100 | 1116 |
|
1101 | 1117 |
module_string = ""
|
1102 | 1118 |
for element in elements:
|
@@ -1122,3 +1138,25 @@ class Stream():
 
         with tarfile.open(tar_name, permissions) as tar:
             tar.add(directory, arcname=element_name)
+
+    # _get_element_dirname()
+    #
+    # Get path to directory for an element based on its normal name.
+    #
+    # For cross-junction elements, the path will be prefixed with the name
+    # of the junction element.
+    #
+    # Args:
+    #    directory (str): path to base directory
+    #    element (Element): the element
+    #
+    # Returns:
+    #    (str): Path to directory for this element
+    #
+    def _get_element_dirname(self, directory, element):
+        parts = [element.normal_name]
+        while element._get_project() != self._project:
+            element = element._get_project().junction
+            parts.append(element.normal_name)
+
+        return os.path.join(directory, *reversed(parts))
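
The junction handling in _get_element_dirname() is the subtle part: element names are only unique within one project, so sources of cross-junction elements are nested under their junction's name. A self-contained sketch of the path construction, with made-up names:

    import os

    def element_dirname(directory, parts):
        # parts holds the element's normal name followed by each enclosing
        # junction's normal name, exactly as the while loop above collects them
        return os.path.join(directory, *reversed(parts))

    # A top-level element stages directly under the checkout root...
    assert element_dirname('checkout', ['hello']) == os.path.join('checkout', 'hello')
    # ...while the same element behind a junction is prefixed with the junction name
    assert element_dirname('checkout', ['hello', 'subproject-junction']) == \
        os.path.join('checkout', 'subproject-junction', 'hello')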
--- a/buildstream/plugins/elements/cmake.yaml
+++ b/buildstream/plugins/elements/cmake.yaml
@@ -19,7 +19,7 @@ variables:
   cmake-args: |
 
     -DCMAKE_INSTALL_PREFIX:PATH="%{prefix}" \
-    -DCMAKE_INSTALL_LIBDIR=%{lib} %{cmake-extra} %{cmake-global} %{cmake-local}
+    -DCMAKE_INSTALL_LIBDIR:PATH="%{lib}" %{cmake-extra} %{cmake-global} %{cmake-local}
 
   cmake: |
 
--- a/buildstream/plugins/sources/git.py
+++ b/buildstream/plugins/sources/git.py
@@ -86,7 +86,6 @@ This plugin also utilises the following configurable core plugin warnings:
 """
 
 import os
-import errno
 import re
 import shutil
 from collections.abc import Mapping
@@ -97,6 +96,7 @@ from configparser import RawConfigParser
 from buildstream import Source, SourceError, Consistency, SourceFetcher
 from buildstream import utils
 from buildstream.plugin import CoreWarnings
+from buildstream.utils import move_atomic, DirectoryExistsError
 
 GIT_MODULES = '.gitmodules'
 
@@ -141,21 +141,16 @@ class GitMirror(SourceFetcher):
                          fail="Failed to clone git repository {}".format(url),
                          fail_temporarily=True)
 
-        # Attempt atomic rename into destination, this will fail if
-        # another process beat us to the punch
         try:
-            os.rename(tmpdir, self.mirror)
+            move_atomic(tmpdir, self.mirror)
+        except DirectoryExistsError:
+            # Another process was quicker to download this repository.
+            # Let's discard our own
+            self.source.status("{}: Discarding duplicate clone of {}"
+                               .format(self.source, url))
         except OSError as e:
-
-            # When renaming and the destination repo already exists, os.rename()
-            # will fail with ENOTEMPTY, since an empty directory will be silently
-            # replaced
-            if e.errno == errno.ENOTEMPTY:
-                self.source.status("{}: Discarding duplicate clone of {}"
-                                   .format(self.source, url))
-            else:
-                raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
-                                  .format(self.source, url, tmpdir, self.mirror, e)) from e
+            raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
+                              .format(self.source, url, tmpdir, self.mirror, e)) from e
 
     def _fetch(self, alias_override=None):
         url = self.source.translate_url(self.url,
68 | 68 |
The ``pip`` plugin is available since :ref:`format version 16 <project_format_version>`
|
69 | 69 |
"""
|
70 | 70 |
|
71 |
-import errno
|
|
72 | 71 |
import hashlib
|
73 | 72 |
import os
|
74 | 73 |
import re
|
... | ... | @@ -80,6 +79,7 @@ _PYPI_INDEX_URL = 'https://pypi.org/simple/' |
80 | 79 |
|
81 | 80 |
# Used only for finding pip command
|
82 | 81 |
_PYTHON_VERSIONS = [
|
82 |
+ 'python', # when running in a venv, we might not have the exact version
|
|
83 | 83 |
'python2.7',
|
84 | 84 |
'python3.0',
|
85 | 85 |
'python3.1',
|
... | ... | @@ -192,13 +192,14 @@ class PipSource(Source): |
192 | 192 |
# process has fetched the sources before us and ensure that we do
|
193 | 193 |
# not raise an error in that case.
|
194 | 194 |
try:
|
195 |
- os.makedirs(self._mirror)
|
|
196 |
- os.rename(package_dir, self._mirror)
|
|
197 |
- except FileExistsError:
|
|
198 |
- return
|
|
195 |
+ utils.move_atomic(package_dir, self._mirror)
|
|
196 |
+ except utils.DirectoryExistsError:
|
|
197 |
+ # Another process has beaten us and has fetched the sources
|
|
198 |
+ # before us.
|
|
199 |
+ pass
|
|
199 | 200 |
except OSError as e:
|
200 |
- if e.errno != errno.ENOTEMPTY:
|
|
201 |
- raise
|
|
201 |
+ raise SourceError("{}: Failed to move downloaded pip packages from '{}' to '{}': {}"
|
|
202 |
+ .format(self, package_dir, self._mirror, e)) from e
|
|
202 | 203 |
|
203 | 204 |
def stage(self, directory):
|
204 | 205 |
with self.timed_activity("Staging Python packages", silent_nested=True):
|
... | ... | @@ -72,6 +72,11 @@ class ProgramNotFoundError(BstError): |
72 | 72 |
super().__init__(message, domain=ErrorDomain.PROG_NOT_FOUND, reason=reason)
|
73 | 73 |
|
74 | 74 |
|
75 |
+class DirectoryExistsError(OSError):
|
|
76 |
+ """Raised when a `os.rename` is attempted but the destination is an existing directory.
|
|
77 |
+ """
|
|
78 |
+ |
|
79 |
+ |
|
75 | 80 |
class FileListResult():
|
76 | 81 |
"""An object which stores the result of one of the operations
|
77 | 82 |
which run on a list of files.
|
... | ... | @@ -500,6 +505,38 @@ def get_bst_version(): |
500 | 505 |
.format(__version__))
|
501 | 506 |
|
502 | 507 |
|
508 |
+def move_atomic(source, destination, ensure_parents=True):
|
|
509 |
+ """Move the source to the destination using atomic primitives.
|
|
510 |
+ |
|
511 |
+ This uses `os.rename` to move a file or directory to a new destination.
|
|
512 |
+ It wraps some `OSError` thrown errors to ensure their handling is correct.
|
|
513 |
+ |
|
514 |
+ The main reason for this to exist is that rename can throw different errors
|
|
515 |
+ for the same symptom (https://www.unix.com/man-page/POSIX/3posix/rename/).
|
|
516 |
+ |
|
517 |
+ We are especially interested here in the case when the destination already
|
|
518 |
+ exists. In this case, either EEXIST or ENOTEMPTY are thrown.
|
|
519 |
+ |
|
520 |
+ In order to ensure consistent handling of these exceptions, this function
|
|
521 |
+ should be used instead of `os.rename`
|
|
522 |
+ |
|
523 |
+ Args:
|
|
524 |
+ source (str or Path): source to rename
|
|
525 |
+ destination (str or Path): destination to which to move the source
|
|
526 |
+ ensure_parents (bool): Whether or not to create the parent's directories
|
|
527 |
+ of the destination (default: True)
|
|
528 |
+ """
|
|
529 |
+ if ensure_parents:
|
|
530 |
+ os.makedirs(os.path.dirname(str(destination)), exist_ok=True)
|
|
531 |
+ |
|
532 |
+ try:
|
|
533 |
+ os.rename(str(source), str(destination))
|
|
534 |
+ except OSError as exc:
|
|
535 |
+ if exc.errno in (errno.EEXIST, errno.ENOTEMPTY):
|
|
536 |
+ raise DirectoryExistsError(*exc.args) from exc
|
|
537 |
+ raise
|
|
538 |
+ |
|
539 |
+ |
|
503 | 540 |
@contextmanager
|
504 | 541 |
def save_file_atomic(filename, mode='w', *, buffering=-1, encoding=None,
|
505 | 542 |
errors=None, newline=None, closefd=True, opener=None, tempdir=None):
|
... | ... | @@ -86,13 +86,6 @@ project's main directory. |
86 | 86 |
|
87 | 87 |
----
|
88 | 88 |
|
89 |
-.. _invoking_source_bundle:
|
|
90 |
- |
|
91 |
-.. click:: buildstream._frontend.cli:source_bundle
|
|
92 |
- :prog: bst source bundle
|
|
93 |
- |
|
94 |
-----
|
|
95 |
- |
|
96 | 89 |
.. _invoking_workspace:
|
97 | 90 |
|
98 | 91 |
.. click:: buildstream._frontend.cli:workspace
|
... | ... | @@ -15,7 +15,7 @@ MAIN_COMMANDS = [ |
15 | 15 |
'push ',
|
16 | 16 |
'shell ',
|
17 | 17 |
'show ',
|
18 |
- 'source-bundle ',
|
|
18 |
+ 'source-checkout ',
|
|
19 | 19 |
'track ',
|
20 | 20 |
'workspace '
|
21 | 21 |
]
|
--- a/tests/format/variables.py
+++ b/tests/format/variables.py
@@ -20,7 +20,7 @@ DATA_DIR = os.path.join(
     ('autotools.bst', 'make-install', "make -j1 DESTDIR=\"/buildstream-install\" install"),
     ('cmake.bst', 'cmake',
      "cmake -B_builddir -H\".\" -G\"Unix Makefiles\" " + "-DCMAKE_INSTALL_PREFIX:PATH=\"/usr\" \\\n" +
-     "-DCMAKE_INSTALL_LIBDIR=lib "),
+     "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\" "),
     ('distutils.bst', 'python-install',
      "python3 ./setup.py install --prefix \"/usr\" \\\n" +
      "--root \"/buildstream-install\""),
@@ -46,7 +46,7 @@ def test_defaults(cli, datafiles, tmpdir, target, varname, expected):
     ('autotools.bst', 'make-install', "make -j1 DESTDIR=\"/custom/install/root\" install"),
     ('cmake.bst', 'cmake',
      "cmake -B_builddir -H\".\" -G\"Ninja\" " + "-DCMAKE_INSTALL_PREFIX:PATH=\"/opt\" \\\n" +
-     "-DCMAKE_INSTALL_LIBDIR=lib "),
+     "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\" "),
     ('distutils.bst', 'python-install',
      "python3 ./setup.py install --prefix \"/opt\" \\\n" +
      "--root \"/custom/install/root\""),
--- a/tests/frontend/buildtrack.py
+++ b/tests/frontend/buildtrack.py
@@ -115,6 +115,7 @@ def test_build_track(cli, datafiles, tmpdir, ref_storage,
         args += ['0.bst']
 
     result = cli.run(project=project, silent=True, args=args)
+    result.assert_success()
     tracked_elements = result.get_tracked_elements()
 
     assert set(tracked_elements) == set(tracked)
25 | 25 |
('push'),
|
26 | 26 |
('shell'),
|
27 | 27 |
('show'),
|
28 |
- ('source-bundle'),
|
|
29 | 28 |
('track'),
|
30 | 29 |
('workspace')
|
31 | 30 |
])
|
1 |
+kind: import
|
|
2 |
+description: It is important for this element to have both build and runtime dependencies
|
|
3 |
+sources:
|
|
4 |
+- kind: local
|
|
5 |
+ path: files/etc-files
|
|
6 |
+depends:
|
|
7 |
+- filename: import-dev.bst
|
|
8 |
+ type: build
|
|
9 |
+- filename: import-bin.bst
|
|
10 |
+ type: runtime
|
1 |
+config
|
1 |
-#
|
|
2 |
-# Copyright (C) 2018 Bloomberg Finance LP
|
|
3 |
-#
|
|
4 |
-# This program is free software; you can redistribute it and/or
|
|
5 |
-# modify it under the terms of the GNU Lesser General Public
|
|
6 |
-# License as published by the Free Software Foundation; either
|
|
7 |
-# version 2 of the License, or (at your option) any later version.
|
|
8 |
-#
|
|
9 |
-# This library is distributed in the hope that it will be useful,
|
|
10 |
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
11 |
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
12 |
-# Lesser General Public License for more details.
|
|
13 |
-#
|
|
14 |
-# You should have received a copy of the GNU Lesser General Public
|
|
15 |
-# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
16 |
-#
|
|
17 |
-# Authors: Chandan Singh <csingh43 bloomberg net>
|
|
18 |
-#
|
|
19 |
- |
|
20 |
-import os
|
|
21 |
-import tarfile
|
|
22 |
- |
|
23 |
-import pytest
|
|
24 |
- |
|
25 |
-from tests.testutils import cli
|
|
26 |
- |
|
27 |
-# Project directory
|
|
28 |
-DATA_DIR = os.path.join(
|
|
29 |
- os.path.dirname(os.path.realpath(__file__)),
|
|
30 |
- "project",
|
|
31 |
-)
|
|
32 |
- |
|
33 |
- |
|
34 |
-@pytest.mark.datafiles(DATA_DIR)
|
|
35 |
-def test_source_bundle(cli, tmpdir, datafiles):
|
|
36 |
- project_path = os.path.join(datafiles.dirname, datafiles.basename)
|
|
37 |
- element_name = 'source-bundle/source-bundle-hello.bst'
|
|
38 |
- normal_name = 'source-bundle-source-bundle-hello'
|
|
39 |
- |
|
40 |
- # Verify that we can correctly produce a source-bundle
|
|
41 |
- args = ['source-bundle', element_name, '--directory', str(tmpdir)]
|
|
42 |
- result = cli.run(project=project_path, args=args)
|
|
43 |
- result.assert_success()
|
|
44 |
- |
|
45 |
- # Verify that the source-bundle contains our sources and a build script
|
|
46 |
- with tarfile.open(os.path.join(str(tmpdir), '{}.tar.gz'.format(normal_name))) as bundle:
|
|
47 |
- assert os.path.join(normal_name, 'source', normal_name, 'llamas.txt') in bundle.getnames()
|
|
48 |
- assert os.path.join(normal_name, 'build.sh') in bundle.getnames()
|
--- /dev/null
+++ b/tests/frontend/source_checkout.py
@@ -0,0 +1,191 @@
+import os
+import pytest
+import tarfile
+from pathlib import Path
+
+from tests.testutils import cli
+
+from buildstream import utils, _yaml
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
+
+# Project directory
+DATA_DIR = os.path.join(
+    os.path.dirname(os.path.realpath(__file__)),
+    'project',
+)
+
+
+def generate_remote_import_element(input_path, output_path):
+    return {
+        'kind': 'import',
+        'sources': [
+            {
+                'kind': 'remote',
+                'url': 'file://{}'.format(input_path),
+                'filename': output_path,
+                'ref': utils.sha256sum(input_path),
+            }
+        ]
+    }
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout(datafiles, cli):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout')
+    target = 'checkout-deps.bst'
+
+    result = cli.run(project=project, args=['source-checkout', target, '--deps', 'none', checkout])
+    result.assert_success()
+
+    assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize('force_flag', ['--force', '-f'])
+def test_source_checkout_force(datafiles, cli, force_flag):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout')
+    target = 'checkout-deps.bst'
+
+    os.makedirs(os.path.join(checkout, 'some-thing'))
+    # Path(os.path.join(checkout, 'some-file')).touch()
+
+    result = cli.run(project=project, args=['source-checkout', force_flag, target, '--deps', 'none', checkout])
+    result.assert_success()
+
+    assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout_tar(datafiles, cli):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout.tar')
+    target = 'checkout-deps.bst'
+
+    result = cli.run(project=project, args=['source-checkout', '--tar', target, '--deps', 'none', checkout])
+    result.assert_success()
+
+    assert os.path.exists(checkout)
+    with tarfile.open(checkout) as tf:
+        expected_content = os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config')
+        tar_members = [f.name for f in tf]
+        for member in tar_members:
+            assert member in expected_content
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize('deps', [('build'), ('none'), ('run'), ('all')])
+def test_source_checkout_deps(datafiles, cli, deps):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout')
+    target = 'checkout-deps.bst'
+
+    result = cli.run(project=project, args=['source-checkout', target, '--deps', deps, checkout])
+    result.assert_success()
+
+    # Sources of the target
+    if deps == 'build':
+        assert not os.path.exists(os.path.join(checkout, 'checkout-deps'))
+    else:
+        assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+    # Sources of the target's build dependencies
+    if deps in ('build', 'all'):
+        assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
+    else:
+        assert not os.path.exists(os.path.join(checkout, 'import-dev'))
+
+    # Sources of the target's runtime dependencies
+    if deps in ('run', 'all'):
+        assert os.path.exists(os.path.join(checkout, 'import-bin', 'usr', 'bin', 'hello'))
+    else:
+        assert not os.path.exists(os.path.join(checkout, 'import-bin'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout_except(datafiles, cli):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout')
+    target = 'checkout-deps.bst'
+
+    result = cli.run(project=project, args=['source-checkout', target,
+                                            '--deps', 'all',
+                                            '--except', 'import-bin.bst',
+                                            checkout])
+    result.assert_success()
+
+    # Sources for the target should be present
+    assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
+
+    # Sources for import-bin.bst should not be present
+    assert not os.path.exists(os.path.join(checkout, 'import-bin'))
+
+    # Sources for other dependencies should be present
+    assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize('fetch', [(False), (True)])
+def test_source_checkout_fetch(datafiles, cli, fetch):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    checkout = os.path.join(cli.directory, 'source-checkout')
+    target = 'remote-import-dev.bst'
+    target_path = os.path.join(project, 'elements', target)
+
+    # Create an element with remote source
+    element = generate_remote_import_element(
+        os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'pony.h'),
+        'pony.h')
+    _yaml.dump(element, target_path)
+
+    # Testing --fetch option requires that we do not have the sources
+    # cached already
+    assert cli.get_element_state(project, target) == 'fetch needed'
+
+    args = ['source-checkout']
+    if fetch:
+        args += ['--fetch']
+    args += [target, checkout]
+    result = cli.run(project=project, args=args)
+
+    if fetch:
+        result.assert_success()
+        assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
+    else:
+        result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources')
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout_build_scripts(cli, tmpdir, datafiles):
+    project_path = os.path.join(datafiles.dirname, datafiles.basename)
+    element_name = 'source-bundle/source-bundle-hello.bst'
+    normal_name = 'source-bundle-source-bundle-hello'
+    checkout = os.path.join(str(tmpdir), 'source-checkout')
+
+    args = ['source-checkout', '--include-build-scripts', element_name, checkout]
+    result = cli.run(project=project_path, args=args)
+    result.assert_success()
+
+    # There should be a script for each element (just one in this case) and a top level build script
+    expected_scripts = ['build.sh', 'build-' + normal_name]
+    for script in expected_scripts:
+        assert script in os.listdir(checkout)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_source_checkout_tar_buildscripts(cli, tmpdir, datafiles):
+    project_path = os.path.join(datafiles.dirname, datafiles.basename)
+    element_name = 'source-bundle/source-bundle-hello.bst'
+    normal_name = 'source-bundle-source-bundle-hello'
+    tar_file = os.path.join(str(tmpdir), 'source-checkout.tar')
+
+    args = ['source-checkout', '--include-build-scripts', '--tar', element_name, tar_file]
+    result = cli.run(project=project_path, args=args)
+    result.assert_success()
+
+    expected_scripts = ['build.sh', 'build-' + normal_name]
+
+    with tarfile.open(tar_file, 'r') as tf:
+        for script in expected_scripts:
+            assert script in tf.getnames()

--- /dev/null
+++ b/tests/utils/movedirectory.py
@@ -0,0 +1,88 @@
+import pytest
+
+from buildstream.utils import move_atomic, DirectoryExistsError
+
+
+@pytest.fixture
+def src(tmp_path):
+    src = tmp_path.joinpath("src")
+    src.mkdir()
+
+    with src.joinpath("test").open("w") as fp:
+        fp.write("test")
+
+    return src
+
+
+def test_move_to_empty_dir(src, tmp_path):
+    dst = tmp_path.joinpath("dst")
+
+    move_atomic(src, dst)
+
+    assert dst.joinpath("test").exists()
+
+
+def test_move_to_empty_dir_create_parents(src, tmp_path):
+    dst = tmp_path.joinpath("nested/dst")
+
+    move_atomic(src, dst)
+    assert dst.joinpath("test").exists()
+
+
+def test_move_to_empty_dir_no_create_parents(src, tmp_path):
+    dst = tmp_path.joinpath("nested/dst")
+
+    with pytest.raises(FileNotFoundError):
+        move_atomic(src, dst, ensure_parents=False)
+
+
+def test_move_non_existing_dir(tmp_path):
+    dst = tmp_path.joinpath("dst")
+    src = tmp_path.joinpath("src")
+
+    with pytest.raises(FileNotFoundError):
+        move_atomic(src, dst)
+
+
+def test_move_to_existing_empty_dir(src, tmp_path):
+    dst = tmp_path.joinpath("dst")
+    dst.mkdir()
+
+    move_atomic(src, dst)
+    assert dst.joinpath("test").exists()
+
+
+def test_move_to_existing_file(src, tmp_path):
+    dst = tmp_path.joinpath("dst")
+
+    with dst.open("w") as fp:
+        fp.write("error")
+
+    with pytest.raises(NotADirectoryError):
+        move_atomic(src, dst)
+
+
+def test_move_file_to_existing_file(tmp_path):
+    dst = tmp_path.joinpath("dst")
+    src = tmp_path.joinpath("src")
+
+    with src.open("w") as fp:
+        fp.write("src")
+
+    with dst.open("w") as fp:
+        fp.write("dst")
+
+    move_atomic(src, dst)
+    with dst.open() as fp:
+        assert fp.read() == "src"
+
+
+def test_move_to_existing_non_empty_dir(src, tmp_path):
+    dst = tmp_path.joinpath("dst")
+    dst.mkdir()
+
+    with dst.joinpath("existing").open("w") as fp:
+        fp.write("already there")
+
+    with pytest.raises(DirectoryExistsError):
+        move_atomic(src, dst)