Jonathan Maw pushed to branch jonathan/workspace-fragment-multi-project at BuildStream / buildstream
Commits:
- a5a53ddd by Jonathan Maw at 2018-12-12T13:44:09Z
- d55b9e39 by Phil Dawson at 2018-12-12T13:55:19Z
- 733aab53 by Phil Dawson at 2018-12-12T13:55:19Z
- c2efeba0 by Phil Dawson at 2018-12-12T14:43:40Z
- 85c61894 by Phil Dawson at 2018-12-12T14:43:40Z
- a322d5c0 by Phil Dawson at 2018-12-12T14:43:40Z
- ec909605 by Phil Dawson at 2018-12-12T15:45:42Z
- 3697a611 by Richard Maw at 2018-12-12T16:31:38Z
- b3dceb16 by Richard Maw at 2018-12-12T16:32:41Z
- ba08a0cd by Richard Maw at 2018-12-12T16:32:41Z
- 70fb9554 by Richard Maw at 2018-12-12T16:32:41Z
- f773e746 by Richard Maw at 2018-12-12T16:33:02Z
- b6528441 by richardmaw-codethink at 2018-12-12T18:00:59Z
- d03bf316 by Benjamin Schubert at 2018-12-13T10:24:11Z
- a116f576 by Tristan Van Berkom at 2018-12-13T10:58:46Z
- 180fa774 by Benjamin Schubert at 2018-12-13T12:05:15Z
- 224aa4c2 by Benjamin Schubert at 2018-12-13T12:34:41Z
- 053beb66 by Tristan Van Berkom at 2018-12-13T14:23:19Z
- 29ab271c by Tristan Van Berkom at 2018-12-13T14:23:19Z
- ba955cf0 by Tristan Van Berkom at 2018-12-13T14:23:19Z
- 6010b5a4 by Tristan Van Berkom at 2018-12-13T14:23:19Z
- 3a6d27a4 by Tristan Van Berkom at 2018-12-13T14:23:19Z
- 4c0e602c by Tristan Van Berkom at 2018-12-13T14:23:19Z
- 60ddeeb9 by Tristan Van Berkom at 2018-12-13T14:58:28Z
- ce5577a2 by Jonathan Maw at 2018-12-13T17:18:27Z
- 3226719c by Jonathan Maw at 2018-12-13T17:18:27Z
26 changed files:
- NEWS
- buildstream/_artifactcache/artifactcache.py
- buildstream/_frontend/cli.py
- buildstream/_stream.py
- buildstream/_workspaces.py
- buildstream/buildelement.py
- buildstream/element.py
- buildstream/plugins/elements/autotools.py
- buildstream/plugins/elements/cmake.py
- buildstream/plugins/elements/distutils.py
- buildstream/plugins/elements/make.py
- buildstream/plugins/elements/makemaker.py
- buildstream/plugins/elements/manual.py
- buildstream/plugins/elements/meson.py
- buildstream/plugins/elements/modulebuild.py
- buildstream/plugins/elements/pip.py
- buildstream/plugins/elements/qmake.py
- doc/source/using_commands.rst
- tests/artifactcache/expiry.py
- tests/completions/completions.py
- tests/frontend/help.py
- tests/frontend/source_checkout.py
- tests/frontend/workspace.py
- tests/frontend/source_bundle.py → tests/integration/artifact.py
- − tests/testutils/mock_os.py
- tests/utils/misc.py
Changes:
... | ... | @@ -2,6 +2,14 @@ |
2 | 2 |
buildstream 1.3.1
|
3 | 3 |
=================
|
4 | 4 |
|
5 |
+ o Added `bst artifact log` subcommand for viewing build logs.
|
|
6 |
+ |
|
7 |
+ o BREAKING CHANGE: The `bst source-bundle` command has been removed. The
|
|
8 |
+ functionality it provided has been replaced by the `--include-build-scripts`
|
|
9 |
+ option of the `bst source-checkout` command. To produce a tarball containing
|
|
10 |
+ an element's sources and generated build scripts, run
|
|
11 |
+ `bst source-checkout --include-build-scripts --tar foo.bst some-file.tar`
|
|
12 |
+ |
|
5 | 13 |
o BREAKING CHANGE: Default strip-commands have been removed as they are too
|
6 | 14 |
specific. Recommendation if you are building in Linux is to use the
|
7 | 15 |
ones being used in freedesktop-sdk project, for example
|
... | ... | @@ -874,9 +874,7 @@ class ArtifactCache(): |
874 | 874 |
"\nValid values are, for example: 800M 10G 1T 50%\n"
|
875 | 875 |
.format(str(e))) from e
|
876 | 876 |
|
877 |
- stat = os.statvfs(artifactdir_volume)
|
|
878 |
- available_space = (stat.f_bsize * stat.f_bavail)
|
|
879 |
- |
|
877 |
+ available_space, total_size = self._get_volume_space_info_for(artifactdir_volume)
|
|
880 | 878 |
cache_size = self.get_cache_size()
|
881 | 879 |
|
882 | 880 |
# Ensure system has enough storage for the cache_quota
|
... | ... | @@ -893,7 +891,7 @@ class ArtifactCache(): |
893 | 891 |
"BuildStream requires a minimum cache quota of 2G.")
|
894 | 892 |
elif cache_quota > cache_size + available_space: # Check maximum
|
895 | 893 |
if '%' in self.context.config_cache_quota:
|
896 |
- available = (available_space / (stat.f_blocks * stat.f_bsize)) * 100
|
|
894 |
+ available = (available_space / total_size) * 100
|
|
897 | 895 |
available = '{}% of total disk space'.format(round(available, 1))
|
898 | 896 |
else:
|
899 | 897 |
available = utils._pretty_size(available_space)
|
... | ... | @@ -919,6 +917,20 @@ class ArtifactCache(): |
919 | 917 |
self._cache_quota = cache_quota - headroom
|
920 | 918 |
self._cache_lower_threshold = self._cache_quota / 2
|
921 | 919 |
|
920 |
+ # _get_volume_space_info_for
|
|
921 |
+ #
|
|
922 |
+ # Get the available space and total space for the given volume
|
|
923 |
+ #
|
|
924 |
+ # Args:
|
|
925 |
+ # volume: volume for which to get the size
|
|
926 |
+ #
|
|
927 |
+ # Returns:
|
|
928 |
+ # A tuple containing first the available number of bytes on the requested
|
|
929 |
+ # volume, then the total number of bytes of the volume.
|
|
930 |
+ def _get_volume_space_info_for(self, volume):
|
|
931 |
+ stat = os.statvfs(volume)
|
|
932 |
+ return stat.f_bsize * stat.f_bavail, stat.f_bsize * stat.f_blocks
|
|
933 |
+ |
|
922 | 934 |
|
923 | 935 |
# _configured_remote_artifact_cache_specs():
|
924 | 936 |
#
|
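For reference, the refactored helper is plain statvfs arithmetic, now isolated so the quota checks can be tested against a stubbed volume. A minimal standalone sketch of the same computation (the "/" mount point is an arbitrary choice for illustration):

    import os

    # same arithmetic as _get_volume_space_info_for() above
    stat = os.statvfs("/")
    available_space = stat.f_bsize * stat.f_bavail  # bytes usable by unprivileged processes
    total_size = stat.f_bsize * stat.f_blocks       # total bytes on the volume
    print(available_space, total_size)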
1 | 1 |
import os
|
2 | 2 |
import sys
|
3 |
+from contextlib import ExitStack
|
|
4 |
+from fnmatch import fnmatch
|
|
5 |
+from tempfile import TemporaryDirectory
|
|
3 | 6 |
|
4 | 7 |
import click
|
5 | 8 |
from .. import _yaml
|
... | ... | @@ -107,6 +110,23 @@ def complete_target(args, incomplete): |
107 | 110 |
return complete_list
|
108 | 111 |
|
109 | 112 |
|
113 |
+def complete_artifact(args, incomplete):
|
|
114 |
+ from .._context import Context
|
|
115 |
+ ctx = Context()
|
|
116 |
+ |
|
117 |
+ config = None
|
|
118 |
+ for i, arg in enumerate(args):
|
|
119 |
+ if arg in ('-c', '--config'):
|
|
120 |
+ config = args[i + 1]
|
|
121 |
+ ctx.load(config)
|
|
122 |
+ |
|
123 |
+ # element targets are valid artifact names
|
|
124 |
+ complete_list = complete_target(args, incomplete)
|
|
125 |
+ complete_list.extend(ref for ref in ctx.artifactcache.cas.list_refs() if ref.startswith(incomplete))
|
|
126 |
+ |
|
127 |
+ return complete_list
|
|
128 |
+ |
|
129 |
+ |
|
110 | 130 |
def override_completions(cmd, cmd_param, args, incomplete):
|
111 | 131 |
"""
|
112 | 132 |
:param cmd_param: command definition
|
... | ... | @@ -121,13 +141,15 @@ def override_completions(cmd, cmd_param, args, incomplete): |
121 | 141 |
# We can't easily extend click's data structures without
|
122 | 142 |
# modifying click itself, so just do some weak special casing
|
123 | 143 |
# right here and select which parameters we want to handle specially.
|
124 |
- if isinstance(cmd_param.type, click.Path) and \
|
|
125 |
- (cmd_param.name == 'elements' or
|
|
126 |
- cmd_param.name == 'element' or
|
|
127 |
- cmd_param.name == 'except_' or
|
|
128 |
- cmd_param.opts == ['--track'] or
|
|
129 |
- cmd_param.opts == ['--track-except']):
|
|
130 |
- return complete_target(args, incomplete)
|
|
144 |
+ if isinstance(cmd_param.type, click.Path):
|
|
145 |
+ if (cmd_param.name == 'elements' or
|
|
146 |
+ cmd_param.name == 'element' or
|
|
147 |
+ cmd_param.name == 'except_' or
|
|
148 |
+ cmd_param.opts == ['--track'] or
|
|
149 |
+ cmd_param.opts == ['--track-except']):
|
|
150 |
+ return complete_target(args, incomplete)
|
|
151 |
+ if cmd_param.name == 'artifacts':
|
|
152 |
+ return complete_artifact(args, incomplete)
|
|
131 | 153 |
|
132 | 154 |
raise CompleteUnhandled()
|
133 | 155 |
|
... | ... | @@ -725,6 +747,8 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar): |
725 | 747 |
# Source Checkout Command #
|
726 | 748 |
##################################################################
|
727 | 749 |
@cli.command(name='source-checkout', short_help='Checkout sources for an element')
|
750 |
+@click.option('--force', '-f', default=False, is_flag=True,
|
|
751 |
+ help="Allow files to be overwritten")
|
|
728 | 752 |
@click.option('--except', 'except_', multiple=True,
|
729 | 753 |
type=click.Path(readable=False),
|
730 | 754 |
help="Except certain dependencies")
|
... | ... | @@ -733,11 +757,15 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar): |
733 | 757 |
help='The dependencies whose sources to checkout (default: none)')
|
734 | 758 |
@click.option('--fetch', 'fetch_', default=False, is_flag=True,
|
735 | 759 |
help='Fetch elements if they are not fetched')
|
736 |
-@click.argument('element', required=False,
|
|
737 |
- type=click.Path(readable=False))
|
|
760 |
+@click.option('--tar', 'tar', default=False, is_flag=True,
|
|
761 |
+ help='Create a tarball from the element\'s sources instead of a '
|
|
762 |
+ 'file tree.')
|
|
763 |
+@click.option('--include-build-scripts', 'build_scripts', is_flag=True)
|
|
764 |
+@click.argument('element', required=False, type=click.Path(readable=False))
|
|
738 | 765 |
@click.argument('location', type=click.Path(), required=False)
|
739 | 766 |
@click.pass_obj
|
740 |
-def source_checkout(app, element, location, deps, fetch_, except_):
|
|
767 |
+def source_checkout(app, element, location, force, deps, fetch_, except_,
|
|
768 |
+ tar, build_scripts):
|
|
741 | 769 |
"""Checkout sources of an element to the specified location
|
742 | 770 |
"""
|
743 | 771 |
if not element and not location:
|
... | ... | @@ -757,9 +785,12 @@ def source_checkout(app, element, location, deps, fetch_, except_): |
757 | 785 |
|
758 | 786 |
app.stream.source_checkout(element,
|
759 | 787 |
location=location,
|
788 |
+ force=force,
|
|
760 | 789 |
deps=deps,
|
761 | 790 |
fetch=fetch_,
|
762 |
- except_targets=except_)
|
|
791 |
+ except_targets=except_,
|
|
792 |
+ tar=tar,
|
|
793 |
+ include_build_scripts=build_scripts)
|
|
763 | 794 |
|
764 | 795 |
|
765 | 796 |
##################################################################
|
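Taken together, the new --force, --tar and --include-build-scripts options let `bst source-checkout` cover the old source-bundle workflow in a single invocation, e.g. `bst source-checkout --tar --include-build-scripts hello.bst hello-sources.tar` (the element and output names here are hypothetical).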
... | ... | @@ -908,32 +939,99 @@ def workspace_list(app): |
908 | 939 |
app.stream.workspace_list()
|
909 | 940 |
|
910 | 941 |
|
911 |
-##################################################################
|
|
912 |
-# Source Bundle Command #
|
|
913 |
-##################################################################
|
|
914 |
-@cli.command(name="source-bundle", short_help="Produce a build bundle to be manually executed")
|
|
915 |
-@click.option('--except', 'except_', multiple=True,
|
|
916 |
- type=click.Path(readable=False),
|
|
917 |
- help="Elements to except from the tarball")
|
|
918 |
-@click.option('--compression', default='gz',
|
|
919 |
- type=click.Choice(['none', 'gz', 'bz2', 'xz']),
|
|
920 |
- help="Compress the tar file using the given algorithm.")
|
|
921 |
-@click.option('--track', 'track_', default=False, is_flag=True,
|
|
922 |
- help="Track new source references before bundling")
|
|
923 |
-@click.option('--force', '-f', default=False, is_flag=True,
|
|
924 |
- help="Overwrite an existing tarball")
|
|
925 |
-@click.option('--directory', default=os.getcwd(),
|
|
926 |
- help="The directory to write the tarball to")
|
|
927 |
-@click.argument('element',
|
|
928 |
- type=click.Path(readable=False))
|
|
942 |
+#############################################################
|
|
943 |
+# Artifact Commands #
|
|
944 |
+#############################################################
|
|
945 |
+def _classify_artifacts(names, cas, project_directory):
|
|
946 |
+ element_targets = []
|
|
947 |
+ artifact_refs = []
|
|
948 |
+ element_globs = []
|
|
949 |
+ artifact_globs = []
|
|
950 |
+ |
|
951 |
+ for name in names:
|
|
952 |
+ if name.endswith('.bst'):
|
|
953 |
+ if any(c in "*?[" for c in name):
|
|
954 |
+ element_globs.append(name)
|
|
955 |
+ else:
|
|
956 |
+ element_targets.append(name)
|
|
957 |
+ else:
|
|
958 |
+ if any(c in "*?[" for c in name):
|
|
959 |
+ artifact_globs.append(name)
|
|
960 |
+ else:
|
|
961 |
+ artifact_refs.append(name)
|
|
962 |
+ |
|
963 |
+ if element_globs:
|
|
964 |
+ for dirpath, _, filenames in os.walk(project_directory):
|
|
965 |
+ for filename in filenames:
|
|
966 |
+ element_path = os.path.join(dirpath, filename).lstrip(project_directory).lstrip('/')
|
|
967 |
+ if any(fnmatch(element_path, glob) for glob in element_globs):
|
|
968 |
+ element_targets.append(element_path)
|
|
969 |
+ |
|
970 |
+ if artifact_globs:
|
|
971 |
+ artifact_refs.extend(ref for ref in cas.list_refs()
|
|
972 |
+ if any(fnmatch(ref, glob) for glob in artifact_globs))
|
|
973 |
+ |
|
974 |
+ return element_targets, artifact_refs
|
|
975 |
+ |
|
976 |
+ |
|
977 |
+@cli.group(short_help="Manipulate cached artifacts")
|
|
978 |
+def artifact():
|
|
979 |
+ """Manipulate cached artifacts"""
|
|
980 |
+ pass
|
|
981 |
+ |
|
982 |
+ |
|
983 |
+################################################################
|
|
984 |
+# Artifact Log Command #
|
|
985 |
+################################################################
|
|
986 |
+@artifact.command(name='log', short_help="Show logs of an artifact")
|
|
987 |
+@click.argument('artifacts', type=click.Path(), nargs=-1)
|
|
929 | 988 |
@click.pass_obj
|
930 |
-def source_bundle(app, element, force, directory,
|
|
931 |
- track_, compression, except_):
|
|
932 |
- """Produce a source bundle to be manually executed
|
|
933 |
- """
|
|
934 |
- with app.initialized():
|
|
935 |
- app.stream.source_bundle(element, directory,
|
|
936 |
- track_first=track_,
|
|
937 |
- force=force,
|
|
938 |
- compression=compression,
|
|
939 |
- except_targets=except_)
|
|
989 |
+def artifact_log(app, artifacts):
|
|
990 |
+ """Show logs of all artifacts"""
|
|
991 |
+ from .._exceptions import CASError
|
|
992 |
+ from .._message import MessageType
|
|
993 |
+ from .._pipeline import PipelineSelection
|
|
994 |
+ from ..storage._casbaseddirectory import CasBasedDirectory
|
|
995 |
+ |
|
996 |
+ with ExitStack() as stack:
|
|
997 |
+ stack.enter_context(app.initialized())
|
|
998 |
+ cache = app.context.artifactcache
|
|
999 |
+ |
|
1000 |
+ elements, artifacts = _classify_artifacts(artifacts, cache.cas,
|
|
1001 |
+ app.project.directory)
|
|
1002 |
+ |
|
1003 |
+ vdirs = []
|
|
1004 |
+ extractdirs = []
|
|
1005 |
+ if artifacts:
|
|
1006 |
+ for ref in artifacts:
|
|
1007 |
+ try:
|
|
1008 |
+ cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
|
|
1009 |
+ vdir = CasBasedDirectory(cache.cas, cache_id)
|
|
1010 |
+ vdirs.append(vdir)
|
|
1011 |
+ except CASError as e:
|
|
1012 |
+ app._message(MessageType.WARN, "Artifact {} is not cached".format(ref), detail=str(e))
|
|
1013 |
+ continue
|
|
1014 |
+ if elements:
|
|
1015 |
+ elements = app.stream.load_selection(elements, selection=PipelineSelection.NONE)
|
|
1016 |
+ for element in elements:
|
|
1017 |
+ if not element._cached():
|
|
1018 |
+ app._message(MessageType.WARN, "Element {} is not cached".format(element))
|
|
1019 |
+ continue
|
|
1020 |
+ ref = cache.get_artifact_fullname(element, element._get_cache_key())
|
|
1021 |
+ cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
|
|
1022 |
+ vdir = CasBasedDirectory(cache.cas, cache_id)
|
|
1023 |
+ vdirs.append(vdir)
|
|
1024 |
+ |
|
1025 |
+ for vdir in vdirs:
|
|
1026 |
+ # NOTE: If reading the logs feels unresponsive, here would be a good place to provide progress information.
|
|
1027 |
+ logsdir = vdir.descend(["logs"])
|
|
1028 |
+ td = stack.enter_context(TemporaryDirectory())
|
|
1029 |
+ logsdir.export_files(td, can_link=True)
|
|
1030 |
+ extractdirs.append(td)
|
|
1031 |
+ |
|
1032 |
+ for extractdir in extractdirs:
|
|
1033 |
+ for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
|
|
1034 |
+ # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
|
|
1035 |
+ with open(log) as f:
|
|
1036 |
+ data = f.read()
|
|
1037 |
+ click.echo_via_pager(data)
|
... | ... | @@ -25,8 +25,8 @@ import stat |
25 | 25 |
import shlex
|
26 | 26 |
import shutil
|
27 | 27 |
import tarfile
|
28 |
-from contextlib import contextmanager
|
|
29 |
-from tempfile import TemporaryDirectory
|
|
28 |
+import tempfile
|
|
29 |
+from contextlib import contextmanager, suppress
|
|
30 | 30 |
|
31 | 31 |
from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
|
32 | 32 |
from ._message import Message, MessageType
|
... | ... | @@ -449,11 +449,14 @@ class Stream(): |
449 | 449 |
#
|
450 | 450 |
def source_checkout(self, target, *,
|
451 | 451 |
location=None,
|
452 |
+ force=False,
|
|
452 | 453 |
deps='none',
|
453 | 454 |
fetch=False,
|
454 |
- except_targets=()):
|
|
455 |
+ except_targets=(),
|
|
456 |
+ tar=False,
|
|
457 |
+ include_build_scripts=False):
|
|
455 | 458 |
|
456 |
- self._check_location_writable(location)
|
|
459 |
+ self._check_location_writable(location, force=force, tar=tar)
|
|
457 | 460 |
|
458 | 461 |
elements, _ = self._load((target,), (),
|
459 | 462 |
selection=deps,
|
... | ... | @@ -467,7 +470,8 @@ class Stream(): |
467 | 470 |
|
468 | 471 |
# Stage all sources determined by scope
|
469 | 472 |
try:
|
470 |
- self._write_element_sources(location, elements)
|
|
473 |
+ self._source_checkout(elements, location, force, deps,
|
|
474 |
+ fetch, tar, include_build_scripts)
|
|
471 | 475 |
except BstError as e:
|
472 | 476 |
raise StreamError("Error while writing sources"
|
473 | 477 |
": '{}'".format(e), detail=e.detail, reason=e.reason) from e
|
... | ... | @@ -568,6 +572,9 @@ class Stream(): |
568 | 572 |
self._message(MessageType.INFO, "Creating workspace for element {}"
|
569 | 573 |
.format(target.name))
|
570 | 574 |
|
575 |
+ # Ensure the WorkspaceProject is loaded before we delete it
|
|
576 |
+ workspaces.get_workspace_project(directory)
|
|
577 |
+ |
|
571 | 578 |
workspace = workspaces.get_workspace(target._get_full_name())
|
572 | 579 |
if workspace:
|
573 | 580 |
workspaces.delete_workspace(target._get_full_name())
|
... | ... | @@ -582,7 +589,7 @@ class Stream(): |
582 | 589 |
todo_elements = "\nDid not try to create workspaces for " + todo_elements
|
583 | 590 |
raise StreamError("Failed to create workspace directory: {}".format(e) + todo_elements) from e
|
584 | 591 |
|
585 |
- workspaces.create_workspace(target, directory, checkout=not no_checkout)
|
|
592 |
+ workspaces.create_workspace(target, directory, checkout=not no_checkout, append=force)
|
|
586 | 593 |
self._message(MessageType.INFO, "Created a workspace for element: {}"
|
587 | 594 |
.format(target._get_full_name()))
|
588 | 595 |
|
... | ... | @@ -656,6 +663,9 @@ class Stream(): |
656 | 663 |
.format(element.name, workspace_path))
|
657 | 664 |
continue
|
658 | 665 |
|
666 |
+ # Ensure the WorkspaceProject is in the cache before it gets deleted
|
|
667 |
+ workspaces.get_workspace_project(workspace_path)
|
|
668 |
+ |
|
659 | 669 |
with element.timed_activity("Removing workspace directory {}"
|
660 | 670 |
.format(workspace_path)):
|
661 | 671 |
try:
|
... | ... | @@ -664,8 +674,10 @@ class Stream(): |
664 | 674 |
raise StreamError("Could not remove '{}': {}"
|
665 | 675 |
.format(workspace_path, e)) from e
|
666 | 676 |
|
667 |
- workspaces.delete_workspace(element._get_full_name())
|
|
668 |
- workspaces.create_workspace(element, workspace_path, checkout=True)
|
|
677 |
+ workspaces.delete_workspace(element._get_full_name(),
|
|
678 |
+ preserve_workspace_project=True)
|
|
679 |
+ workspaces.create_workspace(element, workspace_path, checkout=True,
|
|
680 |
+ append=False, preserve_workspace_project=True)
|
|
669 | 681 |
|
670 | 682 |
self._message(MessageType.INFO,
|
671 | 683 |
"Reset workspace for {} at: {}".format(element.name,
|
... | ... | @@ -728,87 +740,6 @@ class Stream(): |
728 | 740 |
'workspaces': workspaces
|
729 | 741 |
})
|
730 | 742 |
|
731 |
- # source_bundle()
|
|
732 |
- #
|
|
733 |
- # Create a host buildable tarball bundle for the given target.
|
|
734 |
- #
|
|
735 |
- # Args:
|
|
736 |
- # target (str): The target element to bundle
|
|
737 |
- # directory (str): The directory to output the tarball
|
|
738 |
- # track_first (bool): Track new source references before bundling
|
|
739 |
- # compression (str): The compression type to use
|
|
740 |
- # force (bool): Overwrite an existing tarball
|
|
741 |
- #
|
|
742 |
- def source_bundle(self, target, directory, *,
|
|
743 |
- track_first=False,
|
|
744 |
- force=False,
|
|
745 |
- compression="gz",
|
|
746 |
- except_targets=()):
|
|
747 |
- |
|
748 |
- if track_first:
|
|
749 |
- track_targets = (target,)
|
|
750 |
- else:
|
|
751 |
- track_targets = ()
|
|
752 |
- |
|
753 |
- elements, track_elements = self._load((target,), track_targets,
|
|
754 |
- selection=PipelineSelection.ALL,
|
|
755 |
- except_targets=except_targets,
|
|
756 |
- track_selection=PipelineSelection.ALL,
|
|
757 |
- fetch_subprojects=True)
|
|
758 |
- |
|
759 |
- # source-bundle only supports one target
|
|
760 |
- target = self.targets[0]
|
|
761 |
- |
|
762 |
- self._message(MessageType.INFO, "Bundling sources for target {}".format(target.name))
|
|
763 |
- |
|
764 |
- # Find the correct filename for the compression algorithm
|
|
765 |
- tar_location = os.path.join(directory, target.normal_name + ".tar")
|
|
766 |
- if compression != "none":
|
|
767 |
- tar_location += "." + compression
|
|
768 |
- |
|
769 |
- # Attempt writing a file to generate a good error message
|
|
770 |
- # early
|
|
771 |
- #
|
|
772 |
- # FIXME: A bit hackish
|
|
773 |
- try:
|
|
774 |
- open(tar_location, mode="x")
|
|
775 |
- os.remove(tar_location)
|
|
776 |
- except IOError as e:
|
|
777 |
- raise StreamError("Cannot write to {0}: {1}"
|
|
778 |
- .format(tar_location, e)) from e
|
|
779 |
- |
|
780 |
- # Fetch and possibly track first
|
|
781 |
- #
|
|
782 |
- self._fetch(elements, track_elements=track_elements)
|
|
783 |
- |
|
784 |
- # We don't use the scheduler for this as it is almost entirely IO
|
|
785 |
- # bound.
|
|
786 |
- |
|
787 |
- # Create a temporary directory to build the source tree in
|
|
788 |
- builddir = self._context.builddir
|
|
789 |
- os.makedirs(builddir, exist_ok=True)
|
|
790 |
- prefix = "{}-".format(target.normal_name)
|
|
791 |
- |
|
792 |
- with TemporaryDirectory(prefix=prefix, dir=builddir) as tempdir:
|
|
793 |
- source_directory = os.path.join(tempdir, 'source')
|
|
794 |
- try:
|
|
795 |
- os.makedirs(source_directory)
|
|
796 |
- except OSError as e:
|
|
797 |
- raise StreamError("Failed to create directory: {}"
|
|
798 |
- .format(e)) from e
|
|
799 |
- |
|
800 |
- # Any elements that don't implement _write_script
|
|
801 |
- # should not be included in the later stages.
|
|
802 |
- elements = [
|
|
803 |
- element for element in elements
|
|
804 |
- if self._write_element_script(source_directory, element)
|
|
805 |
- ]
|
|
806 |
- |
|
807 |
- self._write_element_sources(os.path.join(tempdir, "source"), elements)
|
|
808 |
- self._write_build_script(tempdir, elements)
|
|
809 |
- self._collect_sources(tempdir, tar_location,
|
|
810 |
- target.normal_name, compression)
|
|
811 |
- |
|
812 | 743 |
# redirect_element_names()
|
813 | 744 |
#
|
814 | 745 |
# Takes a list of element names and returns a list where elements have been
|
... | ... | @@ -1189,6 +1120,54 @@ class Stream(): |
1189 | 1120 |
|
1190 | 1121 |
sandbox_vroot.export_files(directory, can_link=True, can_destroy=True)
|
1191 | 1122 |
|
1123 |
+ # Helper function for source_checkout()
|
|
1124 |
+ def _source_checkout(self, elements,
|
|
1125 |
+ location=None,
|
|
1126 |
+ force=False,
|
|
1127 |
+ deps='none',
|
|
1128 |
+ fetch=False,
|
|
1129 |
+ tar=False,
|
|
1130 |
+ include_build_scripts=False):
|
|
1131 |
+ location = os.path.abspath(location)
|
|
1132 |
+ location_parent = os.path.abspath(os.path.join(location, ".."))
|
|
1133 |
+ |
|
1134 |
+ # Stage all our sources in a temporary directory. The this
|
|
1135 |
+ # directory can be used to either construct a tarball or moved
|
|
1136 |
+ # to the final desired location.
|
|
1137 |
+ temp_source_dir = tempfile.TemporaryDirectory(dir=location_parent)
|
|
1138 |
+ try:
|
|
1139 |
+ self._write_element_sources(temp_source_dir.name, elements)
|
|
1140 |
+ if include_build_scripts:
|
|
1141 |
+ self._write_build_scripts(temp_source_dir.name, elements)
|
|
1142 |
+ if tar:
|
|
1143 |
+ self._create_tarball(temp_source_dir.name, location)
|
|
1144 |
+ else:
|
|
1145 |
+ self._move_directory(temp_source_dir.name, location, force)
|
|
1146 |
+ except OSError as e:
|
|
1147 |
+ raise StreamError("Failed to checkout sources to {}: {}"
|
|
1148 |
+ .format(location, e)) from e
|
|
1149 |
+ finally:
|
|
1150 |
+ with suppress(FileNotFoundError):
|
|
1151 |
+ temp_source_dir.cleanup()
|
|
1152 |
+ |
|
1153 |
+ # Move a directory src to dest. This will work across devices and
|
|
1154 |
+ # may optionaly overwrite existing files.
|
|
1155 |
+ def _move_directory(self, src, dest, force=False):
|
|
1156 |
+ def is_empty_dir(path):
|
|
1157 |
+ return os.path.isdir(dest) and not os.listdir(dest)
|
|
1158 |
+ |
|
1159 |
+ try:
|
|
1160 |
+ os.rename(src, dest)
|
|
1161 |
+ return
|
|
1162 |
+ except OSError:
|
|
1163 |
+ pass
|
|
1164 |
+ |
|
1165 |
+ if force or is_empty_dir(dest):
|
|
1166 |
+ try:
|
|
1167 |
+ utils.link_files(src, dest)
|
|
1168 |
+ except utils.UtilError as e:
|
|
1169 |
+ raise StreamError("Failed to move directory: {}".format(e)) from e
|
|
1170 |
+ |
|
1192 | 1171 |
# Write the element build script to the given directory
|
1193 | 1172 |
def _write_element_script(self, directory, element):
|
1194 | 1173 |
try:
|
... | ... | @@ -1205,8 +1184,28 @@ class Stream(): |
1205 | 1184 |
os.makedirs(element_source_dir)
|
1206 | 1185 |
element._stage_sources_at(element_source_dir, mount_workspaces=False)
|
1207 | 1186 |
|
1187 |
+ # Create a tarball from the content of directory
|
|
1188 |
+ def _create_tarball(self, directory, tar_name):
|
|
1189 |
+ try:
|
|
1190 |
+ with utils.save_file_atomic(tar_name, mode='wb') as f:
|
|
1191 |
+ # This TarFile does not need to be explicitly closed
|
|
1192 |
+ # as the underlying file object will be closed be the
|
|
1193 |
+ # save_file_atomic contect manager
|
|
1194 |
+ tarball = tarfile.open(fileobj=f, mode='w')
|
|
1195 |
+ for item in os.listdir(str(directory)):
|
|
1196 |
+ file_to_add = os.path.join(directory, item)
|
|
1197 |
+ tarball.add(file_to_add, arcname=item)
|
|
1198 |
+ except OSError as e:
|
|
1199 |
+ raise StreamError("Failed to create tar archive: {}".format(e)) from e
|
|
1200 |
+ |
|
1201 |
+ # Write all the build_scripts for elements in the directory location
|
|
1202 |
+ def _write_build_scripts(self, location, elements):
|
|
1203 |
+ for element in elements:
|
|
1204 |
+ self._write_element_script(location, element)
|
|
1205 |
+ self._write_master_build_script(location, elements)
|
|
1206 |
+ |
|
1208 | 1207 |
# Write a master build script to the sandbox
|
1209 |
- def _write_build_script(self, directory, elements):
|
|
1208 |
+ def _write_master_build_script(self, directory, elements):
|
|
1210 | 1209 |
|
1211 | 1210 |
module_string = ""
|
1212 | 1211 |
for element in elements:
|
... | ... | @@ -143,8 +143,38 @@ class WorkspaceProject(): |
143 | 143 |
# element_name (str): The name of the element that the workspace belongs to.
|
144 | 144 |
#
|
145 | 145 |
def add_project(self, project_path, element_name):
|
146 |
- assert (project_path and element_name)
|
|
147 |
- self._projects.append({'project-path': project_path, 'element-name': element_name})
|
|
146 |
+ # TODO: Decide whether to raise an exception if the project already exists.
|
|
147 |
+ project = {'project-path': project_path, 'element-name': element_name}
|
|
148 |
+ if project not in self._projects:
|
|
149 |
+ self._projects.append(project)
|
|
150 |
+ |
|
151 |
+ # set_project()
|
|
152 |
+ #
|
|
153 |
+ # Sets the project to only contain the project_path and element_name specified.
|
|
154 |
+ #
|
|
155 |
+ # Args:
|
|
156 |
+ # project_path (str): the path to the project that opened the workspace.
|
|
157 |
+ # element_name (str): the name of the element that the workspace belongs to.
|
|
158 |
+ #
|
|
159 |
+ def set_project(self, project_path, element_name):
|
|
160 |
+ self._projects = [{'project-path': project_path, 'element-name': element_name}]
|
|
161 |
+ |
|
162 |
+ # remove_project()
|
|
163 |
+ #
|
|
164 |
+ # Removes the first project entry that matches the project_path and element_name
|
|
165 |
+ def remove_project(self, project_path, element_name):
|
|
166 |
+ # NOTE: This will need revisiting if projects' data format changes
|
|
167 |
+ # TODO: Figure out what to do if there is no project for those parameters
|
|
168 |
+ self._projects.remove({'project-path': project_path, 'element-name': element_name})
|
|
169 |
+ |
|
170 |
+ # has_projects()
|
|
171 |
+ #
|
|
172 |
+ # Returns whether there are any projects in this WorkspaceProject
|
|
173 |
+ #
|
|
174 |
+ # Returns:
|
|
175 |
+ # (bool): True if there are any projects, or False if there aren't any
|
|
176 |
+ def has_projects(self):
|
|
177 |
+ return any(self._projects)
|
|
148 | 178 |
|
149 | 179 |
|
150 | 180 |
# WorkspaceProjectCache()
|
... | ... | @@ -186,17 +216,21 @@ class WorkspaceProjectCache(): |
186 | 216 |
# directory (str): The directory to search for a WorkspaceProject.
|
187 | 217 |
# project_path (str): The path to the project that refers to this workspace
|
188 | 218 |
# element_name (str): The element in the project that was refers to this workspace
|
219 |
+ # append (bool): Whether the project_path and element_name should be appended
|
|
189 | 220 |
#
|
190 | 221 |
# Returns:
|
191 | 222 |
# (WorkspaceProject): The WorkspaceProject that was found for that directory.
|
192 | 223 |
#
|
193 |
- def add(self, directory, project_path, element_name):
|
|
224 |
+ def add(self, directory, project_path, element_name, *, append):
|
|
194 | 225 |
workspace_project = self.get(directory)
|
195 | 226 |
if not workspace_project:
|
196 | 227 |
workspace_project = WorkspaceProject(directory)
|
197 | 228 |
self._projects[directory] = workspace_project
|
198 | 229 |
|
199 |
- workspace_project.add_project(project_path, element_name)
|
|
230 |
+ if append:
|
|
231 |
+ workspace_project.add_project(project_path, element_name)
|
|
232 |
+ else:
|
|
233 |
+ workspace_project.set_project(project_path, element_name)
|
|
200 | 234 |
return workspace_project
|
201 | 235 |
|
202 | 236 |
# remove()
|
... | ... | @@ -204,23 +238,28 @@ class WorkspaceProjectCache(): |
204 | 238 |
# Removes the project path and element name from the WorkspaceProject that exists
|
205 | 239 |
# for that directory.
|
206 | 240 |
#
|
207 |
- # NOTE: This currently just deletes the file, but with support for multiple
|
|
208 |
- # projects opening the same workspace, this will involve decreasing the count
|
|
209 |
- # and deleting the file if there are no more projects.
|
|
210 |
- #
|
|
211 | 241 |
# Args:
|
212 | 242 |
# directory (str): The directory to search for a WorkspaceProject.
|
243 |
+ # project_path (str): the path to the project that should be removed.
|
|
244 |
+ # element_name (str): the name of the element in the project that should be removed.
|
|
213 | 245 |
#
|
214 |
- def remove(self, directory):
|
|
246 |
+ def remove(self, directory, project_path, element_name):
|
|
215 | 247 |
workspace_project = self.get(directory)
|
216 | 248 |
if not workspace_project:
|
217 | 249 |
raise LoadError(LoadErrorReason.MISSING_FILE,
|
218 | 250 |
"Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE))
|
219 |
- path = workspace_project.get_filename()
|
|
220 |
- try:
|
|
221 |
- os.unlink(path)
|
|
222 |
- except FileNotFoundError:
|
|
223 |
- pass
|
|
251 |
+ |
|
252 |
+ workspace_project.remove_project(project_path, element_name)
|
|
253 |
+ |
|
254 |
+ if workspace_project.has_projects():
|
|
255 |
+ workspace_project.write()
|
|
256 |
+ else:
|
|
257 |
+ # Remove the WorkspaceProject file if it's now empty
|
|
258 |
+ path = workspace_project.get_filename()
|
|
259 |
+ try:
|
|
260 |
+ os.unlink(path)
|
|
261 |
+ except FileNotFoundError:
|
|
262 |
+ pass
|
|
224 | 263 |
|
225 | 264 |
|
226 | 265 |
# Workspace()
|
... | ... | @@ -429,8 +468,10 @@ class Workspaces(): |
429 | 468 |
# target (Element) - The element to create a workspace for
|
430 | 469 |
# path (str) - The path in which the workspace should be kept
|
431 | 470 |
# checkout (bool): Whether to check-out the element's sources into the directory
|
471 |
+ # append (bool): Whether the WorkspaceProject file should append this project
|
|
472 |
+ # preserve_workspace_project (bool): Whether the WorkspaceProject should be altered
|
|
432 | 473 |
#
|
433 |
- def create_workspace(self, target, path, *, checkout):
|
|
474 |
+ def create_workspace(self, target, path, *, checkout, append, preserve_workspace_project=False):
|
|
434 | 475 |
element_name = target._get_full_name()
|
435 | 476 |
project_dir = self._toplevel_project.directory
|
436 | 477 |
if path.startswith(project_dir):
|
... | ... | @@ -444,7 +485,11 @@ class Workspaces(): |
444 | 485 |
with target.timed_activity("Staging sources to {}".format(path)):
|
445 | 486 |
target._open_workspace()
|
446 | 487 |
|
447 |
- workspace_project = self._workspace_project_cache.add(path, project_dir, element_name)
|
|
488 |
+ if preserve_workspace_project:
|
|
489 |
+ workspace_project = self._workspace_project_cache.get(path)
|
|
490 |
+ else:
|
|
491 |
+ workspace_project = self._workspace_project_cache.add(path, project_dir, element_name, append=append)
|
|
492 |
+ |
|
448 | 493 |
project_file_path = workspace_project.get_filename()
|
449 | 494 |
|
450 | 495 |
if os.path.exists(project_file_path):
|
... | ... | @@ -469,6 +514,21 @@ class Workspaces(): |
469 | 514 |
return None
|
470 | 515 |
return self._workspaces[element_name]
|
471 | 516 |
|
517 |
+ # get_workspace_project()
|
|
518 |
+ #
|
|
519 |
+ # Returns a WorkspaceProject for a given directory, retrieving from the cache if
|
|
520 |
+ # present.
|
|
521 |
+ #
|
|
522 |
+ # Args:
|
|
523 |
+ # directory (str): The directory to search for a WorkspaceProject.
|
|
524 |
+ #
|
|
525 |
+ # Returns:
|
|
526 |
+ # (WorkspaceProject): The WorkspaceProject that was found for that directory.
|
|
527 |
+ # or (NoneType): None, if no WorkspaceProject can be found.
|
|
528 |
+ #
|
|
529 |
+ def get_workspace_project(self, directory):
|
|
530 |
+ return self._workspace_project_cache.get(directory)
|
|
531 |
+ |
|
472 | 532 |
# update_workspace()
|
473 | 533 |
#
|
474 | 534 |
# Update the datamodel with a new Workspace instance
|
... | ... | @@ -498,20 +558,23 @@ class Workspaces(): |
498 | 558 |
#
|
499 | 559 |
# Args:
|
500 | 560 |
# element_name (str) - The element name whose workspace to delete
|
561 |
+ # preserve_workspace_project (bool): Whether the WorkspaceProject should be altered
|
|
501 | 562 |
#
|
502 |
- def delete_workspace(self, element_name):
|
|
563 |
+ def delete_workspace(self, element_name, preserve_workspace_project=False):
|
|
503 | 564 |
workspace = self.get_workspace(element_name)
|
504 | 565 |
del self._workspaces[element_name]
|
505 | 566 |
|
506 |
- # Remove from the cache if it exists
|
|
507 |
- try:
|
|
508 |
- self._workspace_project_cache.remove(workspace.get_absolute_path())
|
|
509 |
- except LoadError as e:
|
|
510 |
- # We might be closing a workspace with a deleted directory
|
|
511 |
- if e.reason == LoadErrorReason.MISSING_FILE:
|
|
512 |
- pass
|
|
513 |
- else:
|
|
514 |
- raise
|
|
567 |
+ if not preserve_workspace_project:
|
|
568 |
+ # Remove from the cache if it exists
|
|
569 |
+ project_dir = self._toplevel_project.directory
|
|
570 |
+ try:
|
|
571 |
+ self._workspace_project_cache.remove(workspace.get_absolute_path(), project_dir, element_name)
|
|
572 |
+ except LoadError as e:
|
|
573 |
+ # We might be closing a workspace with a deleted directory
|
|
574 |
+ if e.reason == LoadErrorReason.MISSING_FILE:
|
|
575 |
+ pass
|
|
576 |
+ else:
|
|
577 |
+ raise
|
|
515 | 578 |
|
516 | 579 |
# save_config()
|
517 | 580 |
#
|
... | ... | @@ -215,10 +215,6 @@ class BuildElement(Element): |
215 | 215 |
# Setup environment
|
216 | 216 |
sandbox.set_environment(self.get_environment())
|
217 | 217 |
|
218 |
- # Enable command batching across prepare() and assemble()
|
|
219 |
- self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
220 |
- collect=self.get_variable('install-root'))
|
|
221 |
- |
|
222 | 218 |
def stage(self, sandbox):
|
223 | 219 |
|
224 | 220 |
# Stage deps in the sandbox root
|
... | ... | @@ -1612,9 +1612,9 @@ class Element(Plugin): |
1612 | 1612 |
sandbox_vpath = sandbox_vroot.descend(path_components)
|
1613 | 1613 |
try:
|
1614 | 1614 |
sandbox_vpath.import_files(workspace.get_absolute_path())
|
1615 |
- except UtilError as e:
|
|
1615 |
+ except UtilError as e2:
|
|
1616 | 1616 |
self.warn("Failed to preserve workspace state for failed build sysroot: {}"
|
1617 |
- .format(e))
|
|
1617 |
+ .format(e2))
|
|
1618 | 1618 |
|
1619 | 1619 |
self.__set_build_result(success=False, description=str(e), detail=e.detail)
|
1620 | 1620 |
self._cache_artifact(rootdir, sandbox, e.collect)
|
... | ... | @@ -55,7 +55,7 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
55 | 55 |
details on common configuration options for build elements.
|
56 | 56 |
"""
|
57 | 57 |
|
58 |
-from buildstream import BuildElement
|
|
58 |
+from buildstream import BuildElement, SandboxFlags
|
|
59 | 59 |
|
60 | 60 |
|
61 | 61 |
# Element implementation for the 'autotools' kind.
|
... | ... | @@ -63,6 +63,12 @@ class AutotoolsElement(BuildElement): |
63 | 63 |
# Supports virtual directories (required for remote execution)
|
64 | 64 |
BST_VIRTUAL_DIRECTORY = True
|
65 | 65 |
|
66 |
+ # Enable command batching across prepare() and assemble()
|
|
67 |
+ def configure_sandbox(self, sandbox):
|
|
68 |
+ super().configure_sandbox(sandbox)
|
|
69 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
70 |
+ collect=self.get_variable('install-root'))
|
|
71 |
+ |
|
66 | 72 |
|
67 | 73 |
# Plugin entry point
|
68 | 74 |
def setup():
|
... | ... | @@ -54,7 +54,7 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
54 | 54 |
details on common configuration options for build elements.
|
55 | 55 |
"""
|
56 | 56 |
|
57 |
-from buildstream import BuildElement
|
|
57 |
+from buildstream import BuildElement, SandboxFlags
|
|
58 | 58 |
|
59 | 59 |
|
60 | 60 |
# Element implementation for the 'cmake' kind.
|
... | ... | @@ -62,6 +62,12 @@ class CMakeElement(BuildElement): |
62 | 62 |
# Supports virtual directories (required for remote execution)
|
63 | 63 |
BST_VIRTUAL_DIRECTORY = True
|
64 | 64 |
|
65 |
+ # Enable command batching across prepare() and assemble()
|
|
66 |
+ def configure_sandbox(self, sandbox):
|
|
67 |
+ super().configure_sandbox(sandbox)
|
|
68 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
69 |
+ collect=self.get_variable('install-root'))
|
|
70 |
+ |
|
65 | 71 |
|
66 | 72 |
# Plugin entry point
|
67 | 73 |
def setup():
|
... | ... | @@ -31,12 +31,19 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
31 | 31 |
details on common configuration options for build elements.
|
32 | 32 |
"""
|
33 | 33 |
|
34 |
-from buildstream import BuildElement
|
|
34 |
+from buildstream import BuildElement, SandboxFlags
|
|
35 | 35 |
|
36 | 36 |
|
37 | 37 |
# Element implementation for the python 'distutils' kind.
|
38 | 38 |
class DistutilsElement(BuildElement):
|
39 |
- pass
|
|
39 |
+ # Supports virtual directories (required for remote execution)
|
|
40 |
+ BST_VIRTUAL_DIRECTORY = True
|
|
41 |
+ |
|
42 |
+ # Enable command batching across prepare() and assemble()
|
|
43 |
+ def configure_sandbox(self, sandbox):
|
|
44 |
+ super().configure_sandbox(sandbox)
|
|
45 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
46 |
+ collect=self.get_variable('install-root'))
|
|
40 | 47 |
|
41 | 48 |
|
42 | 49 |
# Plugin entry point
|
... | ... | @@ -36,7 +36,7 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
36 | 36 |
details on common configuration options for build elements.
|
37 | 37 |
"""
|
38 | 38 |
|
39 |
-from buildstream import BuildElement
|
|
39 |
+from buildstream import BuildElement, SandboxFlags
|
|
40 | 40 |
|
41 | 41 |
|
42 | 42 |
# Element implementation for the 'make' kind.
|
... | ... | @@ -44,6 +44,12 @@ class MakeElement(BuildElement): |
44 | 44 |
# Supports virtual directories (required for remote execution)
|
45 | 45 |
BST_VIRTUAL_DIRECTORY = True
|
46 | 46 |
|
47 |
+ # Enable command batching across prepare() and assemble()
|
|
48 |
+ def configure_sandbox(self, sandbox):
|
|
49 |
+ super().configure_sandbox(sandbox)
|
|
50 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
51 |
+ collect=self.get_variable('install-root'))
|
|
52 |
+ |
|
47 | 53 |
|
48 | 54 |
# Plugin entry point
|
49 | 55 |
def setup():
|
... | ... | @@ -31,12 +31,19 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
31 | 31 |
details on common configuration options for build elements.
|
32 | 32 |
"""
|
33 | 33 |
|
34 |
-from buildstream import BuildElement
|
|
34 |
+from buildstream import BuildElement, SandboxFlags
|
|
35 | 35 |
|
36 | 36 |
|
37 | 37 |
# Element implementation for the 'makemaker' kind.
|
38 | 38 |
class MakeMakerElement(BuildElement):
|
39 |
- pass
|
|
39 |
+ # Supports virtual directories (required for remote execution)
|
|
40 |
+ BST_VIRTUAL_DIRECTORY = True
|
|
41 |
+ |
|
42 |
+ # Enable command batching across prepare() and assemble()
|
|
43 |
+ def configure_sandbox(self, sandbox):
|
|
44 |
+ super().configure_sandbox(sandbox)
|
|
45 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
46 |
+ collect=self.get_variable('install-root'))
|
|
40 | 47 |
|
41 | 48 |
|
42 | 49 |
# Plugin entry point
|
... | ... | @@ -31,12 +31,19 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
31 | 31 |
details on common configuration options for build elements.
|
32 | 32 |
"""
|
33 | 33 |
|
34 |
-from buildstream import BuildElement
|
|
34 |
+from buildstream import BuildElement, SandboxFlags
|
|
35 | 35 |
|
36 | 36 |
|
37 | 37 |
# Element implementation for the 'manual' kind.
|
38 | 38 |
class ManualElement(BuildElement):
|
39 |
- pass
|
|
39 |
+ # Supports virtual directories (required for remote execution)
|
|
40 |
+ BST_VIRTUAL_DIRECTORY = True
|
|
41 |
+ |
|
42 |
+ # Enable command batching across prepare() and assemble()
|
|
43 |
+ def configure_sandbox(self, sandbox):
|
|
44 |
+ super().configure_sandbox(sandbox)
|
|
45 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
46 |
+ collect=self.get_variable('install-root'))
|
|
40 | 47 |
|
41 | 48 |
|
42 | 49 |
# Plugin entry point
|
... | ... | @@ -51,7 +51,7 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
51 | 51 |
details on common configuration options for build elements.
|
52 | 52 |
"""
|
53 | 53 |
|
54 |
-from buildstream import BuildElement
|
|
54 |
+from buildstream import BuildElement, SandboxFlags
|
|
55 | 55 |
|
56 | 56 |
|
57 | 57 |
# Element implementation for the 'meson' kind.
|
... | ... | @@ -59,6 +59,12 @@ class MesonElement(BuildElement): |
59 | 59 |
# Supports virtual directories (required for remote execution)
|
60 | 60 |
BST_VIRTUAL_DIRECTORY = True
|
61 | 61 |
|
62 |
+ # Enable command batching across prepare() and assemble()
|
|
63 |
+ def configure_sandbox(self, sandbox):
|
|
64 |
+ super().configure_sandbox(sandbox)
|
|
65 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
66 |
+ collect=self.get_variable('install-root'))
|
|
67 |
+ |
|
62 | 68 |
|
63 | 69 |
# Plugin entry point
|
64 | 70 |
def setup():
|
... | ... | @@ -31,12 +31,19 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
31 | 31 |
details on common configuration options for build elements.
|
32 | 32 |
"""
|
33 | 33 |
|
34 |
-from buildstream import BuildElement
|
|
34 |
+from buildstream import BuildElement, SandboxFlags
|
|
35 | 35 |
|
36 | 36 |
|
37 | 37 |
# Element implementation for the 'modulebuild' kind.
|
38 | 38 |
class ModuleBuildElement(BuildElement):
|
39 |
- pass
|
|
39 |
+ # Supports virtual directories (required for remote execution)
|
|
40 |
+ BST_VIRTUAL_DIRECTORY = True
|
|
41 |
+ |
|
42 |
+ # Enable command batching across prepare() and assemble()
|
|
43 |
+ def configure_sandbox(self, sandbox):
|
|
44 |
+ super().configure_sandbox(sandbox)
|
|
45 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
46 |
+ collect=self.get_variable('install-root'))
|
|
40 | 47 |
|
41 | 48 |
|
42 | 49 |
# Plugin entry point
|
... | ... | @@ -31,12 +31,19 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
31 | 31 |
details on common configuration options for build elements.
|
32 | 32 |
"""
|
33 | 33 |
|
34 |
-from buildstream import BuildElement
|
|
34 |
+from buildstream import BuildElement, SandboxFlags
|
|
35 | 35 |
|
36 | 36 |
|
37 | 37 |
# Element implementation for the 'pip' kind.
|
38 | 38 |
class PipElement(BuildElement):
|
39 |
- pass
|
|
39 |
+ # Supports virtual directories (required for remote execution)
|
|
40 |
+ BST_VIRTUAL_DIRECTORY = True
|
|
41 |
+ |
|
42 |
+ # Enable command batching across prepare() and assemble()
|
|
43 |
+ def configure_sandbox(self, sandbox):
|
|
44 |
+ super().configure_sandbox(sandbox)
|
|
45 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
46 |
+ collect=self.get_variable('install-root'))
|
|
40 | 47 |
|
41 | 48 |
|
42 | 49 |
# Plugin entry point
|
... | ... | @@ -31,7 +31,7 @@ See :ref:`built-in functionality documentation <core_buildelement_builtins>` for |
31 | 31 |
details on common configuration options for build elements.
|
32 | 32 |
"""
|
33 | 33 |
|
34 |
-from buildstream import BuildElement
|
|
34 |
+from buildstream import BuildElement, SandboxFlags
|
|
35 | 35 |
|
36 | 36 |
|
37 | 37 |
# Element implementation for the 'qmake' kind.
|
... | ... | @@ -39,6 +39,12 @@ class QMakeElement(BuildElement): |
39 | 39 |
# Supports virtual directories (required for remote execution)
|
40 | 40 |
BST_VIRTUAL_DIRECTORY = True
|
41 | 41 |
|
42 |
+ # Enable command batching across prepare() and assemble()
|
|
43 |
+ def configure_sandbox(self, sandbox):
|
|
44 |
+ super().configure_sandbox(sandbox)
|
|
45 |
+ self.batch_prepare_assemble(SandboxFlags.ROOT_READ_ONLY,
|
|
46 |
+ collect=self.get_variable('install-root'))
|
|
47 |
+ |
|
42 | 48 |
|
43 | 49 |
# Plugin entry point
|
44 | 50 |
def setup():
|
... | ... | @@ -86,13 +86,6 @@ project's main directory. |
86 | 86 |
|
87 | 87 |
----
|
88 | 88 |
|
89 |
-.. _invoking_source_bundle:
|
|
90 |
- |
|
91 |
-.. click:: buildstream._frontend.cli:source_bundle
|
|
92 |
- :prog: bst source bundle
|
|
93 |
- |
|
94 |
-----
|
|
95 |
- |
|
96 | 89 |
.. _invoking_workspace:
|
97 | 90 |
|
98 | 91 |
.. click:: buildstream._frontend.cli:workspace
|
... | ... | @@ -18,6 +18,7 @@ |
18 | 18 |
#
|
19 | 19 |
|
20 | 20 |
import os
|
21 |
+from unittest import mock
|
|
21 | 22 |
|
22 | 23 |
import pytest
|
23 | 24 |
|
... | ... | @@ -311,6 +312,8 @@ def test_never_delete_required_track(cli, datafiles, tmpdir): |
311 | 312 |
("0", True),
|
312 | 313 |
("-1", False),
|
313 | 314 |
("pony", False),
|
315 |
+ ("7K", False),
|
|
316 |
+ ("70%", False),
|
|
314 | 317 |
("200%", False)
|
315 | 318 |
])
|
316 | 319 |
@pytest.mark.datafiles(DATA_DIR)
|
... | ... | @@ -324,7 +327,35 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success): |
324 | 327 |
}
|
325 | 328 |
})
|
326 | 329 |
|
327 |
- res = cli.run(project=project, args=['workspace', 'list'])
|
|
330 |
+ # We patch how we get space information
|
|
331 |
+ # Ideally we would instead create a FUSE device on which we control
|
|
332 |
+ # everything.
|
|
333 |
+ # If the value is a percentage, we fix the current values to take into
|
|
334 |
+ # account the block size, since this is important in how we compute the size
|
|
335 |
+ |
|
336 |
+ if quota.endswith("%"): # We set the used space at 60% of total space
|
|
337 |
+ stats = os.statvfs(".")
|
|
338 |
+ free_space = 0.6 * stats.f_bsize * stats.f_blocks
|
|
339 |
+ total_space = stats.f_bsize * stats.f_blocks
|
|
340 |
+ else:
|
|
341 |
+ free_space = 6000
|
|
342 |
+ total_space = 10000
|
|
343 |
+ |
|
344 |
+ volume_space_patch = mock.patch(
|
|
345 |
+ "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
|
|
346 |
+ autospec=True,
|
|
347 |
+ return_value=(free_space, total_space),
|
|
348 |
+ )
|
|
349 |
+ |
|
350 |
+ cache_size_patch = mock.patch(
|
|
351 |
+ "buildstream._artifactcache.artifactcache.ArtifactCache.get_cache_size",
|
|
352 |
+ autospec=True,
|
|
353 |
+ return_value=0,
|
|
354 |
+ )
|
|
355 |
+ |
|
356 |
+ with volume_space_patch, cache_size_patch:
|
|
357 |
+ res = cli.run(project=project, args=['workspace', 'list'])
|
|
358 |
+ |
|
328 | 359 |
if success:
|
329 | 360 |
res.assert_success()
|
330 | 361 |
else:
|
... | ... | @@ -6,6 +6,7 @@ from tests.testutils import cli |
6 | 6 |
DATA_DIR = os.path.dirname(os.path.realpath(__file__))
|
7 | 7 |
|
8 | 8 |
MAIN_COMMANDS = [
|
9 |
+ 'artifact ',
|
|
9 | 10 |
'build ',
|
10 | 11 |
'checkout ',
|
11 | 12 |
'fetch ',
|
... | ... | @@ -16,7 +17,6 @@ MAIN_COMMANDS = [ |
16 | 17 |
'shell ',
|
17 | 18 |
'show ',
|
18 | 19 |
'source-checkout ',
|
19 |
- 'source-bundle ',
|
|
20 | 20 |
'track ',
|
21 | 21 |
'workspace '
|
22 | 22 |
]
|
... | ... | @@ -25,7 +25,6 @@ def test_help_main(cli): |
25 | 25 |
('push'),
|
26 | 26 |
('shell'),
|
27 | 27 |
('show'),
|
28 |
- ('source-bundle'),
|
|
29 | 28 |
('track'),
|
30 | 29 |
('workspace')
|
31 | 30 |
])
|
1 | 1 |
import os
|
2 | 2 |
import pytest
|
3 |
+import tarfile
|
|
4 |
+from pathlib import Path
|
|
3 | 5 |
|
4 | 6 |
from tests.testutils import cli
|
5 | 7 |
|
... | ... | @@ -55,6 +57,39 @@ def test_source_checkout(datafiles, cli, tmpdir_factory, with_workspace, guess_e |
55 | 57 |
assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
|
56 | 58 |
|
57 | 59 |
|
60 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
61 |
+@pytest.mark.parametrize('force_flag', ['--force', '-f'])
|
|
62 |
+def test_source_checkout_force(datafiles, cli, force_flag):
|
|
63 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
64 |
+ checkout = os.path.join(cli.directory, 'source-checkout')
|
|
65 |
+ target = 'checkout-deps.bst'
|
|
66 |
+ |
|
67 |
+ os.makedirs(os.path.join(checkout, 'some-thing'))
|
|
68 |
+ # Path(os.path.join(checkout, 'some-file')).touch()
|
|
69 |
+ |
|
70 |
+ result = cli.run(project=project, args=['source-checkout', force_flag, target, '--deps', 'none', checkout])
|
|
71 |
+ result.assert_success()
|
|
72 |
+ |
|
73 |
+ assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
|
|
74 |
+ |
|
75 |
+ |
|
76 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
77 |
+def test_source_checkout_tar(datafiles, cli):
|
|
78 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
79 |
+ checkout = os.path.join(cli.directory, 'source-checkout.tar')
|
|
80 |
+ target = 'checkout-deps.bst'
|
|
81 |
+ |
|
82 |
+ result = cli.run(project=project, args=['source-checkout', '--tar', target, '--deps', 'none', checkout])
|
|
83 |
+ result.assert_success()
|
|
84 |
+ |
|
85 |
+ assert os.path.exists(checkout)
|
|
86 |
+ with tarfile.open(checkout) as tf:
|
|
87 |
+ expected_content = os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config')
|
|
88 |
+ tar_members = [f.name for f in tf]
|
|
89 |
+ for member in tar_members:
|
|
90 |
+ assert member in expected_content
|
|
91 |
+ |
|
92 |
+ |
|
58 | 93 |
@pytest.mark.datafiles(DATA_DIR)
|
59 | 94 |
@pytest.mark.parametrize('deps', [('build'), ('none'), ('run'), ('all')])
|
60 | 95 |
def test_source_checkout_deps(datafiles, cli, deps):
|
... | ... | @@ -135,3 +170,38 @@ def test_source_checkout_fetch(datafiles, cli, fetch): |
135 | 170 |
assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
|
136 | 171 |
else:
|
137 | 172 |
result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources')
|
173 |
+ |
|
174 |
+ |
|
175 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
176 |
+def test_source_checkout_build_scripts(cli, tmpdir, datafiles):
|
|
177 |
+ project_path = os.path.join(datafiles.dirname, datafiles.basename)
|
|
178 |
+ element_name = 'source-bundle/source-bundle-hello.bst'
|
|
179 |
+ normal_name = 'source-bundle-source-bundle-hello'
|
|
180 |
+ checkout = os.path.join(str(tmpdir), 'source-checkout')
|
|
181 |
+ |
|
182 |
+ args = ['source-checkout', '--include-build-scripts', element_name, checkout]
|
|
183 |
+ result = cli.run(project=project_path, args=args)
|
|
184 |
+ result.assert_success()
|
|
185 |
+ |
|
186 |
+ # There should be a script for each element (just one in this case) and a top level build script
|
|
187 |
+ expected_scripts = ['build.sh', 'build-' + normal_name]
|
|
188 |
+ for script in expected_scripts:
|
|
189 |
+ assert script in os.listdir(checkout)
|
|
190 |
+ |
|
191 |
+ |
|
192 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
193 |
+def test_source_checkout_tar_buildscripts(cli, tmpdir, datafiles):
|
|
194 |
+ project_path = os.path.join(datafiles.dirname, datafiles.basename)
|
|
195 |
+ element_name = 'source-bundle/source-bundle-hello.bst'
|
|
196 |
+ normal_name = 'source-bundle-source-bundle-hello'
|
|
197 |
+ tar_file = os.path.join(str(tmpdir), 'source-checkout.tar')
|
|
198 |
+ |
|
199 |
+ args = ['source-checkout', '--include-build-scripts', '--tar', element_name, tar_file]
|
|
200 |
+ result = cli.run(project=project_path, args=args)
|
|
201 |
+ result.assert_success()
|
|
202 |
+ |
|
203 |
+ expected_scripts = ['build.sh', 'build-' + normal_name]
|
|
204 |
+ |
|
205 |
+ with tarfile.open(tar_file, 'r') as tf:
|
|
206 |
+ for script in expected_scripts:
|
|
207 |
+ assert script in tf.getnames()
|
... | ... | @@ -119,7 +119,7 @@ class WorkspaceCreater(): |
119 | 119 |
return element_tuples
|
120 | 120 |
|
121 | 121 |
def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
|
122 |
- element_attrs=None):
|
|
122 |
+ element_attrs=None, force=False):
|
|
123 | 123 |
|
124 | 124 |
element_tuples = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
|
125 | 125 |
element_attrs)
|
... | ... | @@ -130,6 +130,8 @@ class WorkspaceCreater(): |
130 | 130 |
args = ['workspace', 'open']
|
131 | 131 |
if track:
|
132 | 132 |
args.append('--track')
|
133 |
+ if force:
|
|
134 |
+ args.append('--force')
|
|
133 | 135 |
if workspace_dir is not None:
|
134 | 136 |
assert len(element_tuples) == 1, "test logic error"
|
135 | 137 |
_, workspace_dir = element_tuples[0]
|
... | ... | @@ -153,10 +155,10 @@ class WorkspaceCreater(): |
153 | 155 |
|
154 | 156 |
|
155 | 157 |
def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
|
156 |
- project_path=None, element_attrs=None):
|
|
158 |
+ project_path=None, element_attrs=None, force=False):
|
|
157 | 159 |
workspace_object = WorkspaceCreater(cli, tmpdir, datafiles, project_path)
|
158 | 160 |
workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
|
159 |
- element_attrs)
|
|
161 |
+ element_attrs, force)
|
|
160 | 162 |
assert len(workspaces) == 1
|
161 | 163 |
element_name, workspace = workspaces[0]
|
162 | 164 |
return element_name, workspace_object.project_path, workspace
|
... | ... | @@ -1209,3 +1211,65 @@ def test_external_list(cli, datafiles, tmpdir_factory): |
1209 | 1211 |
|
1210 | 1212 |
result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
|
1211 | 1213 |
result.assert_success()
|
1214 |
+ |
|
1215 |
+ |
|
1216 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1217 |
+@pytest.mark.parametrize(
|
|
1218 |
+ "force, close_from_external",
|
|
1219 |
+ [(False, False), (True, True), (True, False)],
|
|
1220 |
+ ids=["no-force", "close-from-external", "no-close-from-external"]
|
|
1221 |
+)
|
|
1222 |
+def test_multiple_projects(cli, datafiles, tmpdir_factory, force, close_from_external):
|
|
1223 |
+ # i.e. multiple projects can open the same workspace
|
|
1224 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
1225 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
1226 |
+ workspace_dir = os.path.join(str(tmpdir1), "workspace")
|
|
1227 |
+ alpha_project = os.path.join(str(tmpdir1), "alpha-project")
|
|
1228 |
+ beta_project = os.path.join(str(tmpdir2), "beta-project")
|
|
1229 |
+ |
|
1230 |
+ # Open the same workspace with two different projects
|
|
1231 |
+ alpha_element, alpha_project, _ = open_workspace(
|
|
1232 |
+ cli, tmpdir1, datafiles, "git", False, workspace_dir=workspace_dir,
|
|
1233 |
+ project_path=alpha_project, suffix="-alpha"
|
|
1234 |
+ )
|
|
1235 |
+ if force:
|
|
1236 |
+ beta_element, beta_project, _ = open_workspace(
|
|
1237 |
+ cli, tmpdir2, datafiles, "git", False, workspace_dir=workspace_dir,
|
|
1238 |
+ project_path=beta_project, suffix="-beta", force=force
|
|
1239 |
+ )
|
|
1240 |
+ else:
|
|
1241 |
+ # Opening a workspace on an existing workspace must only work with "--force"
|
|
1242 |
+ message = "Opening an already-existing workspace without --force should fail"
|
|
1243 |
+ with pytest.raises(AssertionError, message=message):
|
|
1244 |
+ open_workspace(cli, tmpdir2, datafiles, "git", False, workspace_dir=workspace_dir,
|
|
1245 |
+ project_path=beta_project, suffix="-beta", force=force)
|
|
1246 |
+ return
|
|
1247 |
+ |
|
1248 |
+ # Run a command and assert it came from the alpha-element
|
|
1249 |
+ # Using element guessing as a way of easily telling which project was used
|
|
1250 |
+ result = cli.run(project=alpha_project, args=['-C', workspace_dir, 'show', '--format', '%{name}'])
|
|
1251 |
+ result.assert_success()
|
|
1252 |
+ assert result.output.strip() == alpha_element
|
|
1253 |
+ |
|
1254 |
+ # Close the workspace
|
|
1255 |
+ args = ((["-C", workspace_dir] if close_from_external else []) +
|
|
1256 |
+ ['workspace', 'close'] +
|
|
1257 |
+ ([] if close_from_external else [alpha_element]))
|
|
1258 |
+ result = cli.run(project=alpha_project, args=args)
|
|
1259 |
+ result.assert_success()
|
|
1260 |
+ |
|
1261 |
+ # Check that the 'beta' element is now found
|
|
1262 |
+ result = cli.run(project=beta_project, args=['-C', workspace_dir, 'show', '--format', '%{name}'])
|
|
1263 |
+ result.assert_success()
|
|
1264 |
+ assert result.output.strip() == beta_element
|
|
1265 |
+ |
|
1266 |
+ # Close the workspace again
|
|
1267 |
+ args = ((["-C", workspace_dir] if close_from_external else []) +
|
|
1268 |
+ ['workspace', 'close'] +
|
|
1269 |
+ ([] if close_from_external else [beta_element]))
|
|
1270 |
+ result = cli.run(project=beta_project, args=args)
|
|
1271 |
+ result.assert_success()
|
|
1272 |
+ |
|
1273 |
+ # Check that the workspace no longer works
|
|
1274 |
+ result = cli.run(project=alpha_project, args=['-C', workspace_dir, 'show', '--format', '%{name}'])
|
|
1275 |
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_PROJECT_CONF)
|
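Note: the parametrization above gives each (force, close_from_external) combination a readable test id. A self-contained sketch of the same pytest pattern, detached from the BuildStream fixtures:

    import pytest

    # Each tuple becomes one test case; ids= names them in test output.
    @pytest.mark.parametrize(
        "force, close_from_external",
        [(False, False), (True, True), (True, False)],
        ids=["no-force", "close-from-external", "no-close-from-external"],
    )
    def test_id_naming(force, close_from_external):
        # Mirrors the invariant above: closing from outside the project
        # is only exercised when the second open was forced.
        assert force or not close_from_external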
@@ -1,4 +1,5 @@
 #
+# Copyright (C) 2018 Codethink Limited
 # Copyright (C) 2018 Bloomberg Finance LP
 #
 # This program is free software; you can redistribute it and/or
@@ -14,15 +15,17 @@
 # You should have received a copy of the GNU Lesser General Public
 # License along with this library. If not, see <http://www.gnu.org/licenses/>.
 #
-# Authors: Chandan Singh <csingh43 bloomberg net>
+# Authors: Richard Maw <richard maw codethink co uk>
 #
 
 import os
-import tarfile
-
 import pytest
 
-from tests.testutils import cli
+from tests.testutils import cli_integration as cli
+
+
+pytestmark = pytest.mark.integration
+
 
 # Project directory
 DATA_DIR = os.path.join(
@@ -31,18 +34,35 @@ DATA_DIR = os.path.join(
 )
 
 
+@pytest.mark.integration
 @pytest.mark.datafiles(DATA_DIR)
-def test_source_bundle(cli, tmpdir, datafiles):
-    project_path = os.path.join(datafiles.dirname, datafiles.basename)
-    element_name = 'source-bundle/source-bundle-hello.bst'
-    normal_name = 'source-bundle-source-bundle-hello'
-
-    # Verify that we can correctly produce a source-bundle
-    args = ['source-bundle', element_name, '--directory', str(tmpdir)]
-    result = cli.run(project=project_path, args=args)
-    result.assert_success()
-
-    # Verify that the source-bundle contains our sources and a build script
-    with tarfile.open(os.path.join(str(tmpdir), '{}.tar.gz'.format(normal_name))) as bundle:
-        assert os.path.join(normal_name, 'source', normal_name, 'llamas.txt') in bundle.getnames()
-        assert os.path.join(normal_name, 'build.sh') in bundle.getnames()
+def test_artifact_log(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+
+    # Get the cache key of our test element
+    result = cli.run(project=project, silent=True, args=[
+        '--no-colors',
+        'show', '--deps', 'none', '--format', '%{full-key}',
+        'base.bst'
+    ])
+    key = result.output.strip()
+
+    # Ensure we have an artifact to read
+    result = cli.run(project=project, args=['build', 'base.bst'])
+    assert result.exit_code == 0
+
+    # Read the log via the element name
+    result = cli.run(project=project, args=['artifact', 'log', 'base.bst'])
+    assert result.exit_code == 0
+    log = result.output
+
+    # Read the log via the key
+    result = cli.run(project=project, args=['artifact', 'log', 'test/base/' + key])
+    assert result.exit_code == 0
+    assert log == result.output
+
+    # Read the log via a glob
+    result = cli.run(project=project, args=['artifact', 'log', 'test/base/*'])
+    assert result.exit_code == 0
+    # The artifact is cached under both a strong key and a weak key
+    assert (log + log) == result.output
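Note: the final assertion holds because the glob `test/base/*` matches the artifact under both its strong and its weak cache key, so the same log is printed twice. A sketch of the matching idea, with the ref layout (project/element/key) assumed from the test and dummy key values:

    from fnmatch import fnmatch

    # Two refs for one artifact: a strong key and a weak key (dummy values).
    refs = ['test/base/' + 'a' * 64,
            'test/base/' + 'b' * 64]
    matches = [ref for ref in refs if fnmatch(ref, 'test/base/*')]
    assert len(matches) == 2  # hence 'log + log' in the test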
@@ -1,44 +0,0 @@
-from contextlib import contextmanager
-import os
-
-
-# MockAttributeResult
-#
-# A class to take a dictionary of kwargs and make them accessible via
-# attributes of the object.
-#
-class MockAttributeResult(dict):
-    __getattr__ = dict.get
-
-
-# mock_statvfs():
-#
-# Gets a function which mocks statvfs and returns a statvfs result with the kwargs accessible.
-#
-# Returns:
-#    func(path) -> object: object will have all the kwargs accessible via object.kwarg
-#
-# Example:
-#    statvfs = mock_statvfs(f_blocks=10)
-#    result = statvfs("regardless/of/path")
-#    assert result.f_blocks == 10  # True
-def mock_statvfs(**kwargs):
-    def statvfs(path):
-        return MockAttributeResult(kwargs)
-    return statvfs
-
-
-# monkey_patch()
-#
-# with monkey_patch("statvfs", custom_statvfs):
-#     assert os.statvfs == custom_statvfs  # True
-# assert os.statvfs == custom_statvfs  # False
-#
-@contextmanager
-def monkey_patch(to_patch, patched_func):
-    orig = getattr(os, to_patch)
-    setattr(os, to_patch, patched_func)
-    try:
-        yield
-    finally:
-        setattr(os, to_patch, orig)
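Note: the deleted helpers are covered by the standard library's `unittest.mock`, which the diff below switches to. A minimal sketch of the equivalent, assuming only stock `os.statvfs` behaviour (values are illustrative):

    import os
    from types import SimpleNamespace
    from unittest import mock

    # A plain namespace stands in for MockAttributeResult: any attribute
    # the code under test reads can simply be assigned.
    fake = SimpleNamespace(f_bsize=4096, f_bavail=1024)

    # mock.patch() replaces the hand-rolled monkey_patch() context manager
    # and restores os.statvfs on exit, even if an exception is raised.
    with mock.patch('os.statvfs', return_value=fake):
        assert os.statvfs('/any/path').f_bsize == 4096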
@@ -1,9 +1,9 @@
+import os
+from unittest import mock
+
 from buildstream import _yaml
-from ..testutils import mock_os
-from ..testutils.runcli import cli
 
-import os
-import pytest
+from ..testutils.runcli import cli
 
 
 KiB = 1024
@@ -13,7 +13,6 @@ TiB = (GiB * 1024)
 
 
 def test_parse_size_over_1024T(cli, tmpdir):
-    BLOCK_SIZE = 4096
     cli.configure({
         'cache': {
             'quota': 2048 * TiB
@@ -23,9 +22,13 @@ def test_parse_size_over_1024T(cli, tmpdir):
     os.makedirs(str(project))
     _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
 
-    bavail = (1025 * TiB) / BLOCK_SIZE
-    patched_statvfs = mock_os.mock_statvfs(f_bavail=bavail, f_bsize=BLOCK_SIZE)
-    with mock_os.monkey_patch("statvfs", patched_statvfs):
+    volume_space_patch = mock.patch(
+        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
+        autospec=True,
+        return_value=(1025 * TiB, 1025 * TiB)
+    )
+
+    with volume_space_patch:
         result = cli.run(project, args=["build", "file.bst"])
         failure_msg = 'Your system does not have enough available space to support the cache quota specified.'
         assert failure_msg in result.stderr
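Note: `autospec=True` makes the patched method keep the real method's signature, including `self`, so mismatched calls fail loudly instead of passing silently. A standalone sketch of the pattern, with a hypothetical class standing in for ArtifactCache:

    from unittest import mock

    # Hypothetical stand-in for the class whose method is patched above.
    class Cache:
        def _space_info(self, volume):
            raise RuntimeError("would touch the filesystem")

    # autospec=True validates calls against the real method's signature;
    # return_value fakes a two-value space result, mirroring the tuple above.
    space_patch = mock.patch.object(
        Cache, '_space_info', autospec=True,
        return_value=(1025, 1025)
    )

    with space_patch:
        assert Cache()._space_info('/cache') == (1025, 1025)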