Jonathan Maw pushed to branch jonathan/workspace-fragment-create at BuildStream / buildstream
Commits:
-
fe33e328
by James Ennis at 2018-11-08T17:54:18Z
-
09faf002
by James Ennis at 2018-11-08T17:54:18Z
-
d153453c
by Javier Jardón at 2018-11-08T18:22:54Z
-
642a1a3b
by Jonathan Maw at 2018-11-09T11:00:27Z
-
96675873
by Jonathan Maw at 2018-11-09T11:00:27Z
-
f818c118
by Jonathan Maw at 2018-11-09T11:00:27Z
-
299fe0e2
by Jonathan Maw at 2018-11-09T11:00:27Z
-
3b6ecbfe
by Jonathan Maw at 2018-11-09T11:00:27Z
-
50e4f7e0
by Jonathan Maw at 2018-11-09T11:00:27Z
-
77bc3d85
by Jonathan Maw at 2018-11-09T11:00:27Z
-
0793dae9
by Jonathan Maw at 2018-11-09T11:00:27Z
-
14405073
by Jonathan Maw at 2018-11-09T11:00:27Z
-
59433d7a
by Jonathan Maw at 2018-11-09T11:00:27Z
-
ca696da8
by Jonathan Maw at 2018-11-09T11:00:27Z
-
91231d75
by Jonathan Maw at 2018-11-09T11:00:27Z
10 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_frontend/cli.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/_workspaces.py
- buildstream/utils.py
- doc/source/using_config.rst
- tests/frontend/workspace.py
- tests/integration/shell.py
- tests/utils/misc.py
Changes:
... | ... | @@ -937,15 +937,22 @@ class ArtifactCache(): |
937 | 937 |
"Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
|
938 | 938 |
"BuildStream requires a minimum cache quota of 2G.")
|
939 | 939 |
elif cache_quota > cache_size + available_space: # Check maximum
|
940 |
+ if '%' in self.context.config_cache_quota:
|
|
941 |
+ available = (available_space / (stat.f_blocks * stat.f_bsize)) * 100
|
|
942 |
+ available = '{}% of total disk space'.format(round(available, 1))
|
|
943 |
+ else:
|
|
944 |
+ available = utils._pretty_size(available_space)
|
|
945 |
+ |
|
940 | 946 |
raise LoadError(LoadErrorReason.INVALID_DATA,
|
941 | 947 |
("Your system does not have enough available " +
|
942 | 948 |
"space to support the cache quota specified.\n" +
|
943 |
- "You currently have:\n" +
|
|
944 |
- "- {used} of cache in use at {local_cache_path}\n" +
|
|
945 |
- "- {available} of available system storage").format(
|
|
946 |
- used=utils._pretty_size(cache_size),
|
|
947 |
- local_cache_path=self.context.artifactdir,
|
|
948 |
- available=utils._pretty_size(available_space)))
|
|
949 |
+ "\nYou have specified a quota of {quota} total disk space.\n" +
|
|
950 |
+ "- The filesystem containing {local_cache_path} only " +
|
|
951 |
+ "has: {available_size} available.")
|
|
952 |
+ .format(
|
|
953 |
+ quota=self.context.config_cache_quota,
|
|
954 |
+ local_cache_path=self.context.artifactdir,
|
|
955 |
+ available_size=available))
|
|
949 | 956 |
|
950 | 957 |
# Place a slight headroom (2e9 (2GB) on the cache_quota) into
|
951 | 958 |
# cache_quota to try and avoid exceptions.
|
... | ... | @@ -59,18 +59,9 @@ def complete_target(args, incomplete): |
59 | 59 |
:return: all the possible user-specified completions for the param
|
60 | 60 |
"""
|
61 | 61 |
|
62 |
+ from .. import utils
|
|
62 | 63 |
project_conf = 'project.conf'
|
63 | 64 |
|
64 |
- def ensure_project_dir(directory):
|
|
65 |
- directory = os.path.abspath(directory)
|
|
66 |
- while not os.path.isfile(os.path.join(directory, project_conf)):
|
|
67 |
- parent_dir = os.path.dirname(directory)
|
|
68 |
- if directory == parent_dir:
|
|
69 |
- break
|
|
70 |
- directory = parent_dir
|
|
71 |
- |
|
72 |
- return directory
|
|
73 |
- |
|
74 | 65 |
# First resolve the directory, in case there is an
|
75 | 66 |
# active --directory/-C option
|
76 | 67 |
#
|
... | ... | @@ -89,7 +80,7 @@ def complete_target(args, incomplete): |
89 | 80 |
else:
|
90 | 81 |
# Check if this directory or any of its parent directories
|
91 | 82 |
# contain a project config file
|
92 |
- base_directory = ensure_project_dir(base_directory)
|
|
83 |
+ base_directory = utils._search_upward_for_file(base_directory, project_conf)
|
|
93 | 84 |
|
94 | 85 |
# Now parse the project.conf just to find the element path,
|
95 | 86 |
# this is unfortunately a bit heavy.
|
... | ... | @@ -713,10 +704,12 @@ def workspace_open(app, no_checkout, force, track_, element, directory): |
713 | 704 |
help="Remove the path that contains the closed workspace")
|
714 | 705 |
@click.option('--all', '-a', 'all_', default=False, is_flag=True,
|
715 | 706 |
help="Close all open workspaces")
|
707 |
+@click.option('--force', '-f', default=False, is_flag=True,
|
|
708 |
+ help="Always close the workspace and/or delete your changes")
|
|
716 | 709 |
@click.argument('elements', nargs=-1,
|
717 | 710 |
type=click.Path(readable=False))
|
718 | 711 |
@click.pass_obj
|
719 |
-def workspace_close(app, remove_dir, all_, elements):
|
|
712 |
+def workspace_close(app, remove_dir, all_, force, elements):
|
|
720 | 713 |
"""Close a workspace"""
|
721 | 714 |
|
722 | 715 |
if not (all_ or elements):
|
... | ... | @@ -735,15 +728,25 @@ def workspace_close(app, remove_dir, all_, elements): |
735 | 728 |
|
736 | 729 |
elements = app.stream.redirect_element_names(elements)
|
737 | 730 |
|
738 |
- # Check that the workspaces in question exist
|
|
731 |
+ # Check that the workspaces in question exist, and that it's safe to
|
|
732 |
+ # remove them.
|
|
739 | 733 |
nonexisting = []
|
740 | 734 |
for element_name in elements:
|
741 | 735 |
if not app.stream.workspace_exists(element_name):
|
742 | 736 |
nonexisting.append(element_name)
|
737 |
+ if app.stream.workspace_is_required(element_name):
|
|
738 |
+ if app.interactive:
|
|
739 |
+ click.echo("Removing '{}' will prevent you from running buildstream commands".format(element_name))
|
|
740 |
+ if not click.confirm('Are you sure you want to close this workspace?'):
|
|
741 |
+ click.echo('Aborting', err=True)
|
|
742 |
+ sys.exit(-1)
|
|
743 |
+ elif not force:
|
|
744 |
+ raise AppError("Cannot close workspaces. Workspace {} is being used to load the project"
|
|
745 |
+ .format(element_name), reason='closing-required-workspace')
|
|
743 | 746 |
if nonexisting:
|
744 | 747 |
raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
|
745 | 748 |
|
746 |
- if app.interactive and remove_dir:
|
|
749 |
+ if app.interactive and remove_dir and not force:
|
|
747 | 750 |
if not click.confirm('This will remove all your changes, are you sure?'):
|
748 | 751 |
click.echo('Aborting', err=True)
|
749 | 752 |
sys.exit(-1)
|
... | ... | @@ -40,6 +40,7 @@ from .element import Element |
40 | 40 |
from ._message import Message, MessageType
|
41 | 41 |
from ._includes import Includes
|
42 | 42 |
from ._platform import Platform
|
43 |
+from ._workspaces import WorkspaceLocal
|
|
43 | 44 |
|
44 | 45 |
|
45 | 46 |
# Project Configuration file
|
... | ... | @@ -94,8 +95,8 @@ class Project(): |
94 | 95 |
# The project name
|
95 | 96 |
self.name = None
|
96 | 97 |
|
97 |
- # The project directory
|
|
98 |
- self.directory = self._ensure_project_dir(directory)
|
|
98 |
+ # The project directory, and whether the project was found from an external workspace
|
|
99 |
+ self.directory, self._required_workspace_element = self._find_project_dir(directory)
|
|
99 | 100 |
|
100 | 101 |
# Absolute path to where elements are loaded from within the project
|
101 | 102 |
self.element_path = None
|
... | ... | @@ -357,6 +358,14 @@ class Project(): |
357 | 358 |
|
358 | 359 |
self._load_second_pass()
|
359 | 360 |
|
361 |
+ # required_workspace_element()
|
|
362 |
+ #
|
|
363 |
+ # Returns the element whose workspace is required to load this project,
|
|
364 |
+ # if any.
|
|
365 |
+ #
|
|
366 |
+ def required_workspace_element(self):
|
|
367 |
+ return self._required_workspace_element
|
|
368 |
+ |
|
360 | 369 |
# cleanup()
|
361 | 370 |
#
|
362 | 371 |
# Cleans up resources used loading elements
|
... | ... | @@ -645,7 +654,7 @@ class Project(): |
645 | 654 |
# Source url aliases
|
646 | 655 |
output._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
|
647 | 656 |
|
648 |
- # _ensure_project_dir()
|
|
657 |
+ # _find_project_dir()
|
|
649 | 658 |
#
|
650 | 659 |
# Returns path of the project directory, if a configuration file is found
|
651 | 660 |
# in given directory or any of its parent directories.
|
... | ... | @@ -656,18 +665,24 @@ class Project(): |
656 | 665 |
# Raises:
|
657 | 666 |
# LoadError if project.conf is not found
|
658 | 667 |
#
|
659 |
- def _ensure_project_dir(self, directory):
|
|
660 |
- directory = os.path.abspath(directory)
|
|
661 |
- while not os.path.isfile(os.path.join(directory, _PROJECT_CONF_FILE)):
|
|
662 |
- parent_dir = os.path.dirname(directory)
|
|
663 |
- if directory == parent_dir:
|
|
668 |
+ # Returns:
|
|
669 |
+ # (str) - the directory that contains the project, and
|
|
670 |
+ # (str) - the name of the element required to find the project, or an empty string
|
|
671 |
+ def _find_project_dir(self, directory):
|
|
672 |
+ workspace_element = ""
|
|
673 |
+ project_directory = utils._search_upward_for_file(directory, _PROJECT_CONF_FILE)
|
|
674 |
+ if not project_directory:
|
|
675 |
+ workspace_local = WorkspaceLocal.load(directory)
|
|
676 |
+ if workspace_local:
|
|
677 |
+ project_directory = workspace_local.get_default_path()
|
|
678 |
+ workspace_element = workspace_local.get_default_element()
|
|
679 |
+ else:
|
|
664 | 680 |
raise LoadError(
|
665 | 681 |
LoadErrorReason.MISSING_PROJECT_CONF,
|
666 | 682 |
'{} not found in current directory or any of its parent directories'
|
667 | 683 |
.format(_PROJECT_CONF_FILE))
|
668 |
- directory = parent_dir
|
|
669 | 684 |
|
670 |
- return directory
|
|
685 |
+ return project_directory, workspace_element
|
|
671 | 686 |
|
672 | 687 |
def _load_plugin_factories(self, config, output):
|
673 | 688 |
plugin_source_origins = [] # Origins of custom sources
|
... | ... | @@ -32,6 +32,7 @@ from ._exceptions import StreamError, ImplError, BstError, set_last_task_error |
32 | 32 |
from ._message import Message, MessageType
|
33 | 33 |
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
|
34 | 34 |
from ._pipeline import Pipeline, PipelineSelection
|
35 |
+from ._workspaces import WorkspaceLocal
|
|
35 | 36 |
from . import utils, _yaml, _site
|
36 | 37 |
from . import Scope, Consistency
|
37 | 38 |
|
... | ... | @@ -516,6 +517,10 @@ class Stream(): |
516 | 517 |
with target.timed_activity("Staging sources to {}".format(directory)):
|
517 | 518 |
target._open_workspace()
|
518 | 519 |
|
520 |
+ project = self._context.get_toplevel_project()
|
|
521 |
+ workspace_local = WorkspaceLocal.create(directory, project.directory, target._get_full_name())
|
|
522 |
+ workspace_local.write()
|
|
523 |
+ |
|
519 | 524 |
workspaces.save_config()
|
520 | 525 |
self._message(MessageType.INFO, "Saved workspace configuration")
|
521 | 526 |
|
... | ... | @@ -540,6 +545,11 @@ class Stream(): |
540 | 545 |
except OSError as e:
|
541 | 546 |
raise StreamError("Could not remove '{}': {}"
|
542 | 547 |
.format(workspace.get_absolute_path(), e)) from e
|
548 |
+ else:
|
|
549 |
+ # TODO: At some point, closing a workspace should only delete the file if no projects are using it.
|
|
550 |
+ workspace_local = WorkspaceLocal.load(workspace.get_absolute_path())
|
|
551 |
+ if workspace_local:
|
|
552 |
+ workspace_local.delete()
|
|
543 | 553 |
|
544 | 554 |
# Delete the workspace and save the configuration
|
545 | 555 |
workspaces.delete_workspace(element_name)
|
... | ... | @@ -633,6 +643,20 @@ class Stream(): |
633 | 643 |
|
634 | 644 |
return False
|
635 | 645 |
|
646 |
+ # workspace_is_required()
|
|
647 |
+ #
|
|
648 |
+ # Checks whether the workspace belonging to element_name is required to
|
|
649 |
+ # load the project
|
|
650 |
+ #
|
|
651 |
+ # Args:
|
|
652 |
+ # element_name (str): The element whose workspace may be required
|
|
653 |
+ #
|
|
654 |
+ # Returns:
|
|
655 |
+ # (bool): True if the workspace is required
|
|
656 |
+ def workspace_is_required(self, element_name):
|
|
657 |
+ required_elm = self._project.required_workspace_element()
|
|
658 |
+ return required_elm == element_name
|
|
659 |
+ |
|
636 | 660 |
# workspace_list
|
637 | 661 |
#
|
638 | 662 |
# Serializes the workspaces and dumps them in YAML to stdout.
|
... | ... | @@ -25,6 +25,149 @@ from ._exceptions import LoadError, LoadErrorReason |
25 | 25 |
|
26 | 26 |
|
27 | 27 |
BST_WORKSPACE_FORMAT_VERSION = 3
|
28 |
+BST_WORKSPACE_LOCAL_FORMAT_VERSION = 1
|
|
29 |
+WORKSPACE_LOCAL_FILE = ".bstproject.yaml"
|
|
30 |
+ |
|
31 |
+ |
|
32 |
+# WorkspaceLocal()
|
|
33 |
+#
|
|
34 |
+# An object to contain various helper functions and data required for
|
|
35 |
+# referring from a workspace back to buildstream.
|
|
36 |
+#
|
|
37 |
+# Args:
|
|
38 |
+# directory (str): The directory that the workspace exists in
|
|
39 |
+# project_path (str): The project path used to refer back
|
|
40 |
+# to buildstream projects.
|
|
41 |
+# element_name (str): The name of the element used to create this workspace.
|
|
42 |
+class WorkspaceLocal():
|
|
43 |
+ def __init__(self, directory, project_path="", element_name=""):
|
|
44 |
+ self._projects = []
|
|
45 |
+ self._directory = directory
|
|
46 |
+ |
|
47 |
+ assert (project_path and element_name) or (not project_path and not element_name)
|
|
48 |
+ if project_path:
|
|
49 |
+ self._add_project(project_path, element_name)
|
|
50 |
+ |
|
51 |
+ # get_default_path()
|
|
52 |
+ #
|
|
53 |
+ # Retrieves the default path to a project.
|
|
54 |
+ #
|
|
55 |
+ # Returns:
|
|
56 |
+ # (str): The path to a project
|
|
57 |
+ def get_default_path(self):
|
|
58 |
+ return self._projects[0]['project-path']
|
|
59 |
+ |
|
60 |
+ # get_default_element()
|
|
61 |
+ #
|
|
62 |
+ # Retrieves the name of the element that owns this workspace.
|
|
63 |
+ #
|
|
64 |
+ # Returns:
|
|
65 |
+ # (str): The name of an element
|
|
66 |
+ def get_default_element(self):
|
|
67 |
+ return self._projects[0]['element-name']
|
|
68 |
+ |
|
69 |
+ # to_dict()
|
|
70 |
+ #
|
|
71 |
+ # Turn the members data into a dict for serialization purposes
|
|
72 |
+ #
|
|
73 |
+ # Returns:
|
|
74 |
+ # (dict): A dict representation of the WorkspaceLocal
|
|
75 |
+ #
|
|
76 |
+ def to_dict(self):
|
|
77 |
+ ret = {
|
|
78 |
+ 'projects': self._projects,
|
|
79 |
+ 'format-version': BST_WORKSPACE_LOCAL_FORMAT_VERSION,
|
|
80 |
+ }
|
|
81 |
+ return ret
|
|
82 |
+ |
|
83 |
+ # from_dict()
|
|
84 |
+ #
|
|
85 |
+ # Loads a new WorkspaceLocal from a simple dictionary
|
|
86 |
+ #
|
|
87 |
+ # Args:
|
|
88 |
+ # directory (str): The directory that the workspace exists in
|
|
89 |
+ # dictionary (dict): The dict to generate a WorkspaceLocal from
|
|
90 |
+ #
|
|
91 |
+ # Returns:
|
|
92 |
+ # (WorkspaceLocal): A newly instantiated WorkspaceLocal
|
|
93 |
+ @classmethod
|
|
94 |
+ def from_dict(cls, directory, dictionary):
|
|
95 |
+ # Only know how to handle one format-version at the moment.
|
|
96 |
+ format_version = int(dictionary['format-version'])
|
|
97 |
+ assert format_version == BST_WORKSPACE_LOCAL_FORMAT_VERSION, \
|
|
98 |
+ "Format version {} not found in {}".format(BST_WORKSPACE_LOCAL_FORMAT_VERSION, dictionary)
|
|
99 |
+ |
|
100 |
+ workspace_local = cls(directory)
|
|
101 |
+ for item in dictionary['projects']:
|
|
102 |
+ workspace_local._add_project(item['project-path'], item['element-name'])
|
|
103 |
+ |
|
104 |
+ return workspace_local
|
|
105 |
+ |
|
106 |
+ # create()
|
|
107 |
+ #
|
|
108 |
+ # Creates a new WorkspaceLocal
|
|
109 |
+ #
|
|
110 |
+ # Args:
|
|
111 |
+ # directory (str): The directory that the workspace exists in
|
|
112 |
+ # project_path (str): The path to the project to store
|
|
113 |
+ # element_name (str): The name of the element within the project
|
|
114 |
+ #
|
|
115 |
+ # Returns:
|
|
116 |
+ # (WorkspaceLocal): The created WorkspaceLocal
|
|
117 |
+ @classmethod
|
|
118 |
+ def create(cls, directory, project_path, element_name):
|
|
119 |
+ # TODO: Load WorkspaceLocal if it exists, and maybe add project_path to it
|
|
120 |
+ return cls(directory, project_path, element_name)
|
|
121 |
+ |
|
122 |
+ # load()
|
|
123 |
+ #
|
|
124 |
+ # Loads the WorkspaceLocal for a given directory. This directory may be a
|
|
125 |
+ # subdirectory of the workspace's directory.
|
|
126 |
+ #
|
|
127 |
+ # Args:
|
|
128 |
+ # directory (str): The directory
|
|
129 |
+ # Returns:
|
|
130 |
+ # (WorkspaceLocal): The created WorkspaceLocal, if in a workspace, or
|
|
131 |
+ # (NoneType): None, if the directory is not inside a workspace.
|
|
132 |
+ @classmethod
|
|
133 |
+ def load(cls, directory):
|
|
134 |
+ local_dir = cls.search_for_dir(directory)
|
|
135 |
+ if local_dir:
|
|
136 |
+ workspace_file = os.path.join(local_dir, WORKSPACE_LOCAL_FILE)
|
|
137 |
+ data_dict = _yaml.load(workspace_file)
|
|
138 |
+ return cls.from_dict(local_dir, data_dict)
|
|
139 |
+ else:
|
|
140 |
+ return None
|
|
141 |
+ |
|
142 |
+ # write()
|
|
143 |
+ #
|
|
144 |
+ # Writes the WorkspaceLocal to disk
|
|
145 |
+ def write(self):
|
|
146 |
+ os.makedirs(self._directory, exist_ok=True)
|
|
147 |
+ _yaml.dump(self.to_dict(), self._get_filename())
|
|
148 |
+ |
|
149 |
+ # delete()
|
|
150 |
+ #
|
|
151 |
+ # Deletes the WorkspaceLocal from disk, if it exists.
|
|
152 |
+ def delete(self):
|
|
153 |
+ try:
|
|
154 |
+ os.unlink(self._get_filename())
|
|
155 |
+ except FileNotFoundError:
|
|
156 |
+ pass
|
|
157 |
+ |
|
158 |
+ # search_for_dir()
|
|
159 |
+ #
|
|
160 |
+ # Returns the directory that contains the workspace local file,
|
|
161 |
+ # searching upwards from search_dir.
|
|
162 |
+ @staticmethod
|
|
163 |
+ def search_for_dir(search_dir):
|
|
164 |
+ return utils._search_upward_for_file(search_dir, WORKSPACE_LOCAL_FILE)
|
|
165 |
+ |
|
166 |
+ def _get_filename(self):
|
|
167 |
+ return os.path.join(self._directory, WORKSPACE_LOCAL_FILE)
|
|
168 |
+ |
|
169 |
+ def _add_project(self, project_path, element_name):
|
|
170 |
+ self._projects.append({'project-path': project_path, 'element-name': element_name})
|
|
28 | 171 |
|
29 | 172 |
|
30 | 173 |
# Workspace()
|
... | ... | @@ -174,10 +317,15 @@ class Workspace(): |
174 | 317 |
if recalculate or self._key is None:
|
175 | 318 |
fullpath = self.get_absolute_path()
|
176 | 319 |
|
320 |
+ excluded_files = [WORKSPACE_LOCAL_FILE]
|
|
321 |
+ |
|
177 | 322 |
# Get a list of tuples of the project relative paths and fullpaths
|
178 | 323 |
if os.path.isdir(fullpath):
|
179 | 324 |
filelist = utils.list_relative_paths(fullpath)
|
180 |
- filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
|
|
325 |
+ filelist = [
|
|
326 |
+ (relpath, os.path.join(fullpath, relpath)) for relpath in filelist
|
|
327 |
+ if relpath not in excluded_files
|
|
328 |
+ ]
|
|
181 | 329 |
else:
|
182 | 330 |
filelist = [(self.get_absolute_path(), fullpath)]
|
183 | 331 |
|
... | ... | @@ -1199,3 +1199,17 @@ def _deduplicate(iterable, key=None): |
1199 | 1199 |
def _get_link_mtime(path):
|
1200 | 1200 |
path_stat = os.lstat(path)
|
1201 | 1201 |
return path_stat.st_mtime
|
1202 |
+ |
|
1203 |
+ |
|
1204 |
+# Returns the first directory to contain filename, or an empty string if
|
|
1205 |
+# none found
|
|
1206 |
+#
|
|
1207 |
+def _search_upward_for_file(directory, filename):
|
|
1208 |
+ directory = os.path.abspath(directory)
|
|
1209 |
+ while not os.path.isfile(os.path.join(directory, filename)):
|
|
1210 |
+ parent_dir = os.path.dirname(directory)
|
|
1211 |
+ if directory == parent_dir:
|
|
1212 |
+ return ""
|
|
1213 |
+ directory = parent_dir
|
|
1214 |
+ |
|
1215 |
+ return directory
|
... | ... | @@ -147,6 +147,44 @@ The default mirror is defined by its name, e.g. |
147 | 147 |
``--default-mirror`` command-line option.
|
148 | 148 |
|
149 | 149 |
|
150 |
+Local cache expiry
|
|
151 |
+~~~~~~~~~~~~~~~~~~
|
|
152 |
+BuildStream locally caches artifacts, build trees, log files and sources within a
|
|
153 |
+cache located at ``~/.cache/buildstream`` (unless the $XDG_CACHE_HOME environment
|
|
154 |
+variable is set). When building large projects, this cache can get very large,
|
|
155 |
+thus BuildStream will attempt to clean up the cache automatically by expiring the least
|
|
156 |
+recently *used* artifacts.
|
|
157 |
+ |
|
158 |
+By default, cache expiry will begin once the file system which contains the cache
|
|
159 |
+approaches maximum usage. However, it is also possible to impose a quota on the local
|
|
160 |
+cache in the user configuration. This can be done in two ways:
|
|
161 |
+ |
|
162 |
+1. By restricting the maximum size of the cache directory itself.
|
|
163 |
+ |
|
164 |
+For example, to ensure that BuildStream's cache does not grow beyond 100 GB,
|
|
165 |
+simply declare the following in your user configuration (``~/.config/buildstream.conf``):
|
|
166 |
+ |
|
167 |
+.. code:: yaml
|
|
168 |
+ |
|
169 |
+ cache:
|
|
170 |
+ quota: 100G
|
|
171 |
+ |
|
172 |
+This quota defines the maximum size of the artifact cache in bytes.
|
|
173 |
+Other accepted values are: K, M, G or T (or you can simply declare the value in bytes, without the suffix).
|
|
174 |
+This uses the same format as systemd's
|
|
175 |
+`resource-control <https://www.freedesktop.org/software/systemd/man/systemd.resource-control.html>`_.
|
|
176 |
+ |
|
177 |
+2. By expiring artifacts once the file system which contains the cache exceeds a specified usage.
|
|
178 |
+ |
|
179 |
+To ensure that we start cleaning the cache once we've used 80% of local disk space (on the file system
|
|
180 |
+which mounts the cache):
|
|
181 |
+ |
|
182 |
+.. code:: yaml
|
|
183 |
+ |
|
184 |
+ cache:
|
|
185 |
+ quota: 80%
|
|
186 |
+ |
|
187 |
+ |
|
150 | 188 |
Default configuration
|
151 | 189 |
---------------------
|
152 | 190 |
The default BuildStream configuration is specified here for reference:
|
... | ... | @@ -29,6 +29,7 @@ import shutil |
29 | 29 |
import subprocess
|
30 | 30 |
from ruamel.yaml.comments import CommentedSet
|
31 | 31 |
from tests.testutils import cli, create_repo, ALL_REPO_KINDS, wait_for_cache_granularity
|
32 |
+from tests.testutils import create_artifact_share
|
|
32 | 33 |
|
33 | 34 |
from buildstream import _yaml
|
34 | 35 |
from buildstream._exceptions import ErrorDomain, LoadError, LoadErrorReason
|
... | ... | @@ -93,6 +94,13 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir |
93 | 94 |
|
94 | 95 |
result.assert_success()
|
95 | 96 |
|
97 |
+ # Assert that a .bstproject.yaml file has been created
|
|
98 |
+ # and contains the path to the project
|
|
99 |
+ bstproject_path = os.path.join(workspace_dir, '.bstproject.yaml')
|
|
100 |
+ assert os.path.exists(bstproject_path)
|
|
101 |
+ with open(bstproject_path) as f:
|
|
102 |
+ assert project_path in f.read()
|
|
103 |
+ |
|
96 | 104 |
# Assert that we are now buildable because the source is
|
97 | 105 |
# now cached.
|
98 | 106 |
assert cli.get_element_state(project_path, element_name) == 'buildable'
|
... | ... | @@ -148,6 +156,10 @@ def test_open_force(cli, tmpdir, datafiles, kind): |
148 | 156 |
# Assert the workspace dir still exists
|
149 | 157 |
assert os.path.exists(workspace)
|
150 | 158 |
|
159 |
+ # Assert the bstproject doesn't exist
|
|
160 |
+ bstproject_path = os.path.join(workspace, '.bstproject.yaml')
|
|
161 |
+ assert not os.path.exists(bstproject_path)
|
|
162 |
+ |
|
151 | 163 |
# Now open the workspace again with --force, this should happily succeed
|
152 | 164 |
result = cli.run(project=project, args=[
|
153 | 165 |
'workspace', 'open', '--force', element_name, workspace
|
... | ... | @@ -436,7 +448,9 @@ def test_list(cli, tmpdir, datafiles): |
436 | 448 |
@pytest.mark.datafiles(DATA_DIR)
|
437 | 449 |
@pytest.mark.parametrize("kind", repo_kinds)
|
438 | 450 |
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
|
439 |
-def test_build(cli, tmpdir, datafiles, kind, strict):
|
|
451 |
+@pytest.mark.parametrize("call_from", [("project"), ("workspace")])
|
|
452 |
+def test_build(cli, tmpdir_factory, datafiles, kind, strict, call_from):
|
|
453 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
440 | 454 |
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
|
441 | 455 |
checkout = os.path.join(str(tmpdir), 'checkout')
|
442 | 456 |
|
... | ... | @@ -458,16 +472,18 @@ def test_build(cli, tmpdir, datafiles, kind, strict): |
458 | 472 |
}
|
459 | 473 |
})
|
460 | 474 |
|
475 |
+ from_dir = project if call_from == "project" else workspace
|
|
476 |
+ |
|
461 | 477 |
# Build modified workspace
|
462 |
- assert cli.get_element_state(project, element_name) == 'buildable'
|
|
463 |
- assert cli.get_element_key(project, element_name) == "{:?<64}".format('')
|
|
464 |
- result = cli.run(project=project, args=['build', element_name])
|
|
478 |
+ assert cli.get_element_state(from_dir, element_name) == 'buildable'
|
|
479 |
+ assert cli.get_element_key(from_dir, element_name) == "{:?<64}".format('')
|
|
480 |
+ result = cli.run(project=from_dir, args=['build', element_name])
|
|
465 | 481 |
result.assert_success()
|
466 |
- assert cli.get_element_state(project, element_name) == 'cached'
|
|
467 |
- assert cli.get_element_key(project, element_name) != "{:?<64}".format('')
|
|
482 |
+ assert cli.get_element_state(from_dir, element_name) == 'cached'
|
|
483 |
+ assert cli.get_element_key(from_dir, element_name) != "{:?<64}".format('')
|
|
468 | 484 |
|
469 | 485 |
# Checkout the result
|
470 |
- result = cli.run(project=project, args=[
|
|
486 |
+ result = cli.run(project=from_dir, args=[
|
|
471 | 487 |
'checkout', element_name, checkout
|
472 | 488 |
])
|
473 | 489 |
result.assert_success()
|
... | ... | @@ -876,3 +892,127 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles): |
876 | 892 |
result = cli.run(project=project, args=["build", element_name])
|
877 | 893 |
assert "BUG" not in result.stderr
|
878 | 894 |
assert cli.get_element_state(project, element_name) != "cached"
|
895 |
+ |
|
896 |
+ |
|
897 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
898 |
+def test_external_fetch(cli, datafiles, tmpdir_factory):
|
|
899 |
+ # Fetching from a workspace outside a project doesn't fail horribly
|
|
900 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
901 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
902 |
+ |
|
903 |
+ result = cli.run(project=workspace, args=['fetch', element_name])
|
|
904 |
+ result.assert_success()
|
|
905 |
+ |
|
906 |
+ # We already fetched it by opening the workspace, but we're also checking
|
|
907 |
+ # `bst show` works here
|
|
908 |
+ assert cli.get_element_state(workspace, element_name) == 'buildable'
|
|
909 |
+ |
|
910 |
+ |
|
911 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
912 |
+def test_external_push_pull(cli, datafiles, tmpdir_factory):
|
|
913 |
+ # Pushing and pulling to/from an artifact cache works from an external workspace
|
|
914 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
915 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
916 |
+ |
|
917 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
|
918 |
+ result = cli.run(project=workspace, args=['build', element_name])
|
|
919 |
+ result.assert_success()
|
|
920 |
+ |
|
921 |
+ cli.configure({
|
|
922 |
+ 'artifacts': {'url': share.repo, 'push': True}
|
|
923 |
+ })
|
|
924 |
+ |
|
925 |
+ result = cli.run(project=workspace, args=['push', element_name])
|
|
926 |
+ result.assert_success()
|
|
927 |
+ |
|
928 |
+ result = cli.run(project=workspace, args=['pull', '--deps', 'all', 'target.bst'])
|
|
929 |
+ result.assert_success()
|
|
930 |
+ |
|
931 |
+ |
|
932 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
933 |
+def test_external_track(cli, datafiles, tmpdir_factory):
|
|
934 |
+ # Tracking does not get horribly confused
|
|
935 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
936 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", True)
|
|
937 |
+ |
|
938 |
+ # The workspace is necessarily already tracked, so we only care that
|
|
939 |
+ # there's no weird errors.
|
|
940 |
+ result = cli.run(project=workspace, args=['track', element_name])
|
|
941 |
+ result.assert_success()
|
|
942 |
+ |
|
943 |
+ |
|
944 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
945 |
+def test_external_open_other(cli, datafiles, tmpdir_factory):
|
|
946 |
+ # From inside an external workspace, open another workspace
|
|
947 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
948 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
949 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
950 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
951 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
952 |
+ |
|
953 |
+ # Closing the other element first, because I'm too lazy to create an
|
|
954 |
+ # element without opening it
|
|
955 |
+ result = cli.run(project=project, args=['workspace', 'close', beta_element])
|
|
956 |
+ result.assert_success()
|
|
957 |
+ |
|
958 |
+ result = cli.run(project=alpha_workspace, args=['workspace', 'open', '--force', beta_element, beta_workspace])
|
|
959 |
+ result.assert_success()
|
|
960 |
+ |
|
961 |
+ |
|
962 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
963 |
+def test_external_close_other(cli, datafiles, tmpdir_factory):
|
|
964 |
+ # From inside an external workspace, close the other workspace
|
|
965 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
966 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
967 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
968 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
969 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
970 |
+ |
|
971 |
+ result = cli.run(project=alpha_workspace, args=['workspace', 'close', beta_element])
|
|
972 |
+ result.assert_success()
|
|
973 |
+ |
|
974 |
+ |
|
975 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
976 |
+@pytest.mark.parametrize("force", [("force"), ("no-force")])
|
|
977 |
+def test_external_close_self(cli, datafiles, tmpdir_factory, force):
|
|
978 |
+ # From inside an external workspace, close it
|
|
979 |
+ # This is unwise, so is only allowed if --force
|
|
980 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
981 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
982 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
983 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
984 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
985 |
+ |
|
986 |
+ args = ['workspace', 'close']
|
|
987 |
+ if force == "force":
|
|
988 |
+ args.append('--force')
|
|
989 |
+ args.append(alpha_element)
|
|
990 |
+ |
|
991 |
+ result = cli.run(project=alpha_workspace, args=args)
|
|
992 |
+ if force == "force":
|
|
993 |
+ result.assert_success()
|
|
994 |
+ else:
|
|
995 |
+ # TODO: Proper error domain and reason
|
|
996 |
+ result.assert_main_error(ErrorDomain.APP, 'closing-required-workspace')
|
|
997 |
+ |
|
998 |
+ |
|
999 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1000 |
+def test_external_reset_other(cli, datafiles, tmpdir_factory):
|
|
1001 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
1002 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
1003 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
1004 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
1005 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
1006 |
+ |
|
1007 |
+ result = cli.run(project=alpha_workspace, args=['workspace', 'reset', beta_element])
|
|
1008 |
+ result.assert_success()
|
|
1009 |
+ |
|
1010 |
+ |
|
1011 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1012 |
+def test_external_list(cli, datafiles, tmpdir_factory):
|
|
1013 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
1014 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
1015 |
+ element, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
1016 |
+ |
|
1017 |
+ result = cli.run(project=workspace, args=['workspace', 'list'])
|
|
1018 |
+ result.assert_success()
|
... | ... | @@ -339,3 +339,28 @@ def test_integration_devices(cli, tmpdir, datafiles): |
339 | 339 |
|
340 | 340 |
result = execute_shell(cli, project, ["true"], element=element_name)
|
341 | 341 |
assert result.exit_code == 0
|
342 |
+ |
|
343 |
+ |
|
344 |
+# Test that a shell can be opened from an external workspace
|
|
345 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
346 |
+@pytest.mark.parametrize("build_shell", [("build"), ("nobuild")])
|
|
347 |
+def test_integration_external_workspace(cli, tmpdir_factory, datafiles, build_shell):
|
|
348 |
+ tmpdir = tmpdir_factory.mktemp("")
|
|
349 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
350 |
+ element_name = 'autotools/amhello.bst'
|
|
351 |
+ workspace_dir = os.path.join(str(tmpdir), 'workspace')
|
|
352 |
+ |
|
353 |
+ result = cli.run(project=project, args=[
|
|
354 |
+ 'workspace', 'open', element_name, workspace_dir
|
|
355 |
+ ])
|
|
356 |
+ result.assert_success()
|
|
357 |
+ |
|
358 |
+ result = cli.run(project=project, cwd=workspace_dir, args=['build', element_name])
|
|
359 |
+ result.assert_success()
|
|
360 |
+ |
|
361 |
+ command = ['shell']
|
|
362 |
+ if build_shell == 'build':
|
|
363 |
+ command.append('--build')
|
|
364 |
+ command.extend([element_name, '--', 'true'])
|
|
365 |
+ result = cli.run(project=project, cwd=workspace_dir, args=command)
|
|
366 |
+ result.assert_success()
|
... | ... | @@ -27,4 +27,5 @@ def test_parse_size_over_1024T(cli, tmpdir): |
27 | 27 |
patched_statvfs = mock_os.mock_statvfs(f_bavail=bavail, f_bsize=BLOCK_SIZE)
|
28 | 28 |
with mock_os.monkey_patch("statvfs", patched_statvfs):
|
29 | 29 |
result = cli.run(project, args=["build", "file.bst"])
|
30 |
- assert "1025T of available system storage" in result.stderr
|
|
30 |
+ failure_msg = 'Your system does not have enough available space to support the cache quota specified.'
|
|
31 |
+ assert failure_msg in result.stderr
|