Jim MacArthur pushed to branch jmac/cache_artifacts_with_vdir at BuildStream / buildstream
Commits:

- f2fcc2f6 by Valentin David at 2018-12-11T12:12:08Z
- 717c10d1 by Valentin David at 2018-12-11T12:44:56Z
- 7892287a by Jonathan Maw at 2018-12-11T12:56:32Z
- 67c7a58d by Jonathan Maw at 2018-12-11T12:56:32Z
- 64836b18 by Jonathan Maw at 2018-12-11T12:56:32Z
- 7deeb2c3 by Jonathan Maw at 2018-12-11T12:56:32Z
- 496f0ab7 by Jonathan Maw at 2018-12-11T12:56:32Z
- 921f2bcb by Jonathan Maw at 2018-12-11T12:56:32Z
- f145a3e4 by Jonathan Maw at 2018-12-11T12:56:32Z
- 494cb7c6 by Jonathan Maw at 2018-12-11T14:12:54Z
- fd1c5c5a by Jonathan Maw at 2018-12-11T14:12:54Z
- 4219a6b4 by Jonathan Maw at 2018-12-11T16:00:55Z
- 3ad64854 by Jim MacArthur at 2018-12-12T13:06:42Z
- edb2bcbc by Jim MacArthur at 2018-12-12T13:06:42Z
- e05d2140 by Jim MacArthur at 2018-12-12T13:06:42Z
- 521aebd1 by Jim MacArthur at 2018-12-12T13:06:42Z
- 0be8a9ba by Jim MacArthur at 2018-12-12T13:06:42Z
- 13b13471 by Jim MacArthur at 2018-12-12T13:06:42Z
20 changed files:
- NEWS
- buildstream/_artifactcache/cascache.py
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/_workspaces.py
- buildstream/data/userconfig.yaml
- buildstream/element.py
- buildstream/plugins/sources/git.py
- buildstream/storage/_casbaseddirectory.py
- buildstream/storage/_filebaseddirectory.py
- buildstream/storage/directory.py
- buildstream/utils.py
- tests/frontend/cross_junction_workspace.py
- tests/frontend/workspace.py
- tests/integration/shell.py
- tests/sources/git.py
- tests/storage/virtual_directory_import.py
- tests/testutils/repo/git.py
Changes:
| ... | ... | @@ -83,6 +83,10 @@ buildstream 1.3.1 |
| 83 | 83 |
plugin has now a tag tracking feature instead. This can be enabled
|
| 84 | 84 |
by setting 'track-tags'.
|
| 85 | 85 |
|
| 86 |
+ o Opening a workspace now creates a .bstproject.yaml file that allows buildstream
|
|
| 87 |
+ commands to be run from a workspace that is not inside a project.
|
|
| 88 |
+ |
|
| 89 |
+ |
|
| 86 | 90 |
=================
|
| 87 | 91 |
buildstream 1.1.5
|
| 88 | 92 |
=================
|
| ... | ... | @@ -39,6 +39,7 @@ from .. import utils |
| 39 | 39 |
from .._exceptions import CASError, LoadError, LoadErrorReason
|
| 40 | 40 |
from .. import _yaml
|
| 41 | 41 |
|
| 42 |
+from ..storage._casbaseddirectory import CasBasedDirectory
|
|
| 42 | 43 |
|
| 43 | 44 |
# The default limit for gRPC messages is 4 MiB.
|
| 44 | 45 |
# Limit payload to 1 MiB to leave sufficient headroom for metadata.
|
| ... | ... | @@ -768,6 +769,9 @@ class CASCache(): |
| 768 | 769 |
# (Digest): Digest object for the directory added.
|
| 769 | 770 |
#
|
| 770 | 771 |
def _commit_directory(self, path, *, dir_digest=None):
|
| 772 |
+ if isinstance(path, CasBasedDirectory):
|
|
| 773 |
+ return self.add_object(digest=dir_digest, buffer=path.pb2_directory.SerializeToString())
|
|
| 774 |
+ |
|
| 771 | 775 |
directory = remote_execution_pb2.Directory()
|
| 772 | 776 |
|
| 773 | 777 |
for name in sorted(os.listdir(path)):
|
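A brief note on the fast path added above: a CasBasedDirectory already holds its whole tree as content-addressed protobuf messages, so committing it reduces to serializing the root Directory message and storing those bytes as a single CAS object, with no filesystem walk. A minimal sketch of the underlying idea (illustrative only, not the BuildStream API call itself):

    import hashlib
    from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    directory = remote_execution_pb2.Directory()
    blob = directory.SerializeToString()            # canonical bytes of the tree root
    digest_hash = hashlib.sha256(blob).hexdigest()  # CAS keys objects by the hash of their bytes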
| ... | ... | @@ -32,7 +32,7 @@ from ._message import Message, MessageType |
| 32 | 32 |
from ._profile import Topics, profile_start, profile_end
|
| 33 | 33 |
from ._artifactcache import ArtifactCache
|
| 34 | 34 |
from ._artifactcache.cascache import CASCache
|
| 35 |
-from ._workspaces import Workspaces
|
|
| 35 |
+from ._workspaces import Workspaces, WorkspaceProjectCache
|
|
| 36 | 36 |
from .plugin import _plugin_lookup
|
| 37 | 37 |
|
| 38 | 38 |
|
| ... | ... | @@ -122,6 +122,10 @@ class Context(): |
| 122 | 122 |
# remove a workspace directory.
|
| 123 | 123 |
self.prompt_workspace_close_remove_dir = None
|
| 124 | 124 |
|
| 125 |
+ # Boolean, whether we double-check with the user that they meant to
|
|
| 126 |
+ # close the workspace when they're using it to access the project.
|
|
| 127 |
+ self.prompt_workspace_close_project_inaccessible = None
|
|
| 128 |
+ |
|
| 125 | 129 |
# Boolean, whether we double-check with the user that they meant to do
|
| 126 | 130 |
# a hard reset of a workspace, potentially losing changes.
|
| 127 | 131 |
self.prompt_workspace_reset_hard = None
|
| ... | ... | @@ -140,6 +144,7 @@ class Context(): |
| 140 | 144 |
self._projects = []
|
| 141 | 145 |
self._project_overrides = {}
|
| 142 | 146 |
self._workspaces = None
|
| 147 |
+ self._workspace_project_cache = WorkspaceProjectCache()
|
|
| 143 | 148 |
self._log_handle = None
|
| 144 | 149 |
self._log_filename = None
|
| 145 | 150 |
self._cascache = None
|
| ... | ... | @@ -250,12 +255,15 @@ class Context(): |
| 250 | 255 |
defaults, Mapping, 'prompt')
|
| 251 | 256 |
_yaml.node_validate(prompt, [
|
| 252 | 257 |
'auto-init', 'really-workspace-close-remove-dir',
|
| 258 |
+ 'really-workspace-close-project-inaccessible',
|
|
| 253 | 259 |
'really-workspace-reset-hard',
|
| 254 | 260 |
])
|
| 255 | 261 |
self.prompt_auto_init = _node_get_option_str(
|
| 256 | 262 |
prompt, 'auto-init', ['ask', 'no']) == 'ask'
|
| 257 | 263 |
self.prompt_workspace_close_remove_dir = _node_get_option_str(
|
| 258 | 264 |
prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
|
| 265 |
+ self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
|
|
| 266 |
+ prompt, 'really-workspace-close-project-inaccessible', ['ask', 'yes']) == 'ask'
|
|
| 259 | 267 |
self.prompt_workspace_reset_hard = _node_get_option_str(
|
| 260 | 268 |
prompt, 'really-workspace-reset-hard', ['ask', 'yes']) == 'ask'
|
| 261 | 269 |
|
| ... | ... | @@ -285,7 +293,7 @@ class Context(): |
| 285 | 293 |
#
|
| 286 | 294 |
def add_project(self, project):
|
| 287 | 295 |
if not self._projects:
|
| 288 |
- self._workspaces = Workspaces(project)
|
|
| 296 |
+ self._workspaces = Workspaces(project, self._workspace_project_cache)
|
|
| 289 | 297 |
self._projects.append(project)
|
| 290 | 298 |
|
| 291 | 299 |
# get_projects():
|
| ... | ... | @@ -312,6 +320,16 @@ class Context(): |
| 312 | 320 |
def get_workspaces(self):
|
| 313 | 321 |
return self._workspaces
|
| 314 | 322 |
|
| 323 |
+ # get_workspace_project_cache():
|
|
| 324 |
+ #
|
|
| 325 |
+ # Return the WorkspaceProjectCache object used for this BuildStream invocation
|
|
| 326 |
+ #
|
|
| 327 |
+ # Returns:
|
|
| 328 |
+ # (WorkspaceProjectCache): The WorkspaceProjectCache object
|
|
| 329 |
+ #
|
|
| 330 |
+ def get_workspace_project_cache(self):
|
|
| 331 |
+ return self._workspace_project_cache
|
|
| 332 |
+ |
|
| 315 | 333 |
# get_overrides():
|
| 316 | 334 |
#
|
| 317 | 335 |
# Fetch the override dictionary for the active project. This returns
|
| ... | ... | @@ -59,18 +59,9 @@ def complete_target(args, incomplete): |
| 59 | 59 |
:return: all the possible user-specified completions for the param
|
| 60 | 60 |
"""
|
| 61 | 61 |
|
| 62 |
+ from .. import utils
|
|
| 62 | 63 |
project_conf = 'project.conf'
|
| 63 | 64 |
|
| 64 |
- def ensure_project_dir(directory):
|
|
| 65 |
- directory = os.path.abspath(directory)
|
|
| 66 |
- while not os.path.isfile(os.path.join(directory, project_conf)):
|
|
| 67 |
- parent_dir = os.path.dirname(directory)
|
|
| 68 |
- if directory == parent_dir:
|
|
| 69 |
- break
|
|
| 70 |
- directory = parent_dir
|
|
| 71 |
- |
|
| 72 |
- return directory
|
|
| 73 |
- |
|
| 74 | 65 |
# First resolve the directory, in case there is an
|
| 75 | 66 |
# active --directory/-C option
|
| 76 | 67 |
#
|
| ... | ... | @@ -89,7 +80,7 @@ def complete_target(args, incomplete): |
| 89 | 80 |
else:
|
| 90 | 81 |
# Check if this directory or any of its parent directories
|
| 91 | 82 |
# contain a project config file
|
| 92 |
- base_directory = ensure_project_dir(base_directory)
|
|
| 83 |
+ base_directory, _ = utils._search_upward_for_files(base_directory, [project_conf])
|
|
| 93 | 84 |
|
| 94 | 85 |
# Now parse the project.conf just to find the element path,
|
| 95 | 86 |
# this is unfortunately a bit heavy.
|
| ... | ... | @@ -772,11 +763,19 @@ def workspace_close(app, remove_dir, all_, elements): |
| 772 | 763 |
|
| 773 | 764 |
elements = app.stream.redirect_element_names(elements)
|
| 774 | 765 |
|
| 775 |
- # Check that the workspaces in question exist
|
|
| 766 |
+ # Check that the workspaces in question exist, and that it's safe to
|
|
| 767 |
+ # remove them.
|
|
| 776 | 768 |
nonexisting = []
|
| 777 | 769 |
for element_name in elements:
|
| 778 | 770 |
if not app.stream.workspace_exists(element_name):
|
| 779 | 771 |
nonexisting.append(element_name)
|
| 772 |
+ if (app.stream.workspace_is_required(element_name) and app.interactive and
|
|
| 773 |
+ app.context.prompt_workspace_close_project_inaccessible):
|
|
| 774 |
+ click.echo("Removing '{}' will prevent you from running "
|
|
| 775 |
+ "BuildStream commands from the current directory".format(element_name))
|
|
| 776 |
+ if not click.confirm('Are you sure you want to close this workspace?'):
|
|
| 777 |
+ click.echo('Aborting', err=True)
|
|
| 778 |
+ sys.exit(-1)
|
|
| 780 | 779 |
if nonexisting:
|
| 781 | 780 |
raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
|
| 782 | 781 |
|
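For illustration, the new confirmation above would look roughly like this in an interactive session (output approximate, element name hypothetical):

    $ bst workspace close hello.bst
    Removing 'hello.bst' will prevent you from running BuildStream commands from the current directory
    Are you sure you want to close this workspace? [y/N]: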
| ... | ... | @@ -41,6 +41,7 @@ from .element import Element |
| 41 | 41 |
from ._message import Message, MessageType
|
| 42 | 42 |
from ._includes import Includes
|
| 43 | 43 |
from ._platform import Platform
|
| 44 |
+from ._workspaces import WORKSPACE_PROJECT_FILE
|
|
| 44 | 45 |
|
| 45 | 46 |
|
| 46 | 47 |
# Project Configuration file
|
| ... | ... | @@ -95,8 +96,10 @@ class Project(): |
| 95 | 96 |
# The project name
|
| 96 | 97 |
self.name = None
|
| 97 | 98 |
|
| 98 |
- # The project directory
|
|
| 99 |
- self.directory = self._ensure_project_dir(directory)
|
|
| 99 |
+ self._context = context # The invocation Context, a private member
|
|
| 100 |
+ |
|
| 101 |
+ # The project directory, and the element whose workspace it was invoked from (if any)
|
|
| 102 |
+ self.directory, self._invoked_from_workspace_element = self._find_project_dir(directory)
|
|
| 100 | 103 |
|
| 101 | 104 |
# Absolute path to where elements are loaded from within the project
|
| 102 | 105 |
self.element_path = None
|
| ... | ... | @@ -117,7 +120,6 @@ class Project(): |
| 117 | 120 |
#
|
| 118 | 121 |
# Private Members
|
| 119 | 122 |
#
|
| 120 |
- self._context = context # The invocation Context
|
|
| 121 | 123 |
|
| 122 | 124 |
self._default_mirror = default_mirror # The name of the preferred mirror.
|
| 123 | 125 |
|
| ... | ... | @@ -371,6 +373,14 @@ class Project(): |
| 371 | 373 |
|
| 372 | 374 |
self._load_second_pass()
|
| 373 | 375 |
|
| 376 |
+ # invoked_from_workspace_element()
|
|
| 377 |
+ #
|
|
| 378 |
+ # Returns the element whose workspace was used to invoke buildstream
|
|
| 379 |
+ # if buildstream was invoked from an external workspace
|
|
| 380 |
+ #
|
|
| 381 |
+ def invoked_from_workspace_element(self):
|
|
| 382 |
+ return self._invoked_from_workspace_element
|
|
| 383 |
+ |
|
| 374 | 384 |
# cleanup()
|
| 375 | 385 |
#
|
| 376 | 386 |
# Cleans up resources used loading elements
|
| ... | ... | @@ -650,7 +660,7 @@ class Project(): |
| 650 | 660 |
# Source url aliases
|
| 651 | 661 |
output._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
|
| 652 | 662 |
|
| 653 |
- # _ensure_project_dir()
|
|
| 663 |
+ # _find_project_dir()
|
|
| 654 | 664 |
#
|
| 655 | 665 |
# Returns path of the project directory, if a configuration file is found
|
| 656 | 666 |
# in given directory or any of its parent directories.
|
| ... | ... | @@ -661,18 +671,30 @@ class Project(): |
| 661 | 671 |
# Raises:
|
| 662 | 672 |
# LoadError if project.conf is not found
|
| 663 | 673 |
#
|
| 664 |
- def _ensure_project_dir(self, directory):
|
|
| 665 |
- directory = os.path.abspath(directory)
|
|
| 666 |
- while not os.path.isfile(os.path.join(directory, _PROJECT_CONF_FILE)):
|
|
| 667 |
- parent_dir = os.path.dirname(directory)
|
|
| 668 |
- if directory == parent_dir:
|
|
| 669 |
- raise LoadError(
|
|
| 670 |
- LoadErrorReason.MISSING_PROJECT_CONF,
|
|
| 671 |
- '{} not found in current directory or any of its parent directories'
|
|
| 672 |
- .format(_PROJECT_CONF_FILE))
|
|
| 673 |
- directory = parent_dir
|
|
| 674 |
+ # Returns:
|
|
| 675 |
+ # (str) - the directory that contains the project, and
|
|
| 676 |
+ # (str) - the name of the element required to find the project, or None
|
|
| 677 |
+ #
|
|
| 678 |
+ def _find_project_dir(self, directory):
|
|
| 679 |
+ workspace_element = None
|
|
| 680 |
+ found_directory, filename = utils._search_upward_for_files(
|
|
| 681 |
+ directory, [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
|
|
| 682 |
+ )
|
|
| 683 |
+ if filename == _PROJECT_CONF_FILE:
|
|
| 684 |
+ project_directory = found_directory
|
|
| 685 |
+ elif filename == WORKSPACE_PROJECT_FILE:
|
|
| 686 |
+ workspace_project_cache = self._context.get_workspace_project_cache()
|
|
| 687 |
+ workspace_project = workspace_project_cache.get(found_directory)
|
|
| 688 |
+ if workspace_project:
|
|
| 689 |
+ project_directory = workspace_project.get_default_project_path()
|
|
| 690 |
+ workspace_element = workspace_project.get_default_element()
|
|
| 691 |
+ else:
|
|
| 692 |
+ raise LoadError(
|
|
| 693 |
+ LoadErrorReason.MISSING_PROJECT_CONF,
|
|
| 694 |
+ '{} not found in current directory or any of its parent directories'
|
|
| 695 |
+ .format(_PROJECT_CONF_FILE))
|
|
| 674 | 696 |
|
| 675 |
- return directory
|
|
| 697 |
+ return project_directory, workspace_element
|
|
| 676 | 698 |
|
| 677 | 699 |
def _load_plugin_factories(self, config, output):
|
| 678 | 700 |
plugin_source_origins = [] # Origins of custom sources
|
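To make the resolution above concrete: when bst is invoked from inside an external workspace, the upward search finds .bstproject.yaml first, and the project directory is then taken from the workspace's recorded metadata rather than from a project.conf on the search path. A hedged sketch (paths are illustrative):

    # Searching upward from a directory inside an external workspace
    found_directory, filename = utils._search_upward_for_files(
        '/home/user/workspace/src', [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE])
    # filename == WORKSPACE_PROJECT_FILE here, so the project path and the
    # owning element are read from the cached WorkspaceProject instead.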
| ... | ... | @@ -581,15 +581,7 @@ class Stream(): |
| 581 | 581 |
todo_elements = "\nDid not try to create workspaces for " + todo_elements
|
| 582 | 582 |
raise StreamError("Failed to create workspace directory: {}".format(e) + todo_elements) from e
|
| 583 | 583 |
|
| 584 |
- workspaces.create_workspace(target._get_full_name(), directory)
|
|
| 585 |
- |
|
| 586 |
- if not no_checkout:
|
|
| 587 |
- with target.timed_activity("Staging sources to {}".format(directory)):
|
|
| 588 |
- target._open_workspace()
|
|
| 589 |
- |
|
| 590 |
- # Saving the workspace once it is set up means that if the next workspace fails to be created before
|
|
| 591 |
- # the configuration gets saved. The successfully created workspace still gets saved.
|
|
| 592 |
- workspaces.save_config()
|
|
| 584 |
+ workspaces.create_workspace(target, directory, checkout=not no_checkout)
|
|
| 593 | 585 |
self._message(MessageType.INFO, "Created a workspace for element: {}"
|
| 594 | 586 |
.format(target._get_full_name()))
|
| 595 | 587 |
|
| ... | ... | @@ -672,10 +664,7 @@ class Stream(): |
| 672 | 664 |
.format(workspace_path, e)) from e
|
| 673 | 665 |
|
| 674 | 666 |
workspaces.delete_workspace(element._get_full_name())
|
| 675 |
- workspaces.create_workspace(element._get_full_name(), workspace_path)
|
|
| 676 |
- |
|
| 677 |
- with element.timed_activity("Staging sources to {}".format(workspace_path)):
|
|
| 678 |
- element._open_workspace()
|
|
| 667 |
+ workspaces.create_workspace(element, workspace_path, checkout=True)
|
|
| 679 | 668 |
|
| 680 | 669 |
self._message(MessageType.INFO,
|
| 681 | 670 |
"Reset workspace for {} at: {}".format(element.name,
|
| ... | ... | @@ -707,6 +696,20 @@ class Stream(): |
| 707 | 696 |
|
| 708 | 697 |
return False
|
| 709 | 698 |
|
| 699 |
+ # workspace_is_required()
|
|
| 700 |
+ #
|
|
| 701 |
+ # Checks whether the workspace belonging to element_name is required to
|
|
| 702 |
+ # load the project
|
|
| 703 |
+ #
|
|
| 704 |
+ # Args:
|
|
| 705 |
+ # element_name (str): The element whose workspace may be required
|
|
| 706 |
+ #
|
|
| 707 |
+ # Returns:
|
|
| 708 |
+ # (bool): True if the workspace is required
|
|
| 709 |
+ def workspace_is_required(self, element_name):
|
|
| 710 |
+ invoked_elm = self._project.invoked_from_workspace_element()
|
|
| 711 |
+ return invoked_elm == element_name
|
|
| 712 |
+ |
|
| 710 | 713 |
# workspace_list
|
| 711 | 714 |
#
|
| 712 | 715 |
# Serializes the workspaces and dumps them in YAML to stdout.
|
| ... | ... | @@ -25,6 +25,202 @@ from ._exceptions import LoadError, LoadErrorReason |
| 25 | 25 |
|
| 26 | 26 |
|
| 27 | 27 |
BST_WORKSPACE_FORMAT_VERSION = 3
|
| 28 |
+BST_WORKSPACE_PROJECT_FORMAT_VERSION = 1
|
|
| 29 |
+WORKSPACE_PROJECT_FILE = ".bstproject.yaml"
|
|
| 30 |
+ |
|
| 31 |
+ |
|
| 32 |
+# WorkspaceProject()
|
|
| 33 |
+#
|
|
| 34 |
+# An object to contain various helper functions and data required for
|
|
| 35 |
+# referring from a workspace back to buildstream.
|
|
| 36 |
+#
|
|
| 37 |
+# Args:
|
|
| 38 |
+# directory (str): The directory that the workspace exists in.
|
|
| 39 |
+#
|
|
| 40 |
+class WorkspaceProject():
|
|
| 41 |
+ def __init__(self, directory):
|
|
| 42 |
+ self._projects = []
|
|
| 43 |
+ self._directory = directory
|
|
| 44 |
+ |
|
| 45 |
+ # get_default_project_path()
|
|
| 46 |
+ #
|
|
| 47 |
+ # Retrieves the default path to a project.
|
|
| 48 |
+ #
|
|
| 49 |
+ # Returns:
|
|
| 50 |
+ # (str): The path to a project
|
|
| 51 |
+ #
|
|
| 52 |
+ def get_default_project_path(self):
|
|
| 53 |
+ return self._projects[0]['project-path']
|
|
| 54 |
+ |
|
| 55 |
+ # get_default_element()
|
|
| 56 |
+ #
|
|
| 57 |
+ # Retrieves the name of the element that owns this workspace.
|
|
| 58 |
+ #
|
|
| 59 |
+ # Returns:
|
|
| 60 |
+ # (str): The name of an element
|
|
| 61 |
+ #
|
|
| 62 |
+ def get_default_element(self):
|
|
| 63 |
+ return self._projects[0]['element-name']
|
|
| 64 |
+ |
|
| 65 |
+ # to_dict()
|
|
| 66 |
+ #
|
|
| 67 |
+ # Turn the member data into a dict for serialization purposes
|
|
| 68 |
+ #
|
|
| 69 |
+ # Returns:
|
|
| 70 |
+ # (dict): A dict representation of the WorkspaceProject
|
|
| 71 |
+ #
|
|
| 72 |
+ def to_dict(self):
|
|
| 73 |
+ ret = {
|
|
| 74 |
+ 'projects': self._projects,
|
|
| 75 |
+ 'format-version': BST_WORKSPACE_PROJECT_FORMAT_VERSION,
|
|
| 76 |
+ }
|
|
| 77 |
+ return ret
|
|
| 78 |
+ |
|
| 79 |
+ # from_dict()
|
|
| 80 |
+ #
|
|
| 81 |
+ # Loads a new WorkspaceProject from a simple dictionary
|
|
| 82 |
+ #
|
|
| 83 |
+ # Args:
|
|
| 84 |
+ # directory (str): The directory that the workspace exists in
|
|
| 85 |
+ # dictionary (dict): The dict to generate a WorkspaceProject from
|
|
| 86 |
+ #
|
|
| 87 |
+ # Returns:
|
|
| 88 |
+ # (WorkspaceProject): A newly instantiated WorkspaceProject
|
|
| 89 |
+ #
|
|
| 90 |
+ @classmethod
|
|
| 91 |
+ def from_dict(cls, directory, dictionary):
|
|
| 92 |
+ # Only know how to handle one format-version at the moment.
|
|
| 93 |
+ format_version = int(dictionary['format-version'])
|
|
| 94 |
+ assert format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION, \
|
|
| 95 |
+ "Format version {} not found in {}".format(BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary)
|
|
| 96 |
+ |
|
| 97 |
+ workspace_project = cls(directory)
|
|
| 98 |
+ for item in dictionary['projects']:
|
|
| 99 |
+ workspace_project.add_project(item['project-path'], item['element-name'])
|
|
| 100 |
+ |
|
| 101 |
+ return workspace_project
|
|
| 102 |
+ |
|
| 103 |
+ # load()
|
|
| 104 |
+ #
|
|
| 105 |
+ # Loads the WorkspaceProject for a given directory.
|
|
| 106 |
+ #
|
|
| 107 |
+ # Args:
|
|
| 108 |
+ # directory (str): The directory
|
|
| 109 |
+ # Returns:
|
|
| 110 |
+ # (WorkspaceProject): The created WorkspaceProject, if in a workspace, or
|
|
| 111 |
+ # (NoneType): None, if the directory is not inside a workspace.
|
|
| 112 |
+ #
|
|
| 113 |
+ @classmethod
|
|
| 114 |
+ def load(cls, directory):
|
|
| 115 |
+ workspace_file = os.path.join(directory, WORKSPACE_PROJECT_FILE)
|
|
| 116 |
+ if os.path.exists(workspace_file):
|
|
| 117 |
+ data_dict = _yaml.load(workspace_file)
|
|
| 118 |
+ return cls.from_dict(directory, data_dict)
|
|
| 119 |
+ else:
|
|
| 120 |
+ return None
|
|
| 121 |
+ |
|
| 122 |
+ # write()
|
|
| 123 |
+ #
|
|
| 124 |
+ # Writes the WorkspaceProject to disk
|
|
| 125 |
+ #
|
|
| 126 |
+ def write(self):
|
|
| 127 |
+ os.makedirs(self._directory, exist_ok=True)
|
|
| 128 |
+ _yaml.dump(self.to_dict(), self.get_filename())
|
|
| 129 |
+ |
|
| 130 |
+ # get_filename()
|
|
| 131 |
+ #
|
|
| 132 |
+ # Returns the full path to the workspace local project file
|
|
| 133 |
+ #
|
|
| 134 |
+ def get_filename(self):
|
|
| 135 |
+ return os.path.join(self._directory, WORKSPACE_PROJECT_FILE)
|
|
| 136 |
+ |
|
| 137 |
+ # add_project()
|
|
| 138 |
+ #
|
|
| 139 |
+ # Adds an entry containing the project's path and element's name.
|
|
| 140 |
+ #
|
|
| 141 |
+ # Args:
|
|
| 142 |
+ # project_path (str): The path to the project that opened the workspace.
|
|
| 143 |
+ # element_name (str): The name of the element that the workspace belongs to.
|
|
| 144 |
+ #
|
|
| 145 |
+ def add_project(self, project_path, element_name):
|
|
| 146 |
+ assert (project_path and element_name)
|
|
| 147 |
+ self._projects.append({'project-path': project_path, 'element-name': element_name})
|
|
| 148 |
+ |
|
| 149 |
+ |
|
| 150 |
+# WorkspaceProjectCache()
|
|
| 151 |
+#
|
|
| 152 |
+# A class to manage workspace project data for multiple workspaces.
|
|
| 153 |
+#
|
|
| 154 |
+class WorkspaceProjectCache():
|
|
| 155 |
+ def __init__(self):
|
|
| 156 |
+ self._projects = {} # Mapping of a workspace directory to its WorkspaceProject
|
|
| 157 |
+ |
|
| 158 |
+ # get()
|
|
| 159 |
+ #
|
|
| 160 |
+ # Returns a WorkspaceProject for a given directory, retrieving from the cache if
|
|
| 161 |
+ # present.
|
|
| 162 |
+ #
|
|
| 163 |
+ # Args:
|
|
| 164 |
+ # directory (str): The directory to search for a WorkspaceProject.
|
|
| 165 |
+ #
|
|
| 166 |
+ # Returns:
|
|
| 167 |
+ # (WorkspaceProject): The WorkspaceProject that was found for that directory.
|
|
| 168 |
+ # or (NoneType): None, if no WorkspaceProject can be found.
|
|
| 169 |
+ #
|
|
| 170 |
+ def get(self, directory):
|
|
| 171 |
+ try:
|
|
| 172 |
+ workspace_project = self._projects[directory]
|
|
| 173 |
+ except KeyError:
|
|
| 174 |
+ workspace_project = WorkspaceProject.load(directory)
|
|
| 175 |
+ if workspace_project:
|
|
| 176 |
+ self._projects[directory] = workspace_project
|
|
| 177 |
+ |
|
| 178 |
+ return workspace_project
|
|
| 179 |
+ |
|
| 180 |
+ # add()
|
|
| 181 |
+ #
|
|
| 182 |
+ # Adds the project path and element name to the WorkspaceProject that exists
|
|
| 183 |
+ # for that directory
|
|
| 184 |
+ #
|
|
| 185 |
+ # Args:
|
|
| 186 |
+ # directory (str): The directory to search for a WorkspaceProject.
|
|
| 187 |
+ # project_path (str): The path to the project that refers to this workspace
|
|
| 188 |
+ # element_name (str): The element in the project that refers to this workspace
|
|
| 189 |
+ #
|
|
| 190 |
+ # Returns:
|
|
| 191 |
+ # (WorkspaceProject): The WorkspaceProject that was found for that directory.
|
|
| 192 |
+ #
|
|
| 193 |
+ def add(self, directory, project_path, element_name):
|
|
| 194 |
+ workspace_project = self.get(directory)
|
|
| 195 |
+ if not workspace_project:
|
|
| 196 |
+ workspace_project = WorkspaceProject(directory)
|
|
| 197 |
+ self._projects[directory] = workspace_project
|
|
| 198 |
+ |
|
| 199 |
+ workspace_project.add_project(project_path, element_name)
|
|
| 200 |
+ return workspace_project
|
|
| 201 |
+ |
|
| 202 |
+ # remove()
|
|
| 203 |
+ #
|
|
| 204 |
+ # Removes the project path and element name from the WorkspaceProject that exists
|
|
| 205 |
+ # for that directory.
|
|
| 206 |
+ #
|
|
| 207 |
+ # NOTE: This currently just deletes the file, but with support for multiple
|
|
| 208 |
+ # projects opening the same workspace, this will involve decreasing the count
|
|
| 209 |
+ # and deleting the file if there are no more projects.
|
|
| 210 |
+ #
|
|
| 211 |
+ # Args:
|
|
| 212 |
+ # directory (str): The directory to search for a WorkspaceProject.
|
|
| 213 |
+ #
|
|
| 214 |
+ def remove(self, directory):
|
|
| 215 |
+ workspace_project = self.get(directory)
|
|
| 216 |
+ if not workspace_project:
|
|
| 217 |
+ raise LoadError(LoadErrorReason.MISSING_FILE,
|
|
| 218 |
+ "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE))
|
|
| 219 |
+ path = workspace_project.get_filename()
|
|
| 220 |
+ try:
|
|
| 221 |
+ os.unlink(path)
|
|
| 222 |
+ except FileNotFoundError:
|
|
| 223 |
+ pass
|
|
| 28 | 224 |
|
| 29 | 225 |
|
| 30 | 226 |
# Workspace()
|
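For reference, the .bstproject.yaml file written by WorkspaceProject.write() above serializes the to_dict() structure, so it would look something like this (path and element name are illustrative):

    format-version: 1
    projects:
    - project-path: /home/user/projects/myproject
      element-name: hello.bst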
| ... | ... | @@ -174,10 +370,15 @@ class Workspace(): |
| 174 | 370 |
if recalculate or self._key is None:
|
| 175 | 371 |
fullpath = self.get_absolute_path()
|
| 176 | 372 |
|
| 373 |
+ excluded_files = (WORKSPACE_PROJECT_FILE,)
|
|
| 374 |
+ |
|
| 177 | 375 |
# Get a list of tuples of the project relative paths and fullpaths
|
| 178 | 376 |
if os.path.isdir(fullpath):
|
| 179 | 377 |
filelist = utils.list_relative_paths(fullpath)
|
| 180 |
- filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
|
|
| 378 |
+ filelist = [
|
|
| 379 |
+ (relpath, os.path.join(fullpath, relpath)) for relpath in filelist
|
|
| 380 |
+ if relpath not in excluded_files
|
|
| 381 |
+ ]
|
|
| 181 | 382 |
else:
|
| 182 | 383 |
filelist = [(self.get_absolute_path(), fullpath)]
|
| 183 | 384 |
|
| ... | ... | @@ -199,12 +400,14 @@ class Workspace(): |
| 199 | 400 |
#
|
| 200 | 401 |
# Args:
|
| 201 | 402 |
# toplevel_project (Project): Top project used to resolve paths.
|
| 403 |
+# workspace_project_cache (WorkspaceProjectCache): The cache of WorkspaceProjects
|
|
| 202 | 404 |
#
|
| 203 | 405 |
class Workspaces():
|
| 204 |
- def __init__(self, toplevel_project):
|
|
| 406 |
+ def __init__(self, toplevel_project, workspace_project_cache):
|
|
| 205 | 407 |
self._toplevel_project = toplevel_project
|
| 206 | 408 |
self._bst_directory = os.path.join(toplevel_project.directory, ".bst")
|
| 207 | 409 |
self._workspaces = self._load_config()
|
| 410 |
+ self._workspace_project_cache = workspace_project_cache
|
|
| 208 | 411 |
|
| 209 | 412 |
# list()
|
| 210 | 413 |
#
|
| ... | ... | @@ -219,19 +422,36 @@ class Workspaces(): |
| 219 | 422 |
|
| 220 | 423 |
# create_workspace()
|
| 221 | 424 |
#
|
| 222 |
- # Create a workspace in the given path for the given element.
|
|
| 425 |
+ # Create a workspace in the given path for the given element, and optionally
|
|
| 426 |
+ # check out the target's sources into it.
|
|
| 223 | 427 |
#
|
| 224 | 428 |
# Args:
|
| 225 |
- # element_name (str) - The element name to create a workspace for
|
|
| 429 |
+ # target (Element) - The element to create a workspace for
|
|
| 226 | 430 |
# path (str) - The path in which the workspace should be kept
|
| 431 |
+ # checkout (bool): Whether to check out the element's sources into the directory
|
|
| 227 | 432 |
#
|
| 228 |
- def create_workspace(self, element_name, path):
|
|
| 229 |
- if path.startswith(self._toplevel_project.directory):
|
|
| 230 |
- path = os.path.relpath(path, self._toplevel_project.directory)
|
|
| 433 |
+ def create_workspace(self, target, path, *, checkout):
|
|
| 434 |
+ element_name = target._get_full_name()
|
|
| 435 |
+ project_dir = self._toplevel_project.directory
|
|
| 436 |
+ if path.startswith(project_dir):
|
|
| 437 |
+ workspace_path = os.path.relpath(path, project_dir)
|
|
| 438 |
+ else:
|
|
| 439 |
+ workspace_path = path
|
|
| 231 | 440 |
|
| 232 |
- self._workspaces[element_name] = Workspace(self._toplevel_project, path=path)
|
|
| 441 |
+ self._workspaces[element_name] = Workspace(self._toplevel_project, path=workspace_path)
|
|
| 233 | 442 |
|
| 234 |
- return self._workspaces[element_name]
|
|
| 443 |
+ if checkout:
|
|
| 444 |
+ with target.timed_activity("Staging sources to {}".format(path)):
|
|
| 445 |
+ target._open_workspace()
|
|
| 446 |
+ |
|
| 447 |
+ workspace_project = self._workspace_project_cache.add(path, project_dir, element_name)
|
|
| 448 |
+ project_file_path = workspace_project.get_filename()
|
|
| 449 |
+ |
|
| 450 |
+ if os.path.exists(project_file_path):
|
|
| 451 |
+ target.warn("{} was staged from this element's sources".format(WORKSPACE_PROJECT_FILE))
|
|
| 452 |
+ workspace_project.write()
|
|
| 453 |
+ |
|
| 454 |
+ self.save_config()
|
|
| 235 | 455 |
|
| 236 | 456 |
# get_workspace()
|
| 237 | 457 |
#
|
| ... | ... | @@ -280,8 +500,19 @@ class Workspaces(): |
| 280 | 500 |
# element_name (str) - The element name whose workspace to delete
|
| 281 | 501 |
#
|
| 282 | 502 |
def delete_workspace(self, element_name):
|
| 503 |
+ workspace = self.get_workspace(element_name)
|
|
| 283 | 504 |
del self._workspaces[element_name]
|
| 284 | 505 |
|
| 506 |
+ # Remove from the cache if it exists
|
|
| 507 |
+ try:
|
|
| 508 |
+ self._workspace_project_cache.remove(workspace.get_absolute_path())
|
|
| 509 |
+ except LoadError as e:
|
|
| 510 |
+ # We might be closing a workspace with a deleted directory
|
|
| 511 |
+ if e.reason == LoadErrorReason.MISSING_FILE:
|
|
| 512 |
+ pass
|
|
| 513 |
+ else:
|
|
| 514 |
+ raise
|
|
| 515 |
+ |
|
| 285 | 516 |
# save_config()
|
| 286 | 517 |
#
|
| 287 | 518 |
# Dump the current workspace element to the project configuration
|
| ... | ... | @@ -128,6 +128,14 @@ prompt: |
| 128 | 128 |
#
|
| 129 | 129 |
really-workspace-close-remove-dir: ask
|
| 130 | 130 |
|
| 131 |
+ # Whether to really proceed with 'bst workspace close' when doing so would
|
|
| 132 |
+ # prevent you from running bst commands in this workspace.
|
|
| 133 |
+ #
|
|
| 134 |
+ # ask - Ask the user if they are sure.
|
|
| 135 |
+ # yes - Always close, without asking.
|
|
| 136 |
+ #
|
|
| 137 |
+ really-workspace-close-project-inaccessible: ask
|
|
| 138 |
+ |
|
| 131 | 139 |
# Whether to really proceed with 'bst workspace reset' doing a hard reset of
|
| 132 | 140 |
# a workspace, potentially losing changes.
|
| 133 | 141 |
#
|
| ... | ... | @@ -102,6 +102,7 @@ from .types import _KeyStrength, CoreWarnings |
| 102 | 102 |
|
| 103 | 103 |
from .storage.directory import Directory
|
| 104 | 104 |
from .storage._filebaseddirectory import FileBasedDirectory
|
| 105 |
+from .storage._casbaseddirectory import CasBasedDirectory
|
|
| 105 | 106 |
from .storage.directory import VirtualDirectoryError
|
| 106 | 107 |
|
| 107 | 108 |
|
| ... | ... | @@ -1634,35 +1635,38 @@ class Element(Plugin): |
| 1634 | 1635 |
# No collect directory existed
|
| 1635 | 1636 |
collectvdir = None
|
| 1636 | 1637 |
|
| 1638 |
+ assemblevdir = CasBasedDirectory(cas_cache=self._get_context().artifactcache.cas, ref=None)
|
|
| 1639 |
+ logsvdir = assemblevdir.descend("logs", create=True)
|
|
| 1640 |
+ metavdir = assemblevdir.descend("meta", create=True)
|
|
| 1641 |
+ |
|
| 1637 | 1642 |
# Create artifact directory structure
|
| 1638 | 1643 |
assembledir = os.path.join(rootdir, 'artifact')
|
| 1639 |
- filesdir = os.path.join(assembledir, 'files')
|
|
| 1640 | 1644 |
logsdir = os.path.join(assembledir, 'logs')
|
| 1641 | 1645 |
metadir = os.path.join(assembledir, 'meta')
|
| 1642 |
- buildtreedir = os.path.join(assembledir, 'buildtree')
|
|
| 1643 | 1646 |
os.mkdir(assembledir)
|
| 1644 |
- if collect is not None and collectvdir is not None:
|
|
| 1645 |
- os.mkdir(filesdir)
|
|
| 1646 | 1647 |
os.mkdir(logsdir)
|
| 1647 | 1648 |
os.mkdir(metadir)
|
| 1648 |
- os.mkdir(buildtreedir)
|
|
| 1649 | 1649 |
|
| 1650 |
- # Hard link files from collect dir to files directory
|
|
| 1651 | 1650 |
if collect is not None and collectvdir is not None:
|
| 1652 |
- collectvdir.export_files(filesdir, can_link=True)
|
|
| 1651 |
+ if isinstance(collectvdir, CasBasedDirectory):
|
|
| 1652 |
+ assemblevdir.fast_directory_import("files", collectvdir)
|
|
| 1653 |
+ else:
|
|
| 1654 |
+ filesvdir = assemblevdir.descend("files", create=True)
|
|
| 1655 |
+ filesvdir.import_files(collectvdir, can_link=True)
|
|
| 1653 | 1656 |
|
| 1657 |
+ sandbox_vroot = sandbox.get_virtual_directory()
|
|
| 1654 | 1658 |
try:
|
| 1655 |
- sandbox_vroot = sandbox.get_virtual_directory()
|
|
| 1656 | 1659 |
sandbox_build_dir = sandbox_vroot.descend(
|
| 1657 | 1660 |
self.get_variable('build-root').lstrip(os.sep).split(os.sep))
|
| 1658 |
- # Hard link files from build-root dir to buildtreedir directory
|
|
| 1659 |
- sandbox_build_dir.export_files(buildtreedir)
|
|
| 1661 |
+ assemblevdir.fast_directory_import("buildtree", sandbox_build_dir)
|
|
| 1660 | 1662 |
except VirtualDirectoryError:
|
| 1661 | 1663 |
# Directory could not be found. Pre-virtual
|
| 1662 | 1664 |
# directory behaviour was to continue silently
|
| 1663 |
- # if the directory could not be found.
|
|
| 1664 |
- pass
|
|
| 1665 |
+ # if the directory could not be found, but we must create
|
|
| 1666 |
+ # the directory.
|
|
| 1667 |
+ assemblevdir.descend("buildtree", create=True)
|
|
| 1665 | 1668 |
|
| 1669 |
+ # Write some logs out to normal directories: logsdir and metadir
|
|
| 1666 | 1670 |
# Copy build log
|
| 1667 | 1671 |
log_filename = self._get_context().get_log_filename()
|
| 1668 | 1672 |
self._build_log_path = os.path.join(logsdir, 'build.log')
|
| ... | ... | @@ -1705,9 +1709,12 @@ class Element(Plugin): |
| 1705 | 1709 |
]
|
| 1706 | 1710 |
}), os.path.join(metadir, 'workspaced-dependencies.yaml'))
|
| 1707 | 1711 |
|
| 1708 |
- with self.timed_activity("Caching artifact"):
|
|
| 1709 |
- artifact_size = utils._get_dir_size(assembledir)
|
|
| 1710 |
- self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
|
|
| 1712 |
+ metavdir.import_files(metadir)
|
|
| 1713 |
+ logsvdir.import_files(logsdir)
|
|
| 1714 |
+ |
|
| 1715 |
+ artifact_size = assemblevdir.get_size()
|
|
| 1716 |
+ with self.timed_activity("Caching artifact of size {}".format(artifact_size)):
|
|
| 1717 |
+ self.__artifacts.commit(self, assemblevdir, self.__get_cache_keys_for_commit())
|
|
| 1711 | 1718 |
|
| 1712 | 1719 |
if collect is not None and collectvdir is None:
|
| 1713 | 1720 |
raise ElementError(
|
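For orientation, the artifact assembled above now lives in a virtual directory with this layout (files and buildtree are imported virtually; logs and meta are staged on disk first and then imported):

    artifact/
        files/      # collected build output (fast-imported when CAS-based)
        buildtree/  # the sandbox build root, or empty if it was not found
        logs/       # build.log and related logs
        meta/       # cache keys and *.yaml metadata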
| ... | ... | @@ -247,7 +247,7 @@ class GitMirror(SourceFetcher): |
| 247 | 247 |
else:
|
| 248 | 248 |
remote_name = "origin"
|
| 249 | 249 |
|
| 250 |
- self.source.call([self.source.host_git, 'fetch', remote_name, '--prune'],
|
|
| 250 |
+ self.source.call([self.source.host_git, 'fetch', remote_name, '--prune', '--force', '--tags'],
|
|
| 251 | 251 |
fail="Failed to fetch from remote git repository: {}".format(url),
|
| 252 | 252 |
fail_temporarily=True,
|
| 253 | 253 |
cwd=self.mirror)
|
| ... | ... | @@ -350,10 +350,13 @@ class CasBasedDirectory(Directory): |
| 350 | 350 |
filenode.is_executable = is_executable
|
| 351 | 351 |
self.index[filename] = IndexEntry(filenode, modified=modified or filename in self.index)
|
| 352 | 352 |
|
| 353 |
- def _copy_link_from_filesystem(self, basename, filename):
|
|
| 354 |
- self._add_new_link_direct(filename, os.readlink(os.path.join(basename, filename)))
|
|
| 353 |
+ def _copy_link_from_filesystem(self, filesystem_path, relative_path, destination_name):
|
|
| 354 |
+ # filesystem_path should be a full path pointing to the source symlink.
|
|
| 355 |
+ # relative_path should be the path we're importing to, which is used to turn absolute paths into relative ones.
|
|
| 356 |
+ # destination_name should be the destination name in this directory.
|
|
| 357 |
+ self._add_new_link_direct(relative_path, destination_name, os.readlink(filesystem_path))
|
|
| 355 | 358 |
|
| 356 |
- def _add_new_link_direct(self, name, target):
|
|
| 359 |
+ def _add_new_link_direct(self, relative_path, name, target):
|
|
| 357 | 360 |
existing_link = self._find_pb2_entry(name)
|
| 358 | 361 |
if existing_link:
|
| 359 | 362 |
symlinknode = existing_link
|
| ... | ... | @@ -361,8 +364,15 @@ class CasBasedDirectory(Directory): |
| 361 | 364 |
symlinknode = self.pb2_directory.symlinks.add()
|
| 362 | 365 |
assert isinstance(symlinknode, remote_execution_pb2.SymlinkNode)
|
| 363 | 366 |
symlinknode.name = name
|
| 364 |
- # A symlink node has no digest.
|
|
| 367 |
+ |
|
| 368 |
+ absolute = target.startswith(CasBasedDirectory._pb2_absolute_path_prefix)
|
|
| 369 |
+ if absolute:
|
|
| 370 |
+ distance_to_root = len(relative_path.split(CasBasedDirectory._pb2_path_sep))
|
|
| 371 |
+ target = CasBasedDirectory._pb2_path_sep.join([".."] * distance_to_root + [target[1:]])
|
|
| 365 | 372 |
symlinknode.target = target
|
| 373 |
+ |
|
| 374 |
+ # A symlink node has no digest.
|
|
| 375 |
+ |
|
| 366 | 376 |
self.index[name] = IndexEntry(symlinknode, modified=(existing_link is not None))
|
| 367 | 377 |
|
| 368 | 378 |
def delete_entry(self, name):
|
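A worked example of the absolute-target rewrite above: a symlink imported at relative path 'a/b' with target '/usr/lib/foo' is stored with target '../../usr/lib/foo', which resolves to the same location relative to the import root. A standalone sketch of that rule (names illustrative, not BuildStream API):

    def rewrite_absolute_symlink(relative_path, target, sep='/'):
        # Climb back up to the import root, then append the absolute
        # target with its leading separator stripped.
        distance_to_root = len(relative_path.split(sep))
        return sep.join(['..'] * distance_to_root + [target[1:]])

    assert rewrite_absolute_symlink('a/b', '/usr/lib/foo') == '../../usr/lib/foo'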
| ... | ... | @@ -527,7 +537,7 @@ class CasBasedDirectory(Directory): |
| 527 | 537 |
result.combine(subdir_result)
|
| 528 | 538 |
elif os.path.islink(import_file):
|
| 529 | 539 |
if self._check_replacement(entry, path_prefix, result):
|
| 530 |
- self._copy_link_from_filesystem(source_directory, entry)
|
|
| 540 |
+ self._copy_link_from_filesystem(os.path.join(source_directory, entry), path_prefix, entry)
|
|
| 531 | 541 |
result.files_written.append(relative_pathname)
|
| 532 | 542 |
elif os.path.isdir(import_file):
|
| 533 | 543 |
# A plain directory which already exists isn't a problem; just ignore it.
|
| ... | ... | @@ -602,7 +612,7 @@ class CasBasedDirectory(Directory): |
| 602 | 612 |
self.index[f] = IndexEntry(filenode, modified=True)
|
| 603 | 613 |
else:
|
| 604 | 614 |
assert isinstance(item, remote_execution_pb2.SymlinkNode)
|
| 605 |
- self._add_new_link_direct(name=f, target=item.target)
|
|
| 615 |
+ self._add_new_link_direct(path_prefix, name=f, target=item.target)
|
|
| 606 | 616 |
else:
|
| 607 | 617 |
result.ignored.append(os.path.join(path_prefix, f))
|
| 608 | 618 |
return result
|
| ... | ... | @@ -637,7 +647,7 @@ class CasBasedDirectory(Directory): |
| 637 | 647 |
files = external_pathspec.list_relative_paths()
|
| 638 | 648 |
|
| 639 | 649 |
if isinstance(external_pathspec, FileBasedDirectory):
|
| 640 |
- source_directory = external_pathspec.get_underlying_directory()
|
|
| 650 |
+ source_directory = external_pathspec._get_underlying_directory()
|
|
| 641 | 651 |
result = self._import_files_from_directory(source_directory, files=files)
|
| 642 | 652 |
elif isinstance(external_pathspec, str):
|
| 643 | 653 |
source_directory = external_pathspec
|
| ... | ... | @@ -836,6 +846,27 @@ class CasBasedDirectory(Directory): |
| 836 | 846 |
self._recalculate_recursing_up()
|
| 837 | 847 |
self._recalculate_recursing_down()
|
| 838 | 848 |
|
| 849 |
+ def get_size(self):
|
|
| 850 |
+ total = len(self.pb2_directory.SerializeToString())
|
|
| 851 |
+ for i in self.index.values():
|
|
| 852 |
+ if isinstance(i.buildstream_object, CasBasedDirectory):
|
|
| 853 |
+ total += i.buildstream_object.get_size()
|
|
| 854 |
+ elif isinstance(i.pb_object, remote_execution_pb2.FileNode):
|
|
| 855 |
+ src_name = self.cas_cache.objpath(i.pb_object.digest)
|
|
| 856 |
+ filesize = os.stat(src_name).st_size
|
|
| 857 |
+ total += filesize
|
|
| 858 |
+ # Symlink nodes are encoded as part of the directory serialization.
|
|
| 859 |
+ return total
|
|
| 860 |
+ |
|
| 861 |
+ def fast_directory_import(self, dirname, other_directory):
|
|
| 862 |
+ assert dirname not in self.index
|
|
| 863 |
+ if isinstance(other_directory, CasBasedDirectory):
|
|
| 864 |
+ self.index[dirname] = IndexEntry(other_directory.pb_object,
|
|
| 865 |
+ buildstream_object=other_directory.buildstream_object)
|
|
| 866 |
+ else:
|
|
| 867 |
+ subdir = self.descend(dirname, create=True)
|
|
| 868 |
+ subdir.import_files(other_directory, can_link=True)
|
|
| 869 |
+ |
|
| 839 | 870 |
def _get_identifier(self):
|
| 840 | 871 |
path = ""
|
| 841 | 872 |
if self.parent:
|
| ... | ... | @@ -30,6 +30,7 @@ See also: :ref:`sandboxing`. |
| 30 | 30 |
import os
|
| 31 | 31 |
import time
|
| 32 | 32 |
from .directory import Directory, VirtualDirectoryError
|
| 33 |
+from .. import utils
|
|
| 33 | 34 |
from ..utils import link_files, copy_files, list_relative_paths, _get_link_mtime, _magic_timestamp
|
| 34 | 35 |
from ..utils import _set_deterministic_user, _set_deterministic_mtime
|
| 35 | 36 |
|
| ... | ... | @@ -125,6 +126,13 @@ class FileBasedDirectory(Directory): |
| 125 | 126 |
self._mark_changed()
|
| 126 | 127 |
return import_result
|
| 127 | 128 |
|
| 129 |
+ def fast_directory_import(self, dirname, other_directory):
|
|
| 130 |
+ # We can't do a fast import into a FileBasedDirectory, so this
|
|
| 131 |
+ # falls back to import_files.
|
|
| 132 |
+ assert dirname not in self.index
|
|
| 133 |
+ subdir = self.descend(dirname, create=True)
|
|
| 134 |
+ subdir.import_files(other_directory, can_link=True)
|
|
| 135 |
+ |
|
| 128 | 136 |
def _mark_changed(self):
|
| 129 | 137 |
self._directory_read = False
|
| 130 | 138 |
|
| ... | ... | @@ -201,6 +209,9 @@ class FileBasedDirectory(Directory): |
| 201 | 209 |
|
| 202 | 210 |
return list_relative_paths(self.external_directory)
|
| 203 | 211 |
|
| 212 |
+ def get_size(self):
|
|
| 213 |
+ return utils._get_dir_size(self.external_directory)
|
|
| 214 |
+ |
|
| 204 | 215 |
def __str__(self):
|
| 205 | 216 |
# This returns the whole path (since we don't know where the directory started)
|
| 206 | 217 |
# which exposes the sandbox directory; we will have to assume for the time being
|
| ... | ... | @@ -75,6 +75,10 @@ class Directory(): |
| 75 | 75 |
can_link=False):
|
| 76 | 76 |
"""Imports some or all files from external_path into this directory.
|
| 77 | 77 |
|
| 78 |
+ Files are imported in the order listed in the 'files'
|
|
| 79 |
+ parameter if it is supplied, and otherwise in the same order as
|
|
| 80 |
+ utils._process_list().
|
|
| 81 |
+ |
|
| 78 | 82 |
Args:
|
| 79 | 83 |
external_pathspec: Either a string containing a pathname, or a
|
| 80 | 84 |
Directory object, to use as the source.
|
| ... | ... | @@ -99,6 +103,32 @@ class Directory(): |
| 99 | 103 |
|
| 100 | 104 |
raise NotImplementedError()
|
| 101 | 105 |
|
| 106 |
+ def fast_directory_import(self, dirname, other_directory):
|
|
| 107 |
+ """Import other_directory as a new directory in this one.
|
|
| 108 |
+ |
|
| 109 |
+ This is a potentially faster method than import_files with
|
|
| 110 |
+ fewer options. dirname must not already exist, and all files
|
|
| 111 |
+ are imported unconditionally. It is assumed that it is
|
|
| 112 |
+ acceptable to use filesystem hard links to files in
|
|
| 113 |
+ other_directory. You cannot update utimes or get a
|
|
| 114 |
+ FileListResult.
|
|
| 115 |
+ |
|
| 116 |
+ This only provides a benefit if both this and other_directory
|
|
| 117 |
+ are CAS-based directories. In other cases, it will fall back
|
|
| 118 |
+ to import_files. The order of files, symlinks and
|
|
| 119 |
+ directories will be the same as the source for CAS-to-CAS, and
|
|
| 120 |
+ will follow normal import_directory rules in other cases.
|
|
| 121 |
+ |
|
| 122 |
+ Args:
|
|
| 123 |
+ dirname: The name to call the subdirectory in this
|
|
| 124 |
+ directory. This must not already exist in this directory.
|
|
| 125 |
+ |
|
| 126 |
+ other_directory: The directory to import.
|
|
| 127 |
+ |
|
| 128 |
+ """
|
|
| 129 |
+ |
|
| 130 |
+ raise NotImplementedError()
|
|
| 131 |
+ |
|
| 102 | 132 |
def export_files(self, to_directory, *, can_link=False, can_destroy=False):
|
| 103 | 133 |
"""Copies everything from this into to_directory.
|
| 104 | 134 |
|
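A short usage note for the method documented above, mirroring the element.py hunk earlier in this patch: the caller names a fresh subdirectory and hands over the other directory wholesale.

    # assemblevdir is a CasBasedDirectory; sandbox_build_dir may be
    # CAS-based (metadata-only import) or file-based (falls back to
    # import_files with hard links).
    assemblevdir.fast_directory_import('buildtree', sandbox_build_dir)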
| ... | ... | @@ -176,3 +206,9 @@ class Directory(): |
| 176 | 206 |
|
| 177 | 207 |
"""
|
| 178 | 208 |
raise NotImplementedError()
|
| 209 |
+ |
|
| 210 |
+ def get_size(self):
|
|
| 211 |
+ """ Get an approximation of the storage space in bytes used by this directory
|
|
| 212 |
+ and all files and subdirectories in it. Storage space varies by implementation
|
|
| 213 |
+ and effective space used may be lower than this number due to deduplication. """
|
|
| 214 |
+ raise NotImplementedError()
|
| ... | ... | @@ -1259,3 +1259,34 @@ def _message_digest(message_buffer): |
| 1259 | 1259 |
digest.hash = sha.hexdigest()
|
| 1260 | 1260 |
digest.size_bytes = len(message_buffer)
|
| 1261 | 1261 |
return digest
|
| 1262 |
+ |
|
| 1263 |
+ |
|
| 1264 |
+# _search_upward_for_files()
|
|
| 1265 |
+#
|
|
| 1266 |
+# Searches upwards (from directory, then directory's parent directory...)
|
|
| 1267 |
+# for any of the files listed in `filenames`.
|
|
| 1268 |
+#
|
|
| 1269 |
+# If multiple filenames are specified, and present in the same directory,
|
|
| 1270 |
+# the first filename in the list will be returned.
|
|
| 1271 |
+#
|
|
| 1272 |
+# Args:
|
|
| 1273 |
+# directory (str): The directory to begin searching for files from
|
|
| 1274 |
+# filenames (list of str): The names of files to search for
|
|
| 1275 |
+#
|
|
| 1276 |
+# Returns:
|
|
| 1277 |
+# (str): The directory a file was found in, or None
|
|
| 1278 |
+# (str): The name of the first file that was found in that directory, or None
|
|
| 1279 |
+#
|
|
| 1280 |
+def _search_upward_for_files(directory, filenames):
|
|
| 1281 |
+ directory = os.path.abspath(directory)
|
|
| 1282 |
+ while True:
|
|
| 1283 |
+ for filename in filenames:
|
|
| 1284 |
+ file_path = os.path.join(directory, filename)
|
|
| 1285 |
+ if os.path.isfile(file_path):
|
|
| 1286 |
+ return directory, filename
|
|
| 1287 |
+ |
|
| 1288 |
+ parent_dir = os.path.dirname(directory)
|
|
| 1289 |
+ if directory == parent_dir:
|
|
| 1290 |
+ # i.e. we've reached the root of the filesystem
|
|
| 1291 |
+ return None, None
|
|
| 1292 |
+ directory = parent_dir
|
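Two properties of the helper above are worth noting: earlier entries in `filenames` win when several candidates live in the same directory, and the sentinel (None, None) is returned once the filesystem root is reached without a match. A hedged example (layout hypothetical):

    found_dir, filename = _search_upward_for_files('/a/b/c', ['project.conf'])
    if found_dir is None:
        # walked up to '/' without finding project.conf
        ...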
| ... | ... | @@ -115,3 +115,23 @@ def test_close_all_cross_junction(cli, tmpdir): |
| 115 | 115 |
assert isinstance(loaded.get('workspaces'), list)
|
| 116 | 116 |
workspaces = loaded['workspaces']
|
| 117 | 117 |
assert len(workspaces) == 0
|
| 118 |
+ |
|
| 119 |
+ |
|
| 120 |
+def test_subdir_command_cross_junction(cli, tmpdir):
|
|
| 121 |
+ # i.e. commands can be run successfully from a subdirectory of the
|
|
| 122 |
+ # junction's workspace, to catch regressions in project loading logic
|
|
| 123 |
+ project = prepare_junction_project(cli, tmpdir)
|
|
| 124 |
+ workspace = os.path.join(str(tmpdir), 'workspace')
|
|
| 125 |
+ junction_element = 'sub.bst'
|
|
| 126 |
+ |
|
| 127 |
+ # Open the junction as a workspace
|
|
| 128 |
+ args = ['workspace', 'open', '--directory', workspace, junction_element]
|
|
| 129 |
+ result = cli.run(project=project, args=args)
|
|
| 130 |
+ result.assert_success()
|
|
| 131 |
+ |
|
| 132 |
+ # Run commands from a subdirectory of the workspace
|
|
| 133 |
+ newdir = os.path.join(str(workspace), "newdir")
|
|
| 134 |
+ element_name = 'data.bst'
|
|
| 135 |
+ os.makedirs(newdir)
|
|
| 136 |
+ result = cli.run(project=str(workspace), args=['-C', newdir, 'show', element_name])
|
|
| 137 |
+ result.assert_success()
|
| ... | ... | @@ -31,6 +31,7 @@ import shutil |
| 31 | 31 |
import subprocess
|
| 32 | 32 |
from ruamel.yaml.comments import CommentedSet
|
| 33 | 33 |
from tests.testutils import cli, create_repo, ALL_REPO_KINDS, wait_for_cache_granularity
|
| 34 |
+from tests.testutils import create_artifact_share
|
|
| 34 | 35 |
|
| 35 | 36 |
from buildstream import _yaml
|
| 36 | 37 |
from buildstream._exceptions import ErrorDomain, LoadError, LoadErrorReason
|
| ... | ... | @@ -615,9 +616,12 @@ def test_list(cli, tmpdir, datafiles): |
| 615 | 616 |
@pytest.mark.datafiles(DATA_DIR)
|
| 616 | 617 |
@pytest.mark.parametrize("kind", repo_kinds)
|
| 617 | 618 |
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
|
| 618 |
-def test_build(cli, tmpdir, datafiles, kind, strict):
|
|
| 619 |
+@pytest.mark.parametrize("call_from", [("project"), ("workspace")])
|
|
| 620 |
+def test_build(cli, tmpdir_factory, datafiles, kind, strict, call_from):
|
|
| 621 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
| 619 | 622 |
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
|
| 620 | 623 |
checkout = os.path.join(str(tmpdir), 'checkout')
|
| 624 |
+ args_pre = ['-C', workspace] if call_from == "workspace" else []
|
|
| 621 | 625 |
|
| 622 | 626 |
# Modify workspace
|
| 623 | 627 |
shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
|
| ... | ... | @@ -640,15 +644,14 @@ def test_build(cli, tmpdir, datafiles, kind, strict): |
| 640 | 644 |
# Build modified workspace
|
| 641 | 645 |
assert cli.get_element_state(project, element_name) == 'buildable'
|
| 642 | 646 |
assert cli.get_element_key(project, element_name) == "{:?<64}".format('')
|
| 643 |
- result = cli.run(project=project, args=['build', element_name])
|
|
| 647 |
+ result = cli.run(project=project, args=args_pre + ['build', element_name])
|
|
| 644 | 648 |
result.assert_success()
|
| 645 | 649 |
assert cli.get_element_state(project, element_name) == 'cached'
|
| 646 | 650 |
assert cli.get_element_key(project, element_name) != "{:?<64}".format('')
|
| 647 | 651 |
|
| 648 | 652 |
# Checkout the result
|
| 649 |
- result = cli.run(project=project, args=[
|
|
| 650 |
- 'checkout', element_name, checkout
|
|
| 651 |
- ])
|
|
| 653 |
+ result = cli.run(project=project,
|
|
| 654 |
+ args=args_pre + ['checkout', element_name, checkout])
|
|
| 652 | 655 |
result.assert_success()
|
| 653 | 656 |
|
| 654 | 657 |
# Check that the pony.conf from the modified workspace exists
|
| ... | ... | @@ -1055,3 +1058,137 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles): |
| 1055 | 1058 |
result = cli.run(project=project, args=["build", element_name])
|
| 1056 | 1059 |
assert "BUG" not in result.stderr
|
| 1057 | 1060 |
assert cli.get_element_state(project, element_name) != "cached"
|
| 1061 |
+ |
|
| 1062 |
+ |
|
| 1063 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 1064 |
+@pytest.mark.parametrize('subdir', [True, False], ids=["subdir", "no-subdir"])
|
|
| 1065 |
+def test_external_fetch(cli, datafiles, tmpdir_factory, subdir):
|
|
| 1066 |
+ # Fetching from a workspace outside a project doesn't fail horribly
|
|
| 1067 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
| 1068 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
| 1069 |
+ |
|
| 1070 |
+ if subdir:
|
|
| 1071 |
+ call_dir = os.path.join(workspace, 'usr')
|
|
| 1072 |
+ else:
|
|
| 1073 |
+ call_dir = workspace
|
|
| 1074 |
+ |
|
| 1075 |
+ result = cli.run(project=project, args=['-C', call_dir, 'fetch', element_name])
|
|
| 1076 |
+ result.assert_success()
|
|
| 1077 |
+ |
|
| 1078 |
+ # We already fetched it by opening the workspace, but we're also checking
|
|
| 1079 |
+ # `bst show` works here
|
|
| 1080 |
+ assert cli.get_element_state(project, element_name) == 'buildable'
|
|
| 1081 |
+ |
|
| 1082 |
+ |
|
| 1083 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 1084 |
+def test_external_push_pull(cli, datafiles, tmpdir_factory):
|
|
| 1085 |
+ # Pushing and pulling to/from an artifact cache works from an external workspace
|
|
| 1086 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
| 1087 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
| 1088 |
+ |
|
| 1089 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
|
| 1090 |
+ result = cli.run(project=project, args=['-C', workspace, 'build', element_name])
|
|
| 1091 |
+ result.assert_success()
|
|
| 1092 |
+ |
|
| 1093 |
+ cli.configure({
|
|
| 1094 |
+ 'artifacts': {'url': share.repo, 'push': True}
|
|
| 1095 |
+ })
|
|
| 1096 |
+ |
|
| 1097 |
+ result = cli.run(project=project, args=['-C', workspace, 'push', element_name])
|
|
| 1098 |
+ result.assert_success()
|
|
| 1099 |
+ |
|
| 1100 |
+ result = cli.run(project=project, args=['-C', workspace, 'pull', '--deps', 'all', element_name])
|
|
| 1101 |
+ result.assert_success()
|
|
| 1102 |
+ |
|
| 1103 |
+ |
|
| 1104 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 1105 |
+def test_external_track(cli, datafiles, tmpdir_factory):
|
|
| 1106 |
+ # Tracking does not get horribly confused
|
|
| 1107 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
| 1108 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", True)
|
|
| 1109 |
+ |
|
| 1110 |
+ # The workspace is necessarily already tracked, so we only care that
|
|
| 1111 |
+ # there's no weird errors.
|
|
| 1112 |
+ result = cli.run(project=project, args=['-C', workspace, 'track', element_name])
|
|
| 1113 |
+ result.assert_success()
|
|
| 1114 |
+ |
|
| 1115 |
+ |
|
| 1116 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 1117 |
+def test_external_open_other(cli, datafiles, tmpdir_factory):
|
|
| 1118 |
+ # From inside an external workspace, open another workspace
|
|
| 1119 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
| 1120 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
| 1121 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
| 1122 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
| 1123 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
| 1124 |
+ |
|
| 1125 |
+ # Closing the other element first, because I'm too lazy to create an
|
|
| 1126 |
+ # element without opening it
|
|
| 1127 |
+ result = cli.run(project=project, args=['workspace', 'close', beta_element])
|
|
| 1128 |
+ result.assert_success()
|
|
| 1129 |
+ |
|
| 1130 |
+ result = cli.run(project=project, args=[
|
|
| 1131 |
+ '-C', alpha_workspace, 'workspace', 'open', '--force', '--directory', beta_workspace, beta_element
|
|
| 1132 |
+ ])
|
|
| 1133 |
+ result.assert_success()
|
|
| 1134 |
+ |
|
| 1135 |
+ |
|
| 1136 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 1137 |
+def test_external_close_other(cli, datafiles, tmpdir_factory):
|
|
| 1138 |
+ # From inside an external workspace, close the other workspace
|
|
| 1139 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
| 1140 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
| 1141 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
| 1142 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
| 1143 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
| 1144 |
+ |
|
| 1145 |
+ result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close', beta_element])
|
|
| 1146 |
+ result.assert_success()
|
|
| 1147 |
+ |
|
| 1148 |
+ |
|
| 1149 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 1150 |
+def test_external_close_self(cli, datafiles, tmpdir_factory):
|
|
| 1151 |
+ # From inside an external workspace, close it
|
|
| 1152 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
| 1153 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
| 1154 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
| 1155 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
| 1156 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
| 1157 |
+ |
|
| 1158 |
+ result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close', alpha_element])
|
|
| 1159 |
+ result.assert_success()
|
|
| 1160 |
+ |
|
| 1161 |
+ |
|
| 1162 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 1163 |
+def test_external_reset_other(cli, datafiles, tmpdir_factory):
|
|
| 1164 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
| 1165 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
| 1166 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
| 1167 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
| 1168 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
| 1169 |
+ |
|
| 1170 |
+ result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'reset', beta_element])
|
|
| 1171 |
+ result.assert_success()
|
|
| 1172 |
+ |
|
| 1173 |
+ |
|
| 1174 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 1175 |
+def test_external_reset_self(cli, datafiles, tmpdir):
|
|
| 1176 |
+ element, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
| 1177 |
+ |
|
| 1178 |
+ # Command succeeds
|
|
| 1179 |
+ result = cli.run(project=project, args=['-C', workspace, 'workspace', 'reset', element])
|
|
| 1180 |
+ result.assert_success()
|
|
| 1181 |
+ |
|
| 1182 |
+ # Successive commands still work (i.e. .bstproject.yaml hasn't been deleted)
|
|
| 1183 |
+ result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
|
|
| 1184 |
+ result.assert_success()
|
|
| 1185 |
+ |
|
| 1186 |
+ |
|
| 1187 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 1188 |
+def test_external_list(cli, datafiles, tmpdir_factory):
|
|
| 1189 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
| 1190 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
| 1191 |
+ element, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
| 1192 |
+ |
|
| 1193 |
+ result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
|
|
| 1194 |
+ result.assert_success()
|
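
These tests lean on the .bstproject.yaml file that `workspace open` now writes: it lets BuildStream discover the project from a workspace directory that lives outside it, which is what makes `-C <workspace>` work above. A minimal sketch of that discovery, assuming a {'projects': [...]} schema for the file (the real schema lives in buildstream/_workspaces.py and may differ):

    import os
    import yaml  # illustrative; BuildStream uses its internal _yaml module

    def find_project_from_workspace(directory):
        # Walk upwards from the -C directory looking for .bstproject.yaml,
        # and return the project path recorded in it, if any.
        directory = os.path.abspath(directory)
        while True:
            candidate = os.path.join(directory, '.bstproject.yaml')
            if os.path.isfile(candidate):
                with open(candidate) as f:
                    return yaml.safe_load(f)['projects'][0]
            parent = os.path.dirname(directory)
            if parent == directory:
                return None
            directory = parent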
@@ -353,3 +353,29 @@ def test_integration_devices(cli, tmpdir, datafiles):

    result = execute_shell(cli, project, ["true"], element=element_name)
    assert result.exit_code == 0
+
+
+# Test that a shell can be opened from an external workspace
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("build_shell", [("build"), ("nobuild")])
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
+def test_integration_external_workspace(cli, tmpdir_factory, datafiles, build_shell):
+    tmpdir = tmpdir_factory.mktemp("")
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    element_name = 'autotools/amhello.bst'
+    workspace_dir = os.path.join(str(tmpdir), 'workspace')
+
+    result = cli.run(project=project, args=[
+        'workspace', 'open', '--directory', workspace_dir, element_name
+    ])
+    result.assert_success()
+
+    result = cli.run(project=project, args=['-C', workspace_dir, 'build', element_name])
+    result.assert_success()
+
+    command = ['shell']
+    if build_shell == 'build':
+        command.append('--build')
+    command.extend([element_name, '--', 'true'])
+    result = cli.run(project=project, cwd=workspace_dir, args=command)
+    result.assert_success()
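
Note the two discovery paths this covers: `build` is invoked with an explicit `-C workspace_dir`, while the `shell` run only sets cwd to the workspace, so the project has to be found via the workspace's .bstproject.yaml. For reference, the argv assembled in the two parametrized cases (`("build")` in the parametrize list is just the string 'build'; the parentheses are inert):

    element_name = 'autotools/amhello.bst'
    for build_shell in ('build', 'nobuild'):
        command = ['shell']
        if build_shell == 'build':
            command.append('--build')
        command.extend([element_name, '--', 'true'])
        print(command)
    # ['shell', '--build', 'autotools/amhello.bst', '--', 'true']
    # ['shell', 'autotools/amhello.bst', '--', 'true']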
@@ -23,6 +23,7 @@
import os
import pytest
import subprocess
+import shutil

from buildstream._exceptions import ErrorDomain
from buildstream import _yaml
@@ -920,3 +921,100 @@ def test_default_do_not_track_tags(cli, tmpdir, datafiles):

    element = _yaml.load(element_path)
    assert 'tags' not in element['sources'][0]
+
+
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles):
+    """When using multiple remotes (e.g. through mirror aliases), we need
+    to make sure that tags are overwritten: a fetch must not keep tags
+    that were present from a different origin
+    """
+
+    project = str(datafiles)
+
+    repofiles = os.path.join(str(tmpdir), 'repofiles')
+    os.makedirs(repofiles, exist_ok=True)
+    file0 = os.path.join(repofiles, 'file0')
+    with open(file0, 'w') as f:
+        f.write('test\n')
+
+    repo = create_repo('git', str(tmpdir))
+
+    top_commit = repo.create(repofiles)
+
+    repodir, reponame = os.path.split(repo.repo)
+    project_config = _yaml.load(os.path.join(project, 'project.conf'))
+    project_config['aliases'] = {
+        'repo': 'http://example.com/'
+    }
+    project_config['mirrors'] = [
+        {
+            'name': 'middle-earth',
+            'aliases': {
+                'repo': ['file://{}/'.format(repodir)]
+            }
+        }
+    ]
+    _yaml.dump(_yaml.node_sanitize(project_config), os.path.join(project, 'project.conf'))
+
+    repo.add_annotated_tag('tag', 'tag')
+
+    file1 = os.path.join(repofiles, 'file1')
+    with open(file1, 'w') as f:
+        f.write('test\n')
+
+    ref = repo.add_file(file1)
+
+    config = repo.source_config(ref=ref)
+    del config['track']
+    config['url'] = 'repo:{}'.format(reponame)
+
+    # Write out our test target
+    element = {
+        'kind': 'import',
+        'sources': [
+            config
+        ],
+    }
+    element_path = os.path.join(project, 'target.bst')
+    _yaml.dump(element, element_path)
+
+    result = cli.run(project=project, args=['build', 'target.bst'])
+    result.assert_success()
+
+    repo.checkout(top_commit)
+
+    file2 = os.path.join(repofiles, 'file2')
+    with open(file2, 'w') as f:
+        f.write('test\n')
+
+    new_ref = repo.add_file(file2)
+
+    repo.delete_tag('tag')
+    repo.add_annotated_tag('tag', 'tag')
+    repo.checkout('master')
+
+    otherpath = os.path.join(str(tmpdir), 'other_path')
+    shutil.copytree(repo.repo,
+                    os.path.join(otherpath, 'repo'))
+    new_repo = create_repo('git', otherpath)
+
+    repodir, reponame = os.path.split(repo.repo)
+
+    _yaml.dump(_yaml.node_sanitize(project_config), os.path.join(project, 'project.conf'))
+
+    config = repo.source_config(ref=new_ref)
+    del config['track']
+    config['url'] = 'repo:{}'.format(reponame)
+
+    element = {
+        'kind': 'import',
+        'sources': [
+            config
+        ],
+    }
+    _yaml.dump(element, element_path)
+
+    result = cli.run(project=project, args=['build', 'target.bst'])
+    result.assert_success()
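
The scenario exercised: the tag 'tag' is deleted and re-created on a different commit, and a second repository (standing in for another origin behind the same alias) still carries the stale version. Stock git will not move an existing tag on fetch unless it is forced, which is why overwriting has to be explicit. A minimal sketch of the distinction, assuming plain subprocess calls rather than whatever the git plugin does internally:

    import subprocess

    def fetch_all_tags(clone_dir, force=False):
        # A leading '+' on the refspec permits non-fast-forward updates;
        # without it, git refuses to clobber an existing tag that moved.
        refspec = 'refs/tags/*:refs/tags/*'
        if force:
            refspec = '+' + refspec
        subprocess.run(['git', '-C', clone_dir, 'fetch', 'origin', refspec],
                       check=True)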
@@ -149,10 +149,10 @@ def resolve_symlinks(path, root):
        if target.startswith(os.path.sep):
            # Absolute link - relative to root
            location = os.path.join(root, target, tail)
+            return resolve_symlinks(location, root)
        else:
-            # Relative link - relative to symlink location
-            location = os.path.join(location, target)
-        return resolve_symlinks(location, root)
+            return resolve_symlinks(os.path.join(os.path.join(*components[:i]), target, tail), root)
+
    # If we got here, no symlinks were found. Add on the final component and return.
    location = os.path.join(location, components[-1])
    return location
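
The behavioural change for relative links: the link component is now replaced by its target relative to the link's parent directory (components[:i]), and resolution restarts on the rewritten path, so any symlinks introduced by the substitution are themselves resolved. In miniature (names are illustrative, not the utils.py code itself):

    import os

    components = ['a', 'b', 'file']   # resolving 'a/b/file' under some root
    i = 1                             # 'a/b' turned out to be a symlink
    target = 'c'                      # b -> c
    tail = os.path.sep.join(components[i + 1:])
    rewritten = os.path.join(os.path.join(*components[:i]), target, tail)
    print(rewritten)                  # 'a/c/file' -- recurse on this from the top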
@@ -199,7 +199,13 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
            pass
        else:
            assert os.path.islink(realpath)
-            assert os.readlink(realpath) == content
+            # We expect all storage to normalise absolute symlinks.
+            depth = len(path.split(os.path.sep)) - 1
+            if content.startswith(os.path.sep):
+                assert os.readlink(realpath) == os.path.sep.join([".."] * depth + [content[1:]])
+            else:
+                assert os.readlink(realpath) == content
+
    elif typename == 'D':
        # We can't do any more tests than this because it
        # depends on things present in the original. Blank
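
The expectation encoded here: storage backends rewrite an absolute symlink target into a relative one with enough '..' components to climb back to the staging root, so the link cannot escape the staged tree wherever it is mounted. A worked re-statement of the arithmetic:

    import os

    def normalised_target(path, content):
        # path: link location relative to the staging root, e.g. 'a/b/link'
        # content: the original absolute target, e.g. '/usr/lib'
        depth = len(path.split(os.path.sep)) - 1
        return os.path.sep.join([".."] * depth + [content[1:]])

    print(normalised_target('a/b/link', '/usr/lib'))  # ../../usr/lib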
@@ -99,12 +99,15 @@ class Git(Repo):
        return config

    def latest_commit(self):
-        output = self._run_git('rev-parse', 'master', stdout=subprocess.PIPE).stdout
+        output = self._run_git('rev-parse', 'HEAD', stdout=subprocess.PIPE).stdout
        return output.decode('UTF-8').strip()

    def branch(self, branch_name):
        self._run_git('checkout', '-b', branch_name)

+    def delete_tag(self, tag_name):
+        self._run_git('tag', '-d', tag_name)
+
    def checkout(self, commit):
        self._run_git('checkout', commit)

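
Two small helpers in support of the rogue-tag test above: latest_commit() now resolves 'HEAD' rather than 'master', so it also works on the detached HEAD left behind by repo.checkout(top_commit), and delete_tag() wraps 'git tag -d'. Illustrative use, inside a test that already has repo and top_commit:

    repo.checkout(top_commit)                  # leaves a detached HEAD
    assert repo.latest_commit() == top_commit  # 'rev-parse master' would still report the branch tip
    repo.delete_tag('tag')                     # git tag -d tag
    repo.add_annotated_tag('tag', 'tag')       # re-create the tag at the current commit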