Jonathan Maw pushed to branch jonathan/workspace-fragment-create at BuildStream / buildstream
Commits:
- d5b5538d by Jim MacArthur at 2018-11-27T15:32:19Z
- f224d4cb by Jim MacArthur at 2018-11-27T15:32:19Z
- 926c1446 by Jim MacArthur at 2018-11-27T15:33:34Z
- d7cdb03e by Jim MacArthur at 2018-11-27T15:33:36Z
- 95d8244f by Jim MacArthur at 2018-11-27T15:33:36Z
- 7f2f62eb by Jim MacArthur at 2018-11-27T15:33:36Z
- c55ca3a5 by Jim MacArthur at 2018-11-27T15:33:36Z
- 2a8a3b19 by Jim MacArthur at 2018-11-27T18:17:03Z
- 7747219a by Jonathan Maw at 2018-11-28T09:49:52Z
- 5e64132c by Jonathan Maw at 2018-11-28T09:49:52Z
- 8156f894 by Jonathan Maw at 2018-11-28T09:49:52Z
- c3fd438f by Jonathan Maw at 2018-11-28T09:49:52Z
- 22999163 by Jonathan Maw at 2018-11-28T09:50:15Z
- cb4527ac by Jonathan Maw at 2018-11-28T09:50:18Z
- 22b34b5b by Jonathan Maw at 2018-11-28T09:50:18Z
- f0a69c35 by Jonathan Maw at 2018-11-28T09:50:18Z
- c28b4f70 by Jonathan Maw at 2018-11-28T09:50:18Z
- 088916e5 by Jonathan Maw at 2018-11-28T09:50:18Z
- 186cfe7a by Jonathan Maw at 2018-11-28T09:50:18Z
- 9bb1c0f9 by Jonathan Maw at 2018-11-28T09:50:18Z
20 changed files:
- NEWS
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/_workspaces.py
- buildstream/data/projectconfig.yaml
- buildstream/data/userconfig.yaml
- buildstream/element.py
- buildstream/sandbox/_sandboxremote.py
- buildstream/utils.py
- doc/source/format_project.rst
- tests/frontend/workspace.py
- tests/integration/shell.py
- + tests/sandboxes/remote-exec-config.py
- + tests/sandboxes/remote-exec-config/missing-certs/certificates/client.crt
- + tests/sandboxes/remote-exec-config/missing-certs/certificates/client.key
- + tests/sandboxes/remote-exec-config/missing-certs/element.bst
Changes:
... | ... | @@ -70,6 +70,9 @@ buildstream 1.3.1 |
70 | 70 |
o Add sandbox API for command batching and use it for build, script, and
|
71 | 71 |
compose elements.
|
72 | 72 |
|
73 |
+ o Opening a workspace now creates a .bstproject.yaml file that allows buildstream
|
|
74 |
+ commands to be run from a workspace that is not inside a project.
|
|
75 |
+ |
|
73 | 76 |
|
74 | 77 |
=================
|
75 | 78 |
buildstream 1.1.5
|
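The new .bstproject.yaml file records which project and element each workspace was opened from, which is what lets commands such as `bst -C <workspace> build` locate the project from outside it. As a rough, hedged sketch of how that file round-trips (the paths here are invented; the API is the WorkspaceProject class added to buildstream/_workspaces.py later in this diff):

    # Sketch only: hypothetical paths, API taken from buildstream/_workspaces.py below
    from buildstream._workspaces import WorkspaceProject

    wp = WorkspaceProject("/home/user/workspaces/hello",
                          project_path="/home/user/project",
                          element_name="hello.bst")
    wp.write()  # writes /home/user/workspaces/hello/.bstproject.yaml

    # load() searches upward, so any directory inside the workspace works
    loaded = WorkspaceProject.load("/home/user/workspaces/hello/src")
    assert loaded.get_default_path() == "/home/user/project"
    assert loaded.get_default_element() == "hello.bst"
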
... | ... | @@ -21,7 +21,6 @@ import multiprocessing |
21 | 21 |
import os
|
22 | 22 |
import signal
|
23 | 23 |
import string
|
24 |
-from collections import namedtuple
|
|
25 | 24 |
from collections.abc import Mapping
|
26 | 25 |
|
27 | 26 |
from ..types import _KeyStrength
|
... | ... | @@ -31,7 +30,7 @@ from .. import _signals |
31 | 30 |
from .. import utils
|
32 | 31 |
from .. import _yaml
|
33 | 32 |
|
34 |
-from .cascache import CASCache, CASRemote
|
|
33 |
+from .cascache import CASRemote, CASRemoteSpec
|
|
35 | 34 |
|
36 | 35 |
|
37 | 36 |
CACHE_SIZE_FILE = "cache_size"
|
... | ... | @@ -45,48 +44,8 @@ CACHE_SIZE_FILE = "cache_size" |
45 | 44 |
# push (bool): Whether we should attempt to push artifacts to this cache,
|
46 | 45 |
# in addition to pulling from it.
|
47 | 46 |
#
|
48 |
-class ArtifactCacheSpec(namedtuple('ArtifactCacheSpec', 'url push server_cert client_key client_cert')):
|
|
49 |
- |
|
50 |
- # _new_from_config_node
|
|
51 |
- #
|
|
52 |
- # Creates an ArtifactCacheSpec() from a YAML loaded node
|
|
53 |
- #
|
|
54 |
- @staticmethod
|
|
55 |
- def _new_from_config_node(spec_node, basedir=None):
|
|
56 |
- _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert'])
|
|
57 |
- url = _yaml.node_get(spec_node, str, 'url')
|
|
58 |
- push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
|
|
59 |
- if not url:
|
|
60 |
- provenance = _yaml.node_get_provenance(spec_node, 'url')
|
|
61 |
- raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
62 |
- "{}: empty artifact cache URL".format(provenance))
|
|
63 |
- |
|
64 |
- server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
|
|
65 |
- if server_cert and basedir:
|
|
66 |
- server_cert = os.path.join(basedir, server_cert)
|
|
67 |
- |
|
68 |
- client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
|
|
69 |
- if client_key and basedir:
|
|
70 |
- client_key = os.path.join(basedir, client_key)
|
|
71 |
- |
|
72 |
- client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
|
|
73 |
- if client_cert and basedir:
|
|
74 |
- client_cert = os.path.join(basedir, client_cert)
|
|
75 |
- |
|
76 |
- if client_key and not client_cert:
|
|
77 |
- provenance = _yaml.node_get_provenance(spec_node, 'client-key')
|
|
78 |
- raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
79 |
- "{}: 'client-key' was specified without 'client-cert'".format(provenance))
|
|
80 |
- |
|
81 |
- if client_cert and not client_key:
|
|
82 |
- provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
|
|
83 |
- raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
84 |
- "{}: 'client-cert' was specified without 'client-key'".format(provenance))
|
|
85 |
- |
|
86 |
- return ArtifactCacheSpec(url, push, server_cert, client_key, client_cert)
|
|
87 |
- |
|
88 |
- |
|
89 |
-ArtifactCacheSpec.__new__.__defaults__ = (None, None, None)
|
|
47 |
+class ArtifactCacheSpec(CASRemoteSpec):
|
|
48 |
+ pass
|
|
90 | 49 |
|
91 | 50 |
|
92 | 51 |
# An ArtifactCache manages artifacts.
|
... | ... | @@ -99,7 +58,7 @@ class ArtifactCache(): |
99 | 58 |
self.context = context
|
100 | 59 |
self.extractdir = os.path.join(context.artifactdir, 'extract')
|
101 | 60 |
|
102 |
- self.cas = CASCache(context.artifactdir)
|
|
61 |
+ self.cas = context.get_cascache()
|
|
103 | 62 |
|
104 | 63 |
self.global_remote_specs = []
|
105 | 64 |
self.project_remote_specs = {}
|
... | ... | @@ -792,34 +751,6 @@ class ArtifactCache(): |
792 | 751 |
|
793 | 752 |
return message_digest
|
794 | 753 |
|
795 |
- # verify_digest_pushed():
|
|
796 |
- #
|
|
797 |
- # Check whether the object is already on the server in which case
|
|
798 |
- # there is no need to upload it.
|
|
799 |
- #
|
|
800 |
- # Args:
|
|
801 |
- # project (Project): The current project
|
|
802 |
- # digest (Digest): The object digest.
|
|
803 |
- #
|
|
804 |
- def verify_digest_pushed(self, project, digest):
|
|
805 |
- |
|
806 |
- if self._has_push_remotes:
|
|
807 |
- push_remotes = [r for r in self._remotes[project] if r.spec.push]
|
|
808 |
- else:
|
|
809 |
- push_remotes = []
|
|
810 |
- |
|
811 |
- if not push_remotes:
|
|
812 |
- raise ArtifactError("verify_digest_pushed was called, but no remote artifact " +
|
|
813 |
- "servers are configured as push remotes.")
|
|
814 |
- |
|
815 |
- pushed = False
|
|
816 |
- |
|
817 |
- for remote in push_remotes:
|
|
818 |
- if self.cas.verify_digest_on_remote(remote, digest):
|
|
819 |
- pushed = True
|
|
820 |
- |
|
821 |
- return pushed
|
|
822 |
- |
|
823 | 754 |
# link_key():
|
824 | 755 |
#
|
825 | 756 |
# Add a key for an existing artifact.
|
... | ... | @@ -17,6 +17,7 @@ |
17 | 17 |
# Authors:
|
18 | 18 |
# Jürg Billeter <juerg billeter codethink co uk>
|
19 | 19 |
|
20 |
+from collections import namedtuple
|
|
20 | 21 |
import hashlib
|
21 | 22 |
import itertools
|
22 | 23 |
import io
|
... | ... | @@ -34,7 +35,8 @@ from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remo |
34 | 35 |
from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
|
35 | 36 |
|
36 | 37 |
from .. import utils
|
37 |
-from .._exceptions import CASError
|
|
38 |
+from .._exceptions import CASError, LoadError, LoadErrorReason
|
|
39 |
+from .. import _yaml
|
|
38 | 40 |
|
39 | 41 |
|
40 | 42 |
# The default limit for gRPC messages is 4 MiB.
|
... | ... | @@ -42,6 +44,50 @@ from .._exceptions import CASError |
42 | 44 |
_MAX_PAYLOAD_BYTES = 1024 * 1024
|
43 | 45 |
|
44 | 46 |
|
47 |
+class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert')):
|
|
48 |
+ |
|
49 |
+ # _new_from_config_node
|
|
50 |
+ #
|
|
51 |
+ # Creates an CASRemoteSpec() from a YAML loaded node
|
|
52 |
+ #
|
|
53 |
+ @staticmethod
|
|
54 |
+ def _new_from_config_node(spec_node, basedir=None):
|
|
55 |
+ _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert'])
|
|
56 |
+ url = _yaml.node_get(spec_node, str, 'url')
|
|
57 |
+ push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
|
|
58 |
+ if not url:
|
|
59 |
+ provenance = _yaml.node_get_provenance(spec_node, 'url')
|
|
60 |
+ raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
61 |
+ "{}: empty artifact cache URL".format(provenance))
|
|
62 |
+ |
|
63 |
+ server_cert = _yaml.node_get(spec_node, str, 'server-cert', default_value=None)
|
|
64 |
+ if server_cert and basedir:
|
|
65 |
+ server_cert = os.path.join(basedir, server_cert)
|
|
66 |
+ |
|
67 |
+ client_key = _yaml.node_get(spec_node, str, 'client-key', default_value=None)
|
|
68 |
+ if client_key and basedir:
|
|
69 |
+ client_key = os.path.join(basedir, client_key)
|
|
70 |
+ |
|
71 |
+ client_cert = _yaml.node_get(spec_node, str, 'client-cert', default_value=None)
|
|
72 |
+ if client_cert and basedir:
|
|
73 |
+ client_cert = os.path.join(basedir, client_cert)
|
|
74 |
+ |
|
75 |
+ if client_key and not client_cert:
|
|
76 |
+ provenance = _yaml.node_get_provenance(spec_node, 'client-key')
|
|
77 |
+ raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
78 |
+ "{}: 'client-key' was specified without 'client-cert'".format(provenance))
|
|
79 |
+ |
|
80 |
+ if client_cert and not client_key:
|
|
81 |
+ provenance = _yaml.node_get_provenance(spec_node, 'client-cert')
|
|
82 |
+ raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
83 |
+ "{}: 'client-cert' was specified without 'client-key'".format(provenance))
|
|
84 |
+ |
|
85 |
+ return CASRemoteSpec(url, push, server_cert, client_key, client_cert)
|
|
86 |
+ |
|
87 |
+ |
|
88 |
+CASRemoteSpec.__new__.__defaults__ = (None, None, None)
|
|
89 |
+ |
|
90 |
+ |
|
45 | 91 |
# A CASCache manages a CAS repository as specified in the Remote Execution API.
|
46 | 92 |
#
|
47 | 93 |
# Args:
|
... | ... | @@ -31,7 +31,8 @@ from ._exceptions import LoadError, LoadErrorReason, BstError |
31 | 31 |
from ._message import Message, MessageType
|
32 | 32 |
from ._profile import Topics, profile_start, profile_end
|
33 | 33 |
from ._artifactcache import ArtifactCache
|
34 |
-from ._workspaces import Workspaces
|
|
34 |
+from ._artifactcache.cascache import CASCache
|
|
35 |
+from ._workspaces import Workspaces, WorkspaceProjectCache
|
|
35 | 36 |
from .plugin import _plugin_lookup
|
36 | 37 |
|
37 | 38 |
|
... | ... | @@ -121,6 +122,10 @@ class Context(): |
121 | 122 |
# remove a workspace directory.
|
122 | 123 |
self.prompt_workspace_close_remove_dir = None
|
123 | 124 |
|
125 |
+ # Boolean, whether we double-check with the user that they meant to
|
|
126 |
+ # close the workspace when they're using it to access the project.
|
|
127 |
+ self.prompt_workspace_close_project_inaccessible = None
|
|
128 |
+ |
|
124 | 129 |
# Boolean, whether we double-check with the user that they meant to do
|
125 | 130 |
# a hard reset of a workspace, potentially losing changes.
|
126 | 131 |
self.prompt_workspace_reset_hard = None
|
... | ... | @@ -139,8 +144,10 @@ class Context(): |
139 | 144 |
self._projects = []
|
140 | 145 |
self._project_overrides = {}
|
141 | 146 |
self._workspaces = None
|
147 |
+ self._workspace_project_cache = WorkspaceProjectCache()
|
|
142 | 148 |
self._log_handle = None
|
143 | 149 |
self._log_filename = None
|
150 |
+ self._cascache = None
|
|
144 | 151 |
|
145 | 152 |
# load()
|
146 | 153 |
#
|
... | ... | @@ -248,12 +255,15 @@ class Context(): |
248 | 255 |
defaults, Mapping, 'prompt')
|
249 | 256 |
_yaml.node_validate(prompt, [
|
250 | 257 |
'auto-init', 'really-workspace-close-remove-dir',
|
258 |
+ 'really-workspace-close-project-inaccessible',
|
|
251 | 259 |
'really-workspace-reset-hard',
|
252 | 260 |
])
|
253 | 261 |
self.prompt_auto_init = _node_get_option_str(
|
254 | 262 |
prompt, 'auto-init', ['ask', 'no']) == 'ask'
|
255 | 263 |
self.prompt_workspace_close_remove_dir = _node_get_option_str(
|
256 | 264 |
prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
|
265 |
+ self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
|
|
266 |
+ prompt, 'really-workspace-close-project-inaccessible', ['ask', 'yes']) == 'ask'
|
|
257 | 267 |
self.prompt_workspace_reset_hard = _node_get_option_str(
|
258 | 268 |
prompt, 'really-workspace-reset-hard', ['ask', 'yes']) == 'ask'
|
259 | 269 |
|
... | ... | @@ -310,6 +320,16 @@ class Context(): |
310 | 320 |
def get_workspaces(self):
|
311 | 321 |
return self._workspaces
|
312 | 322 |
|
323 |
+ # get_workspace_project_cache():
|
|
324 |
+ #
|
|
325 |
+ # Return the WorkspaceProjectCache object used for this BuildStream invocation
|
|
326 |
+ #
|
|
327 |
+ # Returns:
|
|
328 |
+ # (WorkspaceProjectCache): The WorkspaceProjectCache object
|
|
329 |
+ #
|
|
330 |
+ def get_workspace_project_cache(self):
|
|
331 |
+ return self._workspace_project_cache
|
|
332 |
+ |
|
313 | 333 |
# get_overrides():
|
314 | 334 |
#
|
315 | 335 |
# Fetch the override dictionary for the active project. This returns
|
... | ... | @@ -620,6 +640,11 @@ class Context(): |
620 | 640 |
if not os.environ.get('XDG_DATA_HOME'):
|
621 | 641 |
os.environ['XDG_DATA_HOME'] = os.path.expanduser('~/.local/share')
|
622 | 642 |
|
643 |
+ def get_cascache(self):
|
|
644 |
+ if self._cascache is None:
|
|
645 |
+ self._cascache = CASCache(self.artifactdir)
|
|
646 |
+ return self._cascache
|
|
647 |
+ |
|
623 | 648 |
|
624 | 649 |
# _node_get_option_str()
|
625 | 650 |
#
|
... | ... | @@ -59,18 +59,9 @@ def complete_target(args, incomplete): |
59 | 59 |
:return: all the possible user-specified completions for the param
|
60 | 60 |
"""
|
61 | 61 |
|
62 |
+ from .. import utils
|
|
62 | 63 |
project_conf = 'project.conf'
|
63 | 64 |
|
64 |
- def ensure_project_dir(directory):
|
|
65 |
- directory = os.path.abspath(directory)
|
|
66 |
- while not os.path.isfile(os.path.join(directory, project_conf)):
|
|
67 |
- parent_dir = os.path.dirname(directory)
|
|
68 |
- if directory == parent_dir:
|
|
69 |
- break
|
|
70 |
- directory = parent_dir
|
|
71 |
- |
|
72 |
- return directory
|
|
73 |
- |
|
74 | 65 |
# First resolve the directory, in case there is an
|
75 | 66 |
# active --directory/-C option
|
76 | 67 |
#
|
... | ... | @@ -89,7 +80,7 @@ def complete_target(args, incomplete): |
89 | 80 |
else:
|
90 | 81 |
# Check if this directory or any of its parent directories
|
91 | 82 |
# contain a project config file
|
92 |
- base_directory = ensure_project_dir(base_directory)
|
|
83 |
+ base_directory = utils._search_upward_for_file(base_directory, project_conf)
|
|
93 | 84 |
|
94 | 85 |
# Now parse the project.conf just to find the element path,
|
95 | 86 |
# this is unfortunately a bit heavy.
|
... | ... | @@ -756,11 +747,18 @@ def workspace_close(app, remove_dir, all_, elements): |
756 | 747 |
|
757 | 748 |
elements = app.stream.redirect_element_names(elements)
|
758 | 749 |
|
759 |
- # Check that the workspaces in question exist
|
|
750 |
+ # Check that the workspaces in question exist, and that it's safe to
|
|
751 |
+ # remove them.
|
|
760 | 752 |
nonexisting = []
|
761 | 753 |
for element_name in elements:
|
762 | 754 |
if not app.stream.workspace_exists(element_name):
|
763 | 755 |
nonexisting.append(element_name)
|
756 |
+ if (app.stream.workspace_is_required(element_name) and app.interactive and
|
|
757 |
+ app.context.prompt_workspace_close_project_inaccessible):
|
|
758 |
+ click.echo("Removing '{}' will prevent you from running buildstream commands".format(element_name))
|
|
759 |
+ if not click.confirm('Are you sure you want to close this workspace?'):
|
|
760 |
+ click.echo('Aborting', err=True)
|
|
761 |
+ sys.exit(-1)
|
|
764 | 762 |
if nonexisting:
|
765 | 763 |
raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
|
766 | 764 |
|
... | ... | @@ -30,6 +30,7 @@ from ._profile import Topics, profile_start, profile_end |
30 | 30 |
from ._exceptions import LoadError, LoadErrorReason
|
31 | 31 |
from ._options import OptionPool
|
32 | 32 |
from ._artifactcache import ArtifactCache
|
33 |
+from .sandbox import SandboxRemote
|
|
33 | 34 |
from ._elementfactory import ElementFactory
|
34 | 35 |
from ._sourcefactory import SourceFactory
|
35 | 36 |
from .plugin import CoreWarnings
|
... | ... | @@ -94,8 +95,10 @@ class Project(): |
94 | 95 |
# The project name
|
95 | 96 |
self.name = None
|
96 | 97 |
|
97 |
- # The project directory
|
|
98 |
- self.directory = self._ensure_project_dir(directory)
|
|
98 |
+ self._context = context # The invocation Context, a private member
|
|
99 |
+ |
|
100 |
+ # The project directory, and whether the project was found from an external workspace
|
|
101 |
+ self.directory, self._required_workspace_element = self._find_project_dir(directory)
|
|
99 | 102 |
|
100 | 103 |
# Absolute path to where elements are loaded from within the project
|
101 | 104 |
self.element_path = None
|
... | ... | @@ -116,7 +119,6 @@ class Project(): |
116 | 119 |
#
|
117 | 120 |
# Private Members
|
118 | 121 |
#
|
119 |
- self._context = context # The invocation Context
|
|
120 | 122 |
|
121 | 123 |
self._default_mirror = default_mirror # The name of the preferred mirror.
|
122 | 124 |
|
... | ... | @@ -130,7 +132,7 @@ class Project(): |
130 | 132 |
self._shell_host_files = [] # A list of HostMount objects
|
131 | 133 |
|
132 | 134 |
self.artifact_cache_specs = None
|
133 |
- self.remote_execution_url = None
|
|
135 |
+ self.remote_execution_specs = None
|
|
134 | 136 |
self._sandbox = None
|
135 | 137 |
self._splits = None
|
136 | 138 |
|
... | ... | @@ -370,6 +372,14 @@ class Project(): |
370 | 372 |
|
371 | 373 |
self._load_second_pass()
|
372 | 374 |
|
375 |
+ # required_workspace_element()
|
|
376 |
+ #
|
|
377 |
+ # Returns the element whose workspace is required to load this project,
|
|
378 |
+ # if any.
|
|
379 |
+ #
|
|
380 |
+ def required_workspace_element(self):
|
|
381 |
+ return self._required_workspace_element
|
|
382 |
+ |
|
373 | 383 |
# cleanup()
|
374 | 384 |
#
|
375 | 385 |
# Cleans up resources used loading elements
|
... | ... | @@ -493,9 +503,7 @@ class Project(): |
493 | 503 |
self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
|
494 | 504 |
|
495 | 505 |
# Load remote-execution configuration for this project
|
496 |
- remote_execution = _yaml.node_get(config, Mapping, 'remote-execution')
|
|
497 |
- _yaml.node_validate(remote_execution, ['url'])
|
|
498 |
- self.remote_execution_url = _yaml.node_get(remote_execution, str, 'url')
|
|
506 |
+ self.remote_execution_specs = SandboxRemote.specs_from_config_node(config, self.directory)
|
|
499 | 507 |
|
500 | 508 |
# Load sandbox environment variables
|
501 | 509 |
self.base_environment = _yaml.node_get(config, Mapping, 'environment')
|
... | ... | @@ -651,7 +659,7 @@ class Project(): |
651 | 659 |
# Source url aliases
|
652 | 660 |
output._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
|
653 | 661 |
|
654 |
- # _ensure_project_dir()
|
|
662 |
+ # _find_project_dir()
|
|
655 | 663 |
#
|
656 | 664 |
# Returns path of the project directory, if a configuration file is found
|
657 | 665 |
# in given directory or any of its parent directories.
|
... | ... | @@ -662,18 +670,26 @@ class Project(): |
662 | 670 |
# Raises:
|
663 | 671 |
# LoadError if project.conf is not found
|
664 | 672 |
#
|
665 |
- def _ensure_project_dir(self, directory):
|
|
666 |
- directory = os.path.abspath(directory)
|
|
667 |
- while not os.path.isfile(os.path.join(directory, _PROJECT_CONF_FILE)):
|
|
668 |
- parent_dir = os.path.dirname(directory)
|
|
669 |
- if directory == parent_dir:
|
|
673 |
+ # Returns:
|
|
674 |
+ # (str) - the directory that contains the project, and
|
|
675 |
+ # (str) - the name of the element required to find the project, or an empty string
|
|
676 |
+ #
|
|
677 |
+ def _find_project_dir(self, directory):
|
|
678 |
+ workspace_element = ""
|
|
679 |
+ project_directory = utils._search_upward_for_file(directory, _PROJECT_CONF_FILE)
|
|
680 |
+ if not project_directory:
|
|
681 |
+ workspace_project_cache = self._context.get_workspace_project_cache()
|
|
682 |
+ workspace_project = workspace_project_cache.get(directory)
|
|
683 |
+ if workspace_project:
|
|
684 |
+ project_directory = workspace_project.get_default_path()
|
|
685 |
+ workspace_element = workspace_project.get_default_element()
|
|
686 |
+ else:
|
|
670 | 687 |
raise LoadError(
|
671 | 688 |
LoadErrorReason.MISSING_PROJECT_CONF,
|
672 | 689 |
'{} not found in current directory or any of its parent directories'
|
673 | 690 |
.format(_PROJECT_CONF_FILE))
|
674 |
- directory = parent_dir
|
|
675 | 691 |
|
676 |
- return directory
|
|
692 |
+ return project_directory, workspace_element
|
|
677 | 693 |
|
678 | 694 |
def _load_plugin_factories(self, config, output):
|
679 | 695 |
plugin_source_origins = [] # Origins of custom sources
|
... | ... | @@ -28,7 +28,7 @@ import tarfile |
28 | 28 |
from contextlib import contextmanager
|
29 | 29 |
from tempfile import TemporaryDirectory
|
30 | 30 |
|
31 |
-from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
|
|
31 |
+from ._exceptions import StreamError, ImplError, BstError, set_last_task_error, LoadError, LoadErrorReason
|
|
32 | 32 |
from ._message import Message, MessageType
|
33 | 33 |
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
|
34 | 34 |
from ._pipeline import Pipeline, PipelineSelection
|
... | ... | @@ -550,6 +550,8 @@ class Stream(): |
550 | 550 |
# So far this function has tried to catch as many issues as possible without making any changes
|
551 | 551 |
# Now it does the bits that cannot be made atomic.
|
552 | 552 |
targetGenerator = zip(elements, expanded_directories)
|
553 |
+ workspace_project_cache = self._context.get_workspace_project_cache()
|
|
554 |
+ project = self._context.get_toplevel_project()
|
|
553 | 555 |
for target, directory in targetGenerator:
|
554 | 556 |
self._message(MessageType.INFO, "Creating workspace for element {}"
|
555 | 557 |
.format(target.name))
|
... | ... | @@ -574,6 +576,10 @@ class Stream(): |
574 | 576 |
with target.timed_activity("Staging sources to {}".format(directory)):
|
575 | 577 |
target._open_workspace()
|
576 | 578 |
|
579 |
+ workspace_project = workspace_project_cache.add(directory, project.directory,
|
|
580 |
+ target._get_full_name())
|
|
581 |
+ workspace_project.write()
|
|
582 |
+ |
|
577 | 583 |
# Saving the workspace once it is set up means that if the next workspace fails to be created before
|
578 | 584 |
# the configuration gets saved. The successfully created workspace still gets saved.
|
579 | 585 |
workspaces.save_config()
|
... | ... | @@ -601,6 +607,16 @@ class Stream(): |
601 | 607 |
except OSError as e:
|
602 | 608 |
raise StreamError("Could not remove '{}': {}"
|
603 | 609 |
.format(workspace.get_absolute_path(), e)) from e
|
610 |
+ else:
|
|
611 |
+ workspace_project_cache = self._context.get_workspace_project_cache()
|
|
612 |
+ try:
|
|
613 |
+ workspace_project_cache.remove(workspace.get_absolute_path())
|
|
614 |
+ except LoadError as e:
|
|
615 |
+ # We might be closing a workspace with a deleted directory
|
|
616 |
+ if e.reason == LoadErrorReason.MISSING_FILE:
|
|
617 |
+ pass
|
|
618 |
+ else:
|
|
619 |
+ raise
|
|
604 | 620 |
|
605 | 621 |
# Delete the workspace and save the configuration
|
606 | 622 |
workspaces.delete_workspace(element_name)
|
... | ... | @@ -644,6 +660,8 @@ class Stream(): |
644 | 660 |
for element in elements:
|
645 | 661 |
workspace = workspaces.get_workspace(element._get_full_name())
|
646 | 662 |
workspace_path = workspace.get_absolute_path()
|
663 |
+ workspace_project_cache = self._context.get_workspace_project_cache()
|
|
664 |
+ workspace_project = workspace_project_cache.get(workspace_path)
|
|
647 | 665 |
if soft:
|
648 | 666 |
workspace.prepared = False
|
649 | 667 |
self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
|
... | ... | @@ -664,6 +682,8 @@ class Stream(): |
664 | 682 |
with element.timed_activity("Staging sources to {}".format(workspace_path)):
|
665 | 683 |
element._open_workspace()
|
666 | 684 |
|
685 |
+ workspace_project.write()
|
|
686 |
+ |
|
667 | 687 |
self._message(MessageType.INFO,
|
668 | 688 |
"Reset workspace for {} at: {}".format(element.name,
|
669 | 689 |
workspace_path))
|
... | ... | @@ -694,6 +714,20 @@ class Stream(): |
694 | 714 |
|
695 | 715 |
return False
|
696 | 716 |
|
717 |
+ # workspace_is_required()
|
|
718 |
+ #
|
|
719 |
+ # Checks whether the workspace belonging to element_name is required to
|
|
720 |
+ # load the project
|
|
721 |
+ #
|
|
722 |
+ # Args:
|
|
723 |
+ # element_name (str): The element whose workspace may be required
|
|
724 |
+ #
|
|
725 |
+ # Returns:
|
|
726 |
+ # (bool): True if the workspace is required
|
|
727 |
+ def workspace_is_required(self, element_name):
|
|
728 |
+ required_elm = self._project.required_workspace_element()
|
|
729 |
+ return required_elm == element_name
|
|
730 |
+ |
|
697 | 731 |
# workspace_list
|
698 | 732 |
#
|
699 | 733 |
# Serializes the workspaces and dumps them in YAML to stdout.
|
... | ... | @@ -25,6 +25,211 @@ from ._exceptions import LoadError, LoadErrorReason |
25 | 25 |
|
26 | 26 |
|
27 | 27 |
BST_WORKSPACE_FORMAT_VERSION = 3
|
28 |
+BST_WORKSPACE_PROJECT_FORMAT_VERSION = 1
|
|
29 |
+WORKSPACE_PROJECT_FILE = ".bstproject.yaml"
|
|
30 |
+ |
|
31 |
+ |
|
32 |
+# WorkspaceProject()
|
|
33 |
+#
|
|
34 |
+# An object to contain various helper functions and data required for
|
|
35 |
+# referring from a workspace back to buildstream.
|
|
36 |
+#
|
|
37 |
+# Args:
|
|
38 |
+# directory (str): The directory that the workspace exists in
|
|
39 |
+# project_path (str): The project path used to refer back
|
|
40 |
+# to buildstream projects.
|
|
41 |
+# element_name (str): The name of the element used to create this workspace.
|
|
42 |
+class WorkspaceProject():
|
|
43 |
+ def __init__(self, directory, project_path="", element_name=""):
|
|
44 |
+ self._projects = []
|
|
45 |
+ self._directory = directory
|
|
46 |
+ |
|
47 |
+ assert (project_path and element_name) or (not project_path and not element_name)
|
|
48 |
+ if project_path:
|
|
49 |
+ self._add_project(project_path, element_name)
|
|
50 |
+ |
|
51 |
+ # get_default_path()
|
|
52 |
+ #
|
|
53 |
+ # Retrieves the default path to a project.
|
|
54 |
+ #
|
|
55 |
+ # Returns:
|
|
56 |
+ # (str): The path to a project
|
|
57 |
+ def get_default_path(self):
|
|
58 |
+ return self._projects[0]['project-path']
|
|
59 |
+ |
|
60 |
+ # get_default_element()
|
|
61 |
+ #
|
|
62 |
+ # Retrieves the name of the element that owns this workspace.
|
|
63 |
+ #
|
|
64 |
+ # Returns:
|
|
65 |
+ # (str): The name of an element
|
|
66 |
+ def get_default_element(self):
|
|
67 |
+ return self._projects[0]['element-name']
|
|
68 |
+ |
|
69 |
+ # to_dict()
|
|
70 |
+ #
|
|
71 |
+ # Turn the members data into a dict for serialization purposes
|
|
72 |
+ #
|
|
73 |
+ # Returns:
|
|
74 |
+ # (dict): A dict representation of the WorkspaceProject
|
|
75 |
+ #
|
|
76 |
+ def to_dict(self):
|
|
77 |
+ ret = {
|
|
78 |
+ 'projects': self._projects,
|
|
79 |
+ 'format-version': BST_WORKSPACE_PROJECT_FORMAT_VERSION,
|
|
80 |
+ }
|
|
81 |
+ return ret
|
|
82 |
+ |
|
83 |
+ # from_dict()
|
|
84 |
+ #
|
|
85 |
+ # Loads a new WorkspaceProject from a simple dictionary
|
|
86 |
+ #
|
|
87 |
+ # Args:
|
|
88 |
+ # directory (str): The directory that the workspace exists in
|
|
89 |
+ # dictionary (dict): The dict to generate a WorkspaceProject from
|
|
90 |
+ #
|
|
91 |
+ # Returns:
|
|
92 |
+ # (WorkspaceProject): A newly instantiated WorkspaceProject
|
|
93 |
+ @classmethod
|
|
94 |
+ def from_dict(cls, directory, dictionary):
|
|
95 |
+ # Only know how to handle one format-version at the moment.
|
|
96 |
+ format_version = int(dictionary['format-version'])
|
|
97 |
+ assert format_version == BST_WORKSPACE_PROJECT_FORMAT_VERSION, \
|
|
98 |
+ "Format version {} not found in {}".format(BST_WORKSPACE_PROJECT_FORMAT_VERSION, dictionary)
|
|
99 |
+ |
|
100 |
+ workspace_project = cls(directory)
|
|
101 |
+ for item in dictionary['projects']:
|
|
102 |
+ workspace_project._add_project(item['project-path'], item['element-name'])
|
|
103 |
+ |
|
104 |
+ return workspace_project
|
|
105 |
+ |
|
106 |
+ # load()
|
|
107 |
+ #
|
|
108 |
+ # Loads the WorkspaceProject for a given directory. This directory may be a
|
|
109 |
+ # subdirectory of the workspace's directory.
|
|
110 |
+ #
|
|
111 |
+ # Args:
|
|
112 |
+ # directory (str): The directory
|
|
113 |
+ # Returns:
|
|
114 |
+ # (WorkspaceProject): The created WorkspaceProject, if in a workspace, or
|
|
115 |
+ # (NoneType): None, if the directory is not inside a workspace.
|
|
116 |
+ @classmethod
|
|
117 |
+ def load(cls, directory):
|
|
118 |
+ project_dir = cls.search_for_dir(directory)
|
|
119 |
+ if project_dir:
|
|
120 |
+ workspace_file = os.path.join(project_dir, WORKSPACE_PROJECT_FILE)
|
|
121 |
+ data_dict = _yaml.load(workspace_file)
|
|
122 |
+ return cls.from_dict(project_dir, data_dict)
|
|
123 |
+ else:
|
|
124 |
+ return None
|
|
125 |
+ |
|
126 |
+ # write()
|
|
127 |
+ #
|
|
128 |
+ # Writes the WorkspaceProject to disk
|
|
129 |
+ def write(self):
|
|
130 |
+ os.makedirs(self._directory, exist_ok=True)
|
|
131 |
+ _yaml.dump(self.to_dict(), self._get_filename())
|
|
132 |
+ |
|
133 |
+ # search_for_dir()
|
|
134 |
+ #
|
|
135 |
+ # Returns the directory that contains the workspace local project file,
|
|
136 |
+ # searching upwards from search_dir.
|
|
137 |
+ @staticmethod
|
|
138 |
+ def search_for_dir(search_dir):
|
|
139 |
+ return utils._search_upward_for_file(search_dir, WORKSPACE_PROJECT_FILE)
|
|
140 |
+ |
|
141 |
+ def _get_filename(self):
|
|
142 |
+ return os.path.join(self._directory, WORKSPACE_PROJECT_FILE)
|
|
143 |
+ |
|
144 |
+ def _add_project(self, project_path, element_name):
|
|
145 |
+ assert (project_path and element_name)
|
|
146 |
+ self._projects.append({'project-path': project_path, 'element-name': element_name})
|
|
147 |
+ |
|
148 |
+ |
|
149 |
+# WorkspaceProjectCache()
|
|
150 |
+#
|
|
151 |
+# A class to manage workspace project data for multiple workspaces.
|
|
152 |
+#
|
|
153 |
+class WorkspaceProjectCache():
|
|
154 |
+ def __init__(self):
|
|
155 |
+ self._projects = {} # Mapping of a workspace directory to its WorkspaceProject
|
|
156 |
+ |
|
157 |
+ # get()
|
|
158 |
+ #
|
|
159 |
+ # Returns a WorkspaceProject for a given directory, retrieving from the cache if
|
|
160 |
+ # present, and searching the filesystem for the file and loading it if not.
|
|
161 |
+ #
|
|
162 |
+ # Args:
|
|
163 |
+ # directory (str): The directory to search for a WorkspaceProject.
|
|
164 |
+ #
|
|
165 |
+ # Returns:
|
|
166 |
+ # (WorkspaceProject): The WorkspaceProject that was found for that directory.
|
|
167 |
+ # or (NoneType): None, if no WorkspaceProject can be found.
|
|
168 |
+ #
|
|
169 |
+ def get(self, directory):
|
|
170 |
+ try:
|
|
171 |
+ workspace_project = self._projects[directory]
|
|
172 |
+ except KeyError:
|
|
173 |
+ found_dir = WorkspaceProject.search_for_dir(directory)
|
|
174 |
+ if found_dir:
|
|
175 |
+ try:
|
|
176 |
+ workspace_project = self._projects[found_dir]
|
|
177 |
+ except KeyError:
|
|
178 |
+ workspace_project = WorkspaceProject.load(found_dir)
|
|
179 |
+ self._projects[found_dir] = workspace_project
|
|
180 |
+ else:
|
|
181 |
+ workspace_project = None
|
|
182 |
+ |
|
183 |
+ return workspace_project
|
|
184 |
+ |
|
185 |
+ # add()
|
|
186 |
+ #
|
|
187 |
+ # Adds the project path and element name to the WorkspaceProject that exists
|
|
188 |
+ # for that directory
|
|
189 |
+ #
|
|
190 |
+ # Args:
|
|
191 |
+ # directory (str): The directory to search for a WorkspaceProject.
|
|
192 |
+ # project_path (str): The path to the project that refers to this workspace
|
|
193 |
+ # element_name (str): The element in the project that refers to this workspace
|
|
194 |
+ #
|
|
195 |
+ # Returns:
|
|
196 |
+ # (WorkspaceProject): The WorkspaceProject that was found for that directory.
|
|
197 |
+ #
|
|
198 |
+ def add(self, directory, project_path='', element_name=''):
|
|
199 |
+ workspace_project = self.get(directory)
|
|
200 |
+ if not workspace_project:
|
|
201 |
+ workspace_project = WorkspaceProject(directory)
|
|
202 |
+ self._projects[directory] = workspace_project
|
|
203 |
+ if project_path:
|
|
204 |
+ workspace_project._add_project(project_path, element_name)
|
|
205 |
+ return workspace_project
|
|
206 |
+ |
|
207 |
+ # remove()
|
|
208 |
+ #
|
|
209 |
+ # Removes the project path and element name from the WorkspaceProject that exists
|
|
210 |
+ # for that directory.
|
|
211 |
+ #
|
|
212 |
+ # NOTE: This currently just deletes the file, but with support for multiple
|
|
213 |
+ # projects opening the same workspace, this will involve decreasing the count
|
|
214 |
+ # and deleting the file if there are no more projects.
|
|
215 |
+ #
|
|
216 |
+ # Args:
|
|
217 |
+ # directory (str): The directory to search for a WorkspaceProject.
|
|
218 |
+ # project_path (str): **UNUSED** The path to the project that refers to this workspace
|
|
219 |
+ # element_name (str): **UNUSED** The element in the project that refers to this workspace
|
|
220 |
+ #
|
|
221 |
+ def remove(self, directory, project_path='', element_name=''):
|
|
222 |
+ # NOTE: project_path and element_name will only be used when I implement
|
|
223 |
+ # multiple owners of a workspace
|
|
224 |
+ workspace_project = self.get(directory)
|
|
225 |
+ if not workspace_project:
|
|
226 |
+ raise LoadError(LoadErrorReason.MISSING_FILE,
|
|
227 |
+ "Failed to find a {} file to remove".format(WORKSPACE_PROJECT_FILE))
|
|
228 |
+ path = workspace_project._get_filename()
|
|
229 |
+ try:
|
|
230 |
+ os.unlink(path)
|
|
231 |
+ except FileNotFoundError:
|
|
232 |
+ pass
|
|
28 | 233 |
|
29 | 234 |
|
30 | 235 |
# Workspace()
|
... | ... | @@ -174,10 +379,15 @@ class Workspace(): |
174 | 379 |
if recalculate or self._key is None:
|
175 | 380 |
fullpath = self.get_absolute_path()
|
176 | 381 |
|
382 |
+ excluded_files = (WORKSPACE_PROJECT_FILE,)
|
|
383 |
+ |
|
177 | 384 |
# Get a list of tuples of the the project relative paths and fullpaths
|
178 | 385 |
if os.path.isdir(fullpath):
|
179 | 386 |
filelist = utils.list_relative_paths(fullpath)
|
180 |
- filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
|
|
387 |
+ filelist = [
|
|
388 |
+ (relpath, os.path.join(fullpath, relpath)) for relpath in filelist
|
|
389 |
+ if relpath not in excluded_files
|
|
390 |
+ ]
|
|
181 | 391 |
else:
|
182 | 392 |
filelist = [(self.get_absolute_path(), fullpath)]
|
183 | 393 |
|
... | ... | @@ -196,7 +196,4 @@ shell: |
196 | 196 |
|
197 | 197 |
# Command to run when `bst shell` does not provide a command
|
198 | 198 |
#
|
199 |
- command: [ 'sh', '-i' ]
|
|
200 |
- |
|
201 |
-remote-execution:
|
|
202 |
- url: ""
|
|
\ No newline at end of file | ||
199 |
+ command: [ 'sh', '-i' ]
|
|
\ No newline at end of file |
... | ... | @@ -128,6 +128,14 @@ prompt: |
128 | 128 |
#
|
129 | 129 |
really-workspace-close-remove-dir: ask
|
130 | 130 |
|
131 |
+ # Whether to really proceed with 'bst workspace close' when doing so would
|
|
132 |
+ # stop them from running bst commands in this workspace.
|
|
133 |
+ #
|
|
134 |
+ # ask - Ask the user if they are sure.
|
|
135 |
+ # yes - Always close, without asking.
|
|
136 |
+ #
|
|
137 |
+ really-workspace-close-project-inaccessible: ask
|
|
138 |
+ |
|
131 | 139 |
# Whether to really proceed with 'bst workspace reset' doing a hard reset of
|
132 | 140 |
# a workspace, potentially losing changes.
|
133 | 141 |
#
|
... | ... | @@ -255,9 +255,9 @@ class Element(Plugin): |
255 | 255 |
|
256 | 256 |
# Extract remote execution URL
|
257 | 257 |
if not self.__is_junction:
|
258 |
- self.__remote_execution_url = project.remote_execution_url
|
|
258 |
+ self.__remote_execution_specs = project.remote_execution_specs
|
|
259 | 259 |
else:
|
260 |
- self.__remote_execution_url = None
|
|
260 |
+ self.__remote_execution_specs = None
|
|
261 | 261 |
|
262 | 262 |
# Extract Sandbox config
|
263 | 263 |
self.__sandbox_config = self.__extract_sandbox_config(meta)
|
... | ... | @@ -2171,7 +2171,7 @@ class Element(Plugin): |
2171 | 2171 |
# supports it.
|
2172 | 2172 |
#
|
2173 | 2173 |
def __use_remote_execution(self):
|
2174 |
- return self.__remote_execution_url and self.BST_VIRTUAL_DIRECTORY
|
|
2174 |
+ return self.__remote_execution_specs and self.BST_VIRTUAL_DIRECTORY
|
|
2175 | 2175 |
|
2176 | 2176 |
# __sandbox():
|
2177 | 2177 |
#
|
... | ... | @@ -2207,13 +2207,13 @@ class Element(Plugin): |
2207 | 2207 |
stdout=stdout,
|
2208 | 2208 |
stderr=stderr,
|
2209 | 2209 |
config=config,
|
2210 |
- server_url=self.__remote_execution_url,
|
|
2210 |
+ specs=self.__remote_execution_specs,
|
|
2211 | 2211 |
bare_directory=bare_directory,
|
2212 | 2212 |
allow_real_directory=False)
|
2213 | 2213 |
yield sandbox
|
2214 | 2214 |
|
2215 | 2215 |
elif directory is not None and os.path.exists(directory):
|
2216 |
- if allow_remote and self.__remote_execution_url:
|
|
2216 |
+ if allow_remote and self.__remote_execution_specs:
|
|
2217 | 2217 |
self.warn("Artifact {} is configured to use remote execution but element plugin does not support it."
|
2218 | 2218 |
.format(self.name), detail="Element plugin '{kind}' does not support virtual directories."
|
2219 | 2219 |
.format(kind=self.get_kind()), warning_token="remote-failure")
|
... | ... | @@ -20,6 +20,7 @@ |
20 | 20 |
|
21 | 21 |
import os
|
22 | 22 |
import shlex
|
23 |
+from collections import namedtuple
|
|
23 | 24 |
from urllib.parse import urlparse
|
24 | 25 |
from functools import partial
|
25 | 26 |
|
... | ... | @@ -33,7 +34,13 @@ from .. import _signals |
33 | 34 |
from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
|
34 | 35 |
from .._protos.google.rpc import code_pb2
|
35 | 36 |
from .._exceptions import SandboxError
|
37 |
+from .. import _yaml
|
|
36 | 38 |
from .._protos.google.longrunning import operations_pb2, operations_pb2_grpc
|
39 |
+from .._artifactcache.cascache import CASRemote, CASRemoteSpec
|
|
40 |
+ |
|
41 |
+ |
|
42 |
+class RemoteExecutionSpec(namedtuple('RemoteExecutionSpec', 'exec_service storage_service')):
|
|
43 |
+ pass
|
|
37 | 44 |
|
38 | 45 |
|
39 | 46 |
# SandboxRemote()
|
... | ... | @@ -46,18 +53,70 @@ class SandboxRemote(Sandbox): |
46 | 53 |
def __init__(self, *args, **kwargs):
|
47 | 54 |
super().__init__(*args, **kwargs)
|
48 | 55 |
|
49 |
- url = urlparse(kwargs['server_url'])
|
|
50 |
- if not url.scheme or not url.hostname or not url.port:
|
|
51 |
- raise SandboxError("Configured remote URL '{}' does not match the expected layout. "
|
|
52 |
- .format(kwargs['server_url']) +
|
|
53 |
- "It should be of the form <protocol>://<domain name>:<port>.")
|
|
54 |
- elif url.scheme != 'http':
|
|
55 |
- raise SandboxError("Configured remote '{}' uses an unsupported protocol. "
|
|
56 |
- "Only plain HTTP is currenlty supported (no HTTPS).")
|
|
56 |
+ config = kwargs['specs'] # This should be a RemoteExecutionSpec
|
|
57 |
+ if config is None:
|
|
58 |
+ return
|
|
59 |
+ |
|
60 |
+ self.storage_url = config.storage_service['url']
|
|
61 |
+ self.exec_url = config.exec_service['url']
|
|
57 | 62 |
|
58 |
- self.server_url = '{}:{}'.format(url.hostname, url.port)
|
|
63 |
+ self.storage_remote_spec = CASRemoteSpec(self.storage_url, push=True,
|
|
64 |
+ server_cert=config.storage_service['server-cert'],
|
|
65 |
+ client_key=config.storage_service['client-key'],
|
|
66 |
+ client_cert=config.storage_service['client-cert'])
|
|
59 | 67 |
self.operation_name = None
|
60 | 68 |
|
69 |
+ @staticmethod
|
|
70 |
+ def specs_from_config_node(config_node, basedir):
|
|
71 |
+ |
|
72 |
+ def require_node(config, keyname):
|
|
73 |
+ val = config.get(keyname)
|
|
74 |
+ if val is None:
|
|
75 |
+ provenance = _yaml.node_get_provenance(remote_config, key=keyname)
|
|
76 |
+ raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
|
|
77 |
+ "{}: '{}' was not present in the remote "
|
|
78 |
+ "execution configuration (remote-execution). "
|
|
79 |
+ .format(str(provenance), keyname))
|
|
80 |
+ return val
|
|
81 |
+ |
|
82 |
+ remote_config = config_node.get("remote-execution", None)
|
|
83 |
+ if remote_config is None:
|
|
84 |
+ return None
|
|
85 |
+ |
|
86 |
+ # Maintain some backwards compatibility with older configs, in which 'url' was the only valid key for
|
|
87 |
+ # remote-execution.
|
|
88 |
+ |
|
89 |
+ tls_keys = ['client-key', 'client-cert', 'server-cert']
|
|
90 |
+ |
|
91 |
+ _yaml.node_validate(remote_config, ['execution-service', 'storage-service', 'url'])
|
|
92 |
+ remote_exec_service_config = require_node(remote_config, 'execution-service')
|
|
93 |
+ remote_exec_storage_config = require_node(remote_config, 'storage-service')
|
|
94 |
+ |
|
95 |
+ _yaml.node_validate(remote_exec_service_config, ['url'])
|
|
96 |
+ _yaml.node_validate(remote_exec_storage_config, ['url'] + tls_keys)
|
|
97 |
+ |
|
98 |
+ if 'url' in remote_config:
|
|
99 |
+ if 'execution-service' not in remote_config:
|
|
100 |
+ remote_config['execution-service'] = {'url': remote_config['url']}
|
|
101 |
+ else:
|
|
102 |
+ provenance = _yaml.node_get_provenance(remote_config, key='url')
|
|
103 |
+ raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
|
|
104 |
+ "{}: 'url' and 'execution-service' keys were found in the remote "
|
|
105 |
+ "execution configuration (remote-execution). "
|
|
106 |
+ "You can only specify one of these."
|
|
107 |
+ .format(str(provenance)))
|
|
108 |
+ |
|
109 |
+ for key in tls_keys:
|
|
110 |
+ if key not in remote_exec_storage_config:
|
|
111 |
+ provenance = _yaml.node_get_provenance(remote_config, key='storage-service')
|
|
112 |
+ raise _yaml.LoadError(_yaml.LoadErrorReason.INVALID_DATA,
|
|
113 |
+ "{}: The keys {} are necessary for the storage-service section of "
|
|
114 |
+ "remote-execution configuration. Your config is missing '{}'."
|
|
115 |
+ .format(str(provenance), tls_keys, key))
|
|
116 |
+ |
|
117 |
+ spec = RemoteExecutionSpec(remote_config['execution-service'], remote_config['storage-service'])
|
|
118 |
+ return spec
|
|
119 |
+ |
|
61 | 120 |
def run_remote_command(self, command, input_root_digest, working_directory, environment):
|
62 | 121 |
# Sends an execution request to the remote execution server.
|
63 | 122 |
#
|
... | ... | @@ -75,12 +134,13 @@ class SandboxRemote(Sandbox): |
75 | 134 |
output_directories=[self._output_directory],
|
76 | 135 |
platform=None)
|
77 | 136 |
context = self._get_context()
|
78 |
- cascache = context.artifactcache
|
|
137 |
+ cascache = context.get_cascache()
|
|
138 |
+ casremote = CASRemote(self.storage_remote_spec)
|
|
139 |
+ |
|
79 | 140 |
# Upload the Command message to the remote CAS server
|
80 |
- command_digest = cascache.push_message(self._get_project(), remote_command)
|
|
81 |
- if not command_digest or not cascache.verify_digest_pushed(self._get_project(), command_digest):
|
|
141 |
+ command_digest = cascache.push_message(casremote, remote_command)
|
|
142 |
+ if not command_digest or not cascache.verify_digest_on_remote(casremote, command_digest):
|
|
82 | 143 |
raise SandboxError("Failed pushing build command to remote CAS.")
|
83 |
- |
|
84 | 144 |
# Create and send the action.
|
85 | 145 |
action = remote_execution_pb2.Action(command_digest=command_digest,
|
86 | 146 |
input_root_digest=input_root_digest,
|
... | ... | @@ -88,12 +148,21 @@ class SandboxRemote(Sandbox): |
88 | 148 |
do_not_cache=False)
|
89 | 149 |
|
90 | 150 |
# Upload the Action message to the remote CAS server
|
91 |
- action_digest = cascache.push_message(self._get_project(), action)
|
|
92 |
- if not action_digest or not cascache.verify_digest_pushed(self._get_project(), action_digest):
|
|
151 |
+ action_digest = cascache.push_message(casremote, action)
|
|
152 |
+ if not action_digest or not cascache.verify_digest_on_remote(casremote, action_digest):
|
|
93 | 153 |
raise SandboxError("Failed pushing build action to remote CAS.")
|
94 | 154 |
|
95 | 155 |
# Next, try to create a communication channel to the BuildGrid server.
|
96 |
- channel = grpc.insecure_channel(self.server_url)
|
|
156 |
+ url = urlparse(self.exec_url)
|
|
157 |
+ if not url.port:
|
|
158 |
+ raise SandboxError("You must supply a protocol and port number in the execution-service url, "
|
|
159 |
+ "for example: http://buildservice:50051.")
|
|
160 |
+ if url.scheme == 'http':
|
|
161 |
+ channel = grpc.insecure_channel('{}:{}'.format(url.hostname, url.port))
|
|
162 |
+ else:
|
|
163 |
+ raise SandboxError("Remote execution currently only supports the 'http' protocol "
|
|
164 |
+ "and '{}' was supplied.".format(url.scheme))
|
|
165 |
+ |
|
97 | 166 |
stub = remote_execution_pb2_grpc.ExecutionStub(channel)
|
98 | 167 |
request = remote_execution_pb2.ExecuteRequest(action_digest=action_digest,
|
99 | 168 |
skip_cache_lookup=False)
|
... | ... | @@ -119,7 +188,7 @@ class SandboxRemote(Sandbox): |
119 | 188 |
status_code = e.code()
|
120 | 189 |
if status_code == grpc.StatusCode.UNAVAILABLE:
|
121 | 190 |
raise SandboxError("Failed contacting remote execution server at {}."
|
122 |
- .format(self.server_url))
|
|
191 |
+ .format(self.exec_url))
|
|
123 | 192 |
|
124 | 193 |
elif status_code in (grpc.StatusCode.INVALID_ARGUMENT,
|
125 | 194 |
grpc.StatusCode.FAILED_PRECONDITION,
|
... | ... | @@ -190,9 +259,11 @@ class SandboxRemote(Sandbox): |
190 | 259 |
raise SandboxError("Output directory structure had no digest attached.")
|
191 | 260 |
|
192 | 261 |
context = self._get_context()
|
193 |
- cascache = context.artifactcache
|
|
262 |
+ cascache = context.get_cascache()
|
|
263 |
+ casremote = CASRemote(self.storage_remote_spec)
|
|
264 |
+ |
|
194 | 265 |
# Now do a pull to ensure we have the necessary parts.
|
195 |
- dir_digest = cascache.pull_tree(self._get_project(), tree_digest)
|
|
266 |
+ dir_digest = cascache.pull_tree(casremote, tree_digest)
|
|
196 | 267 |
if dir_digest is None or not dir_digest.hash or not dir_digest.size_bytes:
|
197 | 268 |
raise SandboxError("Output directory structure pulling from remote failed.")
|
198 | 269 |
|
... | ... | @@ -218,18 +289,23 @@ class SandboxRemote(Sandbox): |
218 | 289 |
# Upload sources
|
219 | 290 |
upload_vdir = self.get_virtual_directory()
|
220 | 291 |
|
292 |
+ cascache = self._get_context().get_cascache()
|
|
221 | 293 |
if isinstance(upload_vdir, FileBasedDirectory):
|
222 | 294 |
# Make a new temporary directory to put source in
|
223 |
- upload_vdir = CasBasedDirectory(self._get_context().artifactcache.cas, ref=None)
|
|
295 |
+ upload_vdir = CasBasedDirectory(cascache, ref=None)
|
|
224 | 296 |
upload_vdir.import_files(self.get_virtual_directory()._get_underlying_directory())
|
225 | 297 |
|
226 | 298 |
upload_vdir.recalculate_hash()
|
227 | 299 |
|
228 |
- context = self._get_context()
|
|
229 |
- cascache = context.artifactcache
|
|
300 |
+ casremote = CASRemote(self.storage_remote_spec)
|
|
230 | 301 |
# Now, push that key (without necessarily needing a ref) to the remote.
|
231 |
- cascache.push_directory(self._get_project(), upload_vdir)
|
|
232 |
- if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):
|
|
302 |
+ |
|
303 |
+ try:
|
|
304 |
+ cascache.push_directory(casremote, upload_vdir)
|
|
305 |
+ except grpc.RpcError as e:
|
|
306 |
+ raise SandboxError("Failed to push source directory to remote: {}".format(e)) from e
|
|
307 |
+ |
|
308 |
+ if not cascache.verify_digest_on_remote(casremote, upload_vdir.ref):
|
|
233 | 309 |
raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")
|
234 | 310 |
|
235 | 311 |
# Now transmit the command to execute
|
... | ... | @@ -1242,3 +1242,17 @@ def _deduplicate(iterable, key=None): |
1242 | 1242 |
def _get_link_mtime(path):
|
1243 | 1243 |
path_stat = os.lstat(path)
|
1244 | 1244 |
return path_stat.st_mtime
|
1245 |
+ |
|
1246 |
+ |
|
1247 |
+# Returns the first directory to contain filename, or an empty string if
|
|
1248 |
+# none found
|
|
1249 |
+#
|
|
1250 |
+def _search_upward_for_file(directory, filename):
|
|
1251 |
+ directory = os.path.abspath(directory)
|
|
1252 |
+ while not os.path.isfile(os.path.join(directory, filename)):
|
|
1253 |
+ parent_dir = os.path.dirname(directory)
|
|
1254 |
+ if directory == parent_dir:
|
|
1255 |
+ return ""
|
|
1256 |
+ directory = parent_dir
|
|
1257 |
+ |
|
1258 |
+ return directory
|
... | ... | @@ -201,10 +201,10 @@ with an artifact share. |
201 | 201 |
#
|
202 | 202 |
artifacts:
|
203 | 203 |
# A remote cache from which to download prebuilt artifacts
|
204 |
- - url: https://foo.com/artifacts:11001
|
|
204 |
+ - url: https://foo.com:11001
|
|
205 | 205 |
server.cert: server.crt
|
206 | 206 |
# A remote cache from which to upload/download built/prebuilt artifacts
|
207 |
- - url: https://foo.com/artifacts:11002
|
|
207 |
+ - url: https://foo.com:11002
|
|
208 | 208 |
server-cert: server.crt
|
209 | 209 |
client-cert: client.crt
|
210 | 210 |
client-key: client.key
|
... | ... | @@ -231,10 +231,24 @@ using the `remote-execution` option: |
231 | 231 |
remote-execution:
|
232 | 232 |
|
233 | 233 |
# A url defining a remote execution server
|
234 |
- url: http://buildserver.example.com:50051
|
|
234 |
+ execution-service:
|
|
235 |
+ url: http://buildserver.example.com:50051
|
|
236 |
+ storage-service:
|
|
237 |
+ - url: https://foo.com:11002/
|
|
238 |
+ server-cert: server.crt
|
|
239 |
+ client-cert: client.crt
|
|
240 |
+ client-key: client.key
|
|
241 |
+ |
|
242 |
+The execution-service part of remote execution does not support encrypted
|
|
243 |
+connections yet, so the protocol must always be http.
|
|
244 |
+ |
|
245 |
+storage-service specifies a remote CAS store and the parameters are the
|
|
246 |
+same as those used to specify an :ref:`artifact server <artifacts>`.
|
|
235 | 247 |
|
236 |
-The url should contain a hostname and port separated by ':'. Only plain HTTP is
|
|
237 |
-currently suported (no HTTPS).
|
|
248 |
+The storage service may be the same endpoint used for artifact
|
|
249 |
+caching. Remote execution cannot work without push access to the
|
|
250 |
+storage endpoint, so you must specify a client certificate and key,
|
|
251 |
+and a server certificate.
|
|
238 | 252 |
|
239 | 253 |
The Remote Execution API can be found via https://github.com/bazelbuild/remote-apis.
|
240 | 254 |
|
... | ... | @@ -31,6 +31,7 @@ import shutil |
31 | 31 |
import subprocess
|
32 | 32 |
from ruamel.yaml.comments import CommentedSet
|
33 | 33 |
from tests.testutils import cli, create_repo, ALL_REPO_KINDS, wait_for_cache_granularity
|
34 |
+from tests.testutils import create_artifact_share
|
|
34 | 35 |
|
35 | 36 |
from buildstream import _yaml
|
36 | 37 |
from buildstream._exceptions import ErrorDomain, LoadError, LoadErrorReason
|
... | ... | @@ -615,9 +616,12 @@ def test_list(cli, tmpdir, datafiles): |
615 | 616 |
@pytest.mark.datafiles(DATA_DIR)
|
616 | 617 |
@pytest.mark.parametrize("kind", repo_kinds)
|
617 | 618 |
@pytest.mark.parametrize("strict", [("strict"), ("non-strict")])
|
618 |
-def test_build(cli, tmpdir, datafiles, kind, strict):
|
|
619 |
+@pytest.mark.parametrize("call_from", [("project"), ("workspace")])
|
|
620 |
+def test_build(cli, tmpdir_factory, datafiles, kind, strict, call_from):
|
|
621 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
619 | 622 |
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
|
620 | 623 |
checkout = os.path.join(str(tmpdir), 'checkout')
|
624 |
+ args_pre = ['-C', workspace] if call_from == "workspace" else []
|
|
621 | 625 |
|
622 | 626 |
# Modify workspace
|
623 | 627 |
shutil.rmtree(os.path.join(workspace, 'usr', 'bin'))
|
... | ... | @@ -640,15 +644,14 @@ def test_build(cli, tmpdir, datafiles, kind, strict): |
640 | 644 |
# Build modified workspace
|
641 | 645 |
assert cli.get_element_state(project, element_name) == 'buildable'
|
642 | 646 |
assert cli.get_element_key(project, element_name) == "{:?<64}".format('')
|
643 |
- result = cli.run(project=project, args=['build', element_name])
|
|
647 |
+ result = cli.run(project=project, args=args_pre + ['build', element_name])
|
|
644 | 648 |
result.assert_success()
|
645 | 649 |
assert cli.get_element_state(project, element_name) == 'cached'
|
646 | 650 |
assert cli.get_element_key(project, element_name) != "{:?<64}".format('')
|
647 | 651 |
|
648 | 652 |
# Checkout the result
|
649 |
- result = cli.run(project=project, args=[
|
|
650 |
- 'checkout', element_name, checkout
|
|
651 |
- ])
|
|
653 |
+ result = cli.run(project=project,
|
|
654 |
+ args=args_pre + ['checkout', element_name, checkout])
|
|
652 | 655 |
result.assert_success()
|
653 | 656 |
|
654 | 657 |
# Check that the pony.conf from the modified workspace exists
|
... | ... | @@ -1055,3 +1058,131 @@ def test_multiple_failed_builds(cli, tmpdir, datafiles): |
1055 | 1058 |
result = cli.run(project=project, args=["build", element_name])
|
1056 | 1059 |
assert "BUG" not in result.stderr
|
1057 | 1060 |
assert cli.get_element_state(project, element_name) != "cached"
|
1061 |
+ |
|
1062 |
+ |
|
1063 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1064 |
+def test_external_fetch(cli, datafiles, tmpdir_factory):
|
|
1065 |
+ # Fetching from a workspace outside a project doesn't fail horribly
|
|
1066 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
1067 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
1068 |
+ |
|
1069 |
+ result = cli.run(project=project, args=['-C', workspace, 'fetch', element_name])
|
|
1070 |
+ result.assert_success()
|
|
1071 |
+ |
|
1072 |
+ # We already fetched it by opening the workspace, but we're also checking
|
|
1073 |
+ # `bst show` works here
|
|
1074 |
+ assert cli.get_element_state(project, element_name) == 'buildable'
|
|
1075 |
+ |
|
1076 |
+ |
|
1077 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1078 |
+def test_external_push_pull(cli, datafiles, tmpdir_factory):
|
|
1079 |
+ # Pushing and pulling to/from an artifact cache works from an external workspace
|
|
1080 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
1081 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
1082 |
+ |
|
1083 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
|
1084 |
+ result = cli.run(project=project, args=['-C', workspace, 'build', element_name])
|
|
1085 |
+ result.assert_success()
|
|
1086 |
+ |
|
1087 |
+ cli.configure({
|
|
1088 |
+ 'artifacts': {'url': share.repo, 'push': True}
|
|
1089 |
+ })
|
|
1090 |
+ |
|
1091 |
+ result = cli.run(project=project, args=['-C', workspace, 'push', element_name])
|
|
1092 |
+ result.assert_success()
|
|
1093 |
+ |
|
1094 |
+ result = cli.run(project=project, args=['-C', workspace, 'pull', '--deps', 'all', element_name])
|
|
1095 |
+ result.assert_success()
|
|
1096 |
+ |
|
1097 |
+ |
|
1098 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1099 |
+def test_external_track(cli, datafiles, tmpdir_factory):
|
|
1100 |
+ # Tracking does not get horribly confused
|
|
1101 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
1102 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", True)
|
|
1103 |
+ |
|
1104 |
+ # The workspace is necessarily already tracked, so we only care that
|
|
1105 |
+ # there's no weird errors.
|
|
1106 |
+ result = cli.run(project=project, args=['-C', workspace, 'track', element_name])
|
|
1107 |
+ result.assert_success()
|
|
1108 |
+ |
|
1109 |
+ |
|
1110 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1111 |
+def test_external_open_other(cli, datafiles, tmpdir_factory):
|
|
1112 |
+ # From inside an external workspace, open another workspace
|
|
1113 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
1114 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
1115 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
1116 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
1117 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
1118 |
+ |
|
1119 |
+ # Closing the other element first, because I'm too lazy to create an
|
|
1120 |
+ # element without opening it
|
|
1121 |
+ result = cli.run(project=project, args=['workspace', 'close', beta_element])
|
|
1122 |
+ result.assert_success()
|
|
1123 |
+ |
|
1124 |
+ result = cli.run(project=project, args=[
|
|
1125 |
+ '-C', alpha_workspace, 'workspace', 'open', '--force', '--directory', beta_workspace, beta_element
|
|
1126 |
+ ])
|
|
1127 |
+ result.assert_success()
|
|
1128 |
+ |
|
1129 |
+ |
|
1130 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1131 |
+def test_external_close_other(cli, datafiles, tmpdir_factory):
|
|
1132 |
+ # From inside an external workspace, close the other workspace
|
|
1133 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
1134 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
1135 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
1136 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
1137 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
1138 |
+ |
|
1139 |
+ result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close', beta_element])
|
|
1140 |
+ result.assert_success()
|
|
1141 |
+ |
|
1142 |
+ |
|
1143 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1144 |
+def test_external_close_self(cli, datafiles, tmpdir_factory):
|
|
1145 |
+ # From inside an external workspace, close it
|
|
1146 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
1147 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
1148 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
1149 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
1150 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
1151 |
+ |
|
1152 |
+ result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'close', alpha_element])
|
|
1153 |
+ result.assert_success()
|
|
1154 |
+ |
|
1155 |
+ |
|
1156 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1157 |
+def test_external_reset_other(cli, datafiles, tmpdir_factory):
|
|
1158 |
+ tmpdir1 = tmpdir_factory.mktemp('')
|
|
1159 |
+ tmpdir2 = tmpdir_factory.mktemp('')
|
|
1160 |
+ # Making use of the assumption that it's the same project in both invocations of open_workspace
|
|
1161 |
+ alpha_element, project, alpha_workspace = open_workspace(cli, tmpdir1, datafiles, "git", False, suffix="-alpha")
|
|
1162 |
+ beta_element, _, beta_workspace = open_workspace(cli, tmpdir2, datafiles, "git", False, suffix="-beta")
|
|
1163 |
+ |
|
1164 |
+ result = cli.run(project=project, args=['-C', alpha_workspace, 'workspace', 'reset', beta_element])
|
|
1165 |
+ result.assert_success()
|
|
1166 |
+ |
|
1167 |
+ |
|
1168 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1169 |
+def test_external_reset_self(cli, datafiles, tmpdir):
|
|
1170 |
+ element, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
1171 |
+ |
|
1172 |
+ # Command succeeds
|
|
1173 |
+ result = cli.run(project=project, args=['-C', workspace, 'workspace', 'reset', element])
|
|
1174 |
+ result.assert_success()
|
|
1175 |
+ |
|
1176 |
+ # Successive commands still work (i.e. .bstproject.yaml hasn't been deleted)
|
|
1177 |
+ result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
|
|
1178 |
+ result.assert_success()
|
|
1179 |
+ |
|
1180 |
+ |
|
1181 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
1182 |
+def test_external_list(cli, datafiles, tmpdir_factory):
|
|
1183 |
+ tmpdir = tmpdir_factory.mktemp('')
|
|
1184 |
+ # Test that 'bst workspace list' works when run from an external workspace
|
|
1185 |
+ element, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)
|
|
1186 |
+ |
|
1187 |
+ result = cli.run(project=project, args=['-C', workspace, 'workspace', 'list'])
|
|
1188 |
+ result.assert_success()
|
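
All of the tests above exercise the same basic pattern: open a workspace outside the project, then run bst subcommands from that workspace directory via the top-level '-C' option. A minimal sketch of that pattern as it would appear in this test module; the test name is hypothetical, and open_workspace(), DATA_DIR and the cli fixture are assumed from earlier in the file:

    # Sketch only, not part of this diff: the common external-workspace pattern.
    @pytest.mark.datafiles(DATA_DIR)
    def test_external_build_sketch(cli, datafiles, tmpdir_factory):
        tmpdir = tmpdir_factory.mktemp('')
        # open_workspace() is assumed to return (element_name, project, workspace_dir)
        element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "git", False)

        # The top-level -C option points bst at the external workspace directory
        result = cli.run(project=project, args=['-C', workspace, 'build', element_name])
        result.assert_success()
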
... | ... | @@ -353,3 +353,29 @@ def test_integration_devices(cli, tmpdir, datafiles): |
353 | 353 |
|
354 | 354 |
result = execute_shell(cli, project, ["true"], element=element_name)
|
355 | 355 |
assert result.exit_code == 0
|
356 |
+ |
|
357 |
+ |
|
358 |
+# Test that a shell can be opened from an external workspace
|
|
359 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
360 |
+@pytest.mark.parametrize("build_shell", [("build"), ("nobuild")])
|
|
361 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
362 |
+def test_integration_external_workspace(cli, tmpdir_factory, datafiles, build_shell):
|
|
363 |
+ tmpdir = tmpdir_factory.mktemp("")
|
|
364 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
365 |
+ element_name = 'autotools/amhello.bst'
|
|
366 |
+ workspace_dir = os.path.join(str(tmpdir), 'workspace')
|
|
367 |
+ |
|
368 |
+ result = cli.run(project=project, args=[
|
|
369 |
+ 'workspace', 'open', '--directory', workspace_dir, element_name
|
|
370 |
+ ])
|
|
371 |
+ result.assert_success()
|
|
372 |
+ |
|
373 |
+ result = cli.run(project=project, args=['-C', workspace_dir, 'build', element_name])
|
|
374 |
+ result.assert_success()
|
|
375 |
+ |
|
376 |
+ command = ['shell']
|
|
377 |
+ if build_shell == 'build':
|
|
378 |
+ command.append('--build')
|
|
379 |
+ command.extend([element_name, '--', 'true'])
|
|
380 |
+ result = cli.run(project=project, cwd=workspace_dir, args=command)
|
|
381 |
+ result.assert_success()
|
1 |
+import pytest
|
|
2 |
+ |
|
3 |
+import itertools
|
|
4 |
+import os
|
|
5 |
+ |
|
6 |
+from buildstream import _yaml
|
|
7 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
8 |
+ |
|
9 |
+from tests.testutils.runcli import cli
|
|
10 |
+ |
|
11 |
+DATA_DIR = os.path.join(
|
|
12 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
13 |
+ "remote-exec-config"
|
|
14 |
+)
|
|
15 |
+ |
|
16 |
+# Tests that we get a useful error message when supplying invalid
|
|
17 |
+# remote execution configurations.
|
|
18 |
+ |
|
19 |
+ |
|
20 |
+# Assert that if both 'url' (the old style) and 'execution-service' (the new style)
|
|
21 |
+# are used at once, a LoadError results.
|
|
22 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
23 |
+def test_old_and_new_configs(cli, datafiles):
|
|
24 |
+ project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
|
|
25 |
+ |
|
26 |
+ project_conf = {
|
|
27 |
+ 'name': 'test',
|
|
28 |
+ |
|
29 |
+ 'remote-execution': {
|
|
30 |
+ 'url': 'https://cache.example.com:12345',
|
|
31 |
+ 'execution-service': {
|
|
32 |
+ 'url': 'http://localhost:8088'
|
|
33 |
+ },
|
|
34 |
+ 'storage-service': {
|
|
35 |
+ 'url': 'http://charactron:11001',
|
|
36 |
+ }
|
|
37 |
+ }
|
|
38 |
+ }
|
|
39 |
+ project_conf_file = os.path.join(project, 'project.conf')
|
|
40 |
+ _yaml.dump(project_conf, project_conf_file)
|
|
41 |
+ |
|
42 |
+ # Use `pull` here to ensure we try to initialize the remotes, triggering the error
|
|
43 |
+ #
|
|
44 |
+ # This does not happen for a simple `bst show`.
|
|
45 |
+ result = cli.run(project=project, args=['pull', 'element.bst'])
|
|
46 |
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one")
|
|
47 |
+ |
|
48 |
+ |
|
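
For contrast with the mixed old/new configuration rejected above, a sketch of a configuration using only the new style, built and dumped the same way these tests do. The hostnames, ports and file names are placeholders, and the exact set of keys accepted under 'storage-service' is an assumption based on the keys exercised in this file:

    # Sketch only, not part of this diff: a new-style remote-execution config,
    # using 'execution-service'/'storage-service' instead of the old top-level 'url'.
    project_conf = {
        'name': 'test',

        'remote-execution': {
            'execution-service': {
                'url': 'http://localhost:8088'
            },
            'storage-service': {
                'url': 'https://storage.example.com:11001',
                'server-cert': 'server.crt',   # assumed key, by analogy with the client-* keys
                'client-key': 'client.key',
                'client-cert': 'client.crt',
            }
        }
    }
    _yaml.dump(project_conf, os.path.join(project, 'project.conf'))
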
49 |
+# Assert that if either the client key or client cert is specified
|
|
50 |
+# without specifying its counterpart, we get a comprehensive LoadError
|
|
51 |
+# instead of an unhandled exception.
|
|
52 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
53 |
+@pytest.mark.parametrize('config_key, config_value', [
|
|
54 |
+ ('client-cert', 'client.crt'),
|
|
55 |
+ ('client-key', 'client.key')
|
|
56 |
+])
|
|
57 |
+def test_missing_certs(cli, datafiles, config_key, config_value):
|
|
58 |
+ project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
|
|
59 |
+ |
|
60 |
+ project_conf = {
|
|
61 |
+ 'name': 'test',
|
|
62 |
+ |
|
63 |
+ 'remote-execution': {
|
|
64 |
+ 'execution-service': {
|
|
65 |
+ 'url': 'http://localhost:8088'
|
|
66 |
+ },
|
|
67 |
+ 'storage-service': {
|
|
68 |
+ 'url': 'http://charactron:11001',
|
|
69 |
+ config_key: config_value,
|
|
70 |
+ }
|
|
71 |
+ }
|
|
72 |
+ }
|
|
73 |
+ project_conf_file = os.path.join(project, 'project.conf')
|
|
74 |
+ _yaml.dump(project_conf, project_conf_file)
|
|
75 |
+ |
|
76 |
+ # Use `pull` here to ensure we try to initialize the remotes, triggering the error
|
|
77 |
+ #
|
|
78 |
+ # This does not happen for a simple `bst show`.
|
|
79 |
+ result = cli.run(project=project, args=['show', 'element.bst'])
|
|
80 |
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "Your config is missing")
|
|
81 |
+ |
|
82 |
+ |
|
83 |
+# Assert that if incomplete information is supplied we get a sensible error message.
|
|
84 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
85 |
+def test_empty_config(cli, datafiles):
|
|
86 |
+ project = os.path.join(datafiles.dirname, datafiles.basename, 'missing-certs')
|
|
87 |
+ |
|
88 |
+ project_conf = {
|
|
89 |
+ 'name': 'test',
|
|
90 |
+ |
|
91 |
+ 'remote-execution': {
|
|
92 |
+ }
|
|
93 |
+ }
|
|
94 |
+ project_conf_file = os.path.join(project, 'project.conf')
|
|
95 |
+ _yaml.dump(project_conf, project_conf_file)
|
|
96 |
+ |
|
97 |
+ # Use `pull` here to ensure we try to initialize the remotes, triggering the error
|
|
98 |
+ #
|
|
99 |
+ # This does not happen for a simple `bst show`.
|
|
100 |
+ result = cli.run(project=project, args=['pull', 'element.bst'])
|
|
101 |
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA, "specify one")
|
1 |
+kind: autotools
|