Benjamin Schubert pushed to branch bschubert/mr938-comments at BuildStream / buildstream
Commits:
- 06e28860 by Benjamin Schubert at 2018-11-19T15:52:24Z
- dd36cfbc by Benjamin Schubert at 2018-11-19T15:52:24Z
- eac7274d by Benjamin Schubert at 2018-11-19T16:29:07Z
- 625dfe1f by Chandan Singh at 2018-11-19T16:41:17Z
- d4f12184 by Chandan Singh at 2018-11-19T17:15:10Z
- ba6c96e8 by Richard Maw at 2018-11-19T19:54:30Z
- 8a0dc3a3 by Jürg Billeter at 2018-11-19T19:54:30Z
- 8722aced by Jürg Billeter at 2018-11-19T19:54:30Z
- fc56ffa4 by Jürg Billeter at 2018-11-19T19:54:30Z
- 3f663d82 by Jürg Billeter at 2018-11-19T20:36:57Z
- b498cce7 by Angelos Evripiotis at 2018-11-20T11:17:38Z
- eb2d376f by Angelos Evripiotis at 2018-11-20T11:17:38Z
- b81c4333 by Angelos Evripiotis at 2018-11-20T11:17:38Z
- 27ca6593 by Angelos Evripiotis at 2018-11-20T11:19:33Z
- 7ae3a3d2 by Angelos Evripiotis at 2018-11-20T11:43:49Z
- 8071c00c by Angelos Evripiotis at 2018-11-20T12:12:11Z
- fd41b0b5 by Raoul Hidalgo Charman at 2018-11-20T16:32:10Z
- c6306b88 by Jürg Billeter at 2018-11-20T16:59:45Z
- bfa8b4ca by Benjamin Schubert at 2018-11-20T17:29:13Z
- 9fa091fa by Benjamin Schubert at 2018-11-20T17:29:13Z
18 changed files:
- NEWS
- buildstream/_context.py
- buildstream/_frontend/app.py
- buildstream/_frontend/cli.py
- buildstream/_yaml.py
- buildstream/data/userconfig.yaml
- buildstream/element.py
- buildstream/sandbox/_sandboxremote.py
- buildstream/sandbox/sandbox.py
- buildstream/storage/_casbaseddirectory.py
- buildstream/utils.py
- conftest.py
- tests/artifactcache/push.py
- tests/integration/cachedfail.py
- tests/plugins/pipeline.py
- tests/sandboxes/storage-tests.py
- tests/storage/virtual_directory_import.py
- tests/testutils/artifactshare.py
Changes:
=====================================
NEWS
=====================================
@@ -45,6 +45,12 @@ buildstream 1.3.1
     instead of just a specially-formatted build-root with a `root` and `scratch`
     subdirectory.
 
+  o The buildstream.conf file learned new 'prompt.auto-init',
+    'prompt.really-workspace-close-remove-dir', and
+    'prompt.really-workspace-reset-hard' options. These allow users to suppress
+    certain confirmation prompts, e.g. double-checking that the user meant to
+    run the command as typed.
+
   o Due to the element `build tree` being cached in the respective artifact their
     size in some cases has significantly increased. In *most* cases the build trees
     are not utilised when building targets, as such by default bst 'pull' & 'build'
@@ -55,6 +61,8 @@ buildstream 1.3.1
     with cached artifacts, only 'complete' elements can be pushed. If the element
     is expected to have a populated build tree then it must be cached before pushing.
 
+  o Added new `bst source-checkout` command to checkout sources of an element.
+
 
 =================
 buildstream 1.1.5
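Suppressing these confirmations from a user's buildstream.conf would look roughly like this (a sketch; the keys and accepted values mirror the userconfig.yaml defaults shown further below, with 'no'/'yes' quoted to keep them YAML strings):

    prompt:
      # Never offer to run 'bst init' outside of a project directory
      auto-init: 'no'
      # Remove workspace directories without asking for confirmation
      really-workspace-close-remove-dir: 'yes'
      # Hard-reset workspaces without asking for confirmation
      really-workspace-reset-hard: 'yes'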
=====================================
buildstream/_context.py
=====================================
@@ -63,25 +63,25 @@ class Context():
         self.artifactdir = None
 
         # The locations from which to push and pull prebuilt artifacts
-        self.artifact_cache_specs = []
+        self.artifact_cache_specs = None
 
         # The directory to store build logs
         self.logdir = None
 
         # The abbreviated cache key length to display in the UI
-        self.log_key_length = 0
+        self.log_key_length = None
 
         # Whether debug mode is enabled
-        self.log_debug = False
+        self.log_debug = None
 
         # Whether verbose mode is enabled
-        self.log_verbose = False
+        self.log_verbose = None
 
         # Maximum number of lines to print from build logs
-        self.log_error_lines = 0
+        self.log_error_lines = None
 
         # Maximum number of lines to print in the master log for a detailed message
-        self.log_message_lines = 0
+        self.log_message_lines = None
 
         # Format string for printing the pipeline at startup time
         self.log_element_format = None
@@ -90,23 +90,38 @@ class Context():
         self.log_message_format = None
 
         # Maximum number of fetch or refresh tasks
-        self.sched_fetchers = 4
+        self.sched_fetchers = None
 
         # Maximum number of build tasks
-        self.sched_builders = 4
+        self.sched_builders = None
 
         # Maximum number of push tasks
-        self.sched_pushers = 4
+        self.sched_pushers = None
 
         # Maximum number of retries for network tasks
-        self.sched_network_retries = 2
+        self.sched_network_retries = None
 
         # What to do when a build fails in non interactive mode
-        self.sched_error_action = 'continue'
+        self.sched_error_action = None
+
+        # Size of the artifact cache in bytes
+        self.config_cache_quota = None
 
         # Whether or not to attempt to pull build trees globally
         self.pull_buildtrees = None
 
+        # Boolean, whether to offer to create a project for the user, if we are
+        # invoked outside of a directory where we can resolve the project.
+        self.prompt_auto_init = None
+
+        # Boolean, whether we double-check with the user that they meant to
+        # remove a workspace directory.
+        self.prompt_workspace_close_remove_dir = None
+
+        # Boolean, whether we double-check with the user that they meant to do
+        # a hard reset of a workspace, potentially losing changes.
+        self.prompt_workspace_reset_hard = None
+
         # Whether elements must be rebuilt when their dependencies have changed
         self._strict_build_plan = None
 
@@ -123,7 +138,6 @@ class Context():
         self._workspaces = None
         self._log_handle = None
         self._log_filename = None
-        self.config_cache_quota = 'infinity'
 
     # load()
     #
@@ -163,7 +177,7 @@ class Context():
         _yaml.node_validate(defaults, [
             'sourcedir', 'builddir', 'artifactdir', 'logdir',
             'scheduler', 'artifacts', 'logging', 'projects',
-            'cache'
+            'cache', 'prompt'
         ])
 
         for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
@@ -183,7 +197,7 @@ class Context():
         cache = _yaml.node_get(defaults, Mapping, 'cache')
         _yaml.node_validate(cache, ['quota', 'pull-buildtrees'])
 
-        self.config_cache_quota = _yaml.node_get(cache, str, 'quota', default_value='infinity')
+        self.config_cache_quota = _yaml.node_get(cache, str, 'quota')
 
         # Load artifact share configuration
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
@@ -212,12 +226,34 @@ class Context():
             'on-error', 'fetchers', 'builders',
             'pushers', 'network-retries'
         ])
-        self.sched_error_action = _yaml.node_get(scheduler, str, 'on-error')
+        self.sched_error_action = _node_get_option_str(
+            scheduler, 'on-error', ['continue', 'quit', 'terminate'])
         self.sched_fetchers = _yaml.node_get(scheduler, int, 'fetchers')
         self.sched_builders = _yaml.node_get(scheduler, int, 'builders')
         self.sched_pushers = _yaml.node_get(scheduler, int, 'pushers')
         self.sched_network_retries = _yaml.node_get(scheduler, int, 'network-retries')
 
+        # Load prompt preferences
+        #
+        # We convert string options to booleans here, so we can be both user
+        # and coder-friendly. The string options are worded to match the
+        # responses the user would give at the cli, for least surprise. The
+        # booleans are converted here because it's easiest to eyeball that the
+        # strings are right.
+        #
+        prompt = _yaml.node_get(
+            defaults, Mapping, 'prompt')
+        _yaml.node_validate(prompt, [
+            'auto-init', 'really-workspace-close-remove-dir',
+            'really-workspace-reset-hard',
+        ])
+        self.prompt_auto_init = _node_get_option_str(
+            prompt, 'auto-init', ['ask', 'no']) == 'ask'
+        self.prompt_workspace_close_remove_dir = _node_get_option_str(
+            prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
+        self.prompt_workspace_reset_hard = _node_get_option_str(
+            prompt, 'really-workspace-reset-hard', ['ask', 'yes']) == 'ask'
+
         # Load per-projects overrides
         self._project_overrides = _yaml.node_get(defaults, Mapping, 'projects', default_value={})
 
@@ -228,13 +264,6 @@ class Context():
 
         profile_end(Topics.LOAD_CONTEXT, 'load')
 
-        valid_actions = ['continue', 'quit']
-        if self.sched_error_action not in valid_actions:
-            provenance = _yaml.node_get_provenance(scheduler, 'on-error')
-            raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "{}: on-error should be one of: {}".format(
-                                provenance, ", ".join(valid_actions)))
-
     @property
     def artifactcache(self):
         if not self._artifactcache:
@@ -587,3 +616,30 @@ class Context():
             os.environ['XDG_CONFIG_HOME'] = os.path.expanduser('~/.config')
         if not os.environ.get('XDG_DATA_HOME'):
             os.environ['XDG_DATA_HOME'] = os.path.expanduser('~/.local/share')
+
+
+# _node_get_option_str()
+#
+# Like _yaml.node_get(), but also checks value is one of the allowed option
+# strings. Fetches a value from a dictionary node, and makes sure it's one of
+# the pre-defined options.
+#
+# Args:
+#    node (dict): The dictionary node
+#    key (str): The key to get a value for in node
+#    allowed_options (iterable): Only accept these values
+#
+# Returns:
+#    The value, if found in 'node'.
+#
+# Raises:
+#    LoadError, when the value is not of the expected type, or is not found.
+#
+def _node_get_option_str(node, key, allowed_options):
+    result = _yaml.node_get(node, str, key)
+    if result not in allowed_options:
+        provenance = _yaml.node_get_provenance(node, key)
+        raise LoadError(LoadErrorReason.INVALID_DATA,
+                        "{}: {} should be one of: {}".format(
+                            provenance, key, ", ".join(allowed_options)))
+    return result
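In isolation, the option handling that _node_get_option_str and the prompt loading implement can be sketched like this (plain Python stand-ins for the _yaml node machinery; all names here are illustrative):

    def get_option_str(node, key, allowed_options):
        # Reject any value outside the pre-defined option strings
        result = node[key]
        if result not in allowed_options:
            raise ValueError("{} should be one of: {}".format(key, ", ".join(allowed_options)))
        return result

    user_config = {'auto-init': 'ask'}
    # 'ask' converts to True (show the prompt), 'no' to False (suppress it)
    prompt_auto_init = get_option_str(user_config, 'auto-init', ['ask', 'no']) == 'ask'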
=====================================
buildstream/_frontend/app.py
=====================================
@@ -222,9 +222,10 @@ class App():
             # Let's automatically start a `bst init` session in this case
             if e.reason == LoadErrorReason.MISSING_PROJECT_CONF and self.interactive:
                 click.echo("A project was not detected in the directory: {}".format(directory), err=True)
-                click.echo("", err=True)
-                if click.confirm("Would you like to create a new project here ?"):
-                    self.init_project(None)
+                if self.context.prompt_auto_init:
+                    click.echo("", err=True)
+                    if click.confirm("Would you like to create a new project here?"):
+                        self.init_project(None)
 
             self._error_exit(e, "Error loading project")
 
=====================================
buildstream/_frontend/cli.py
=====================================
@@ -772,7 +772,7 @@ def workspace_close(app, remove_dir, all_, elements):
     if nonexisting:
         raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
 
-    if app.interactive and remove_dir:
+    if app.interactive and remove_dir and app.context.prompt_workspace_close_remove_dir:
         if not click.confirm('This will remove all your changes, are you sure?'):
             click.echo('Aborting', err=True)
             sys.exit(-1)
@@ -806,7 +806,7 @@ def workspace_reset(app, soft, track_, all_, elements):
     if all_ and not app.stream.workspace_exists():
         raise AppError("No open workspaces to reset")
 
-    if app.interactive and not soft:
+    if app.interactive and not soft and app.context.prompt_workspace_reset_hard:
         if not click.confirm('This will remove all your changes, are you sure?'):
             click.echo('Aborting', err=True)
             sys.exit(-1)
=====================================
buildstream/_yaml.py
=====================================
@@ -351,6 +351,7 @@ _sentinel = object()
 #    expected_type (type): The expected type for the value being searched
 #    key (str): The key to get a value for in node
 #    indices (list of ints): Optionally decend into lists of lists
+#    default_value: Optionally return this value if the key is not found
 #
 # Returns:
 #    The value if found in node, otherwise default_value is returned
100 | 100 |
|
101 | 101 |
[%{elapsed}][%{key}][%{element}] %{action} %{message}
|
102 | 102 |
|
103 |
+#
|
|
104 |
+# Prompt overrides
|
|
105 |
+#
|
|
106 |
+# Here you can suppress 'are you sure?' and other kinds of prompts by supplying
|
|
107 |
+# override values. Note that e.g. 'yes' and 'no' have the same meaning here as
|
|
108 |
+# they do in the actual cli prompt.
|
|
109 |
+#
|
|
110 |
+prompt:
|
|
111 |
+ |
|
112 |
+ # Whether to create a project with 'bst init' if we are invoked outside of a
|
|
113 |
+ # directory where we can resolve the project.
|
|
114 |
+ #
|
|
115 |
+ # ask - Prompt the user to choose.
|
|
116 |
+ # no - Never create the project.
|
|
117 |
+ #
|
|
118 |
+ auto-init: ask
|
|
119 |
+ |
|
120 |
+ # Whether to really proceed with 'bst workspace close --remove-dir' removing
|
|
121 |
+ # a workspace directory, potentially losing changes.
|
|
122 |
+ #
|
|
123 |
+ # ask - Ask the user if they are sure.
|
|
124 |
+ # yes - Always remove, without asking.
|
|
125 |
+ #
|
|
126 |
+ really-workspace-close-remove-dir: ask
|
|
127 |
+ |
|
128 |
+ # Whether to really proceed with 'bst workspace reset' doing a hard reset of
|
|
129 |
+ # a workspace, potentially losing changes.
|
|
130 |
+ #
|
|
131 |
+ # ask - Ask the user if they are sure.
|
|
132 |
+ # yes - Always hard reset, without asking.
|
|
133 |
+ #
|
|
134 |
+ really-workspace-reset-hard: ask
|
=====================================
buildstream/element.py
=====================================
@@ -85,7 +85,8 @@ import shutil
 from . import _yaml
 from ._variables import Variables
 from ._versions import BST_CORE_ARTIFACT_VERSION
-from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, ErrorDomain
+from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
+    ErrorDomain
 from .utils import UtilError
 from . import Plugin, Consistency, Scope
 from . import SandboxFlags
@@ -1553,7 +1554,6 @@ class Element(Plugin):
             self.__dynamic_public = _yaml.node_copy(self.__public)
 
             # Call the abstract plugin methods
-            collect = None
             try:
                 # Step 1 - Configure
                 self.configure_sandbox(sandbox)
@@ -1564,7 +1564,7 @@ class Element(Plugin):
                 # Step 4 - Assemble
                 collect = self.assemble(sandbox)  # pylint: disable=assignment-from-no-return
                 self.__set_build_result(success=True, description="succeeded")
-            except BstError as e:
+            except ElementError as e:
                 # Shelling into a sandbox is useful to debug this error
                 e.sandbox = True
 
@@ -1586,104 +1586,105 @@ class Element(Plugin):
                     self.warn("Failed to preserve workspace state for failed build sysroot: {}"
                               .format(e))
 
-                if isinstance(e, ElementError):
-                    collect = e.collect  # pylint: disable=no-member
-
                 self.__set_build_result(success=False, description=str(e), detail=e.detail)
+                self._cache_artifact(rootdir, sandbox, e.collect)
+
                 raise
+            else:
+                return self._cache_artifact(rootdir, sandbox, collect)
             finally:
-                if collect is not None:
-                    try:
-                        sandbox_vroot = sandbox.get_virtual_directory()
-                        collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
-                    except VirtualDirectoryError:
-                        # No collect directory existed
-                        collectvdir = None
-
-                # Create artifact directory structure
-                assembledir = os.path.join(rootdir, 'artifact')
-                filesdir = os.path.join(assembledir, 'files')
-                logsdir = os.path.join(assembledir, 'logs')
-                metadir = os.path.join(assembledir, 'meta')
-                buildtreedir = os.path.join(assembledir, 'buildtree')
-                os.mkdir(assembledir)
-                if collect is not None and collectvdir is not None:
-                    os.mkdir(filesdir)
-                os.mkdir(logsdir)
-                os.mkdir(metadir)
-                os.mkdir(buildtreedir)
-
-                # Hard link files from collect dir to files directory
-                if collect is not None and collectvdir is not None:
-                    collectvdir.export_files(filesdir, can_link=True)
-
-                try:
-                    sandbox_vroot = sandbox.get_virtual_directory()
-                    sandbox_build_dir = sandbox_vroot.descend(
-                        self.get_variable('build-root').lstrip(os.sep).split(os.sep))
-                    # Hard link files from build-root dir to buildtreedir directory
-                    sandbox_build_dir.export_files(buildtreedir)
-                except VirtualDirectoryError:
-                    # Directory could not be found. Pre-virtual
-                    # directory behaviour was to continue silently
-                    # if the directory could not be found.
-                    pass
-
-                # Copy build log
-                log_filename = context.get_log_filename()
-                self._build_log_path = os.path.join(logsdir, 'build.log')
-                if log_filename:
-                    shutil.copyfile(log_filename, self._build_log_path)
-
-                # Store public data
-                _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
-
-                # Store result
-                build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
-                if self.__build_result[2] is not None:
-                    build_result_dict["detail"] = self.__build_result[2]
-                _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
-
-                # ensure we have cache keys
-                self._assemble_done()
-
-                # Store keys.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    'strong': self._get_cache_key(),
-                    'weak': self._get_cache_key(_KeyStrength.WEAK),
-                }), os.path.join(metadir, 'keys.yaml'))
-
-                # Store dependencies.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
-                }), os.path.join(metadir, 'dependencies.yaml'))
-
-                # Store workspaced.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    'workspaced': True if self._get_workspace() else False
-                }), os.path.join(metadir, 'workspaced.yaml'))
-
-                # Store workspaced-dependencies.yaml
-                _yaml.dump(_yaml.node_sanitize({
-                    'workspaced-dependencies': [
-                        e.name for e in self.dependencies(Scope.BUILD)
-                        if e._get_workspace()
-                    ]
-                }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
-
-                with self.timed_activity("Caching artifact"):
-                    artifact_size = utils._get_dir_size(assembledir)
-                    self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
-
-                if collect is not None and collectvdir is None:
-                    raise ElementError(
-                        "Directory '{}' was not found inside the sandbox, "
-                        "unable to collect artifact contents"
-                        .format(collect))
-
-            # Finally cleanup the build dir
             cleanup_rootdir()
 
+    def _cache_artifact(self, rootdir, sandbox, collect):
+        if collect is not None:
+            try:
+                sandbox_vroot = sandbox.get_virtual_directory()
+                collectvdir = sandbox_vroot.descend(collect.lstrip(os.sep).split(os.sep))
+            except VirtualDirectoryError:
+                # No collect directory existed
+                collectvdir = None
+
+        # Create artifact directory structure
+        assembledir = os.path.join(rootdir, 'artifact')
+        filesdir = os.path.join(assembledir, 'files')
+        logsdir = os.path.join(assembledir, 'logs')
+        metadir = os.path.join(assembledir, 'meta')
+        buildtreedir = os.path.join(assembledir, 'buildtree')
+        os.mkdir(assembledir)
+        if collect is not None and collectvdir is not None:
+            os.mkdir(filesdir)
+        os.mkdir(logsdir)
+        os.mkdir(metadir)
+        os.mkdir(buildtreedir)
+
+        # Hard link files from collect dir to files directory
+        if collect is not None and collectvdir is not None:
+            collectvdir.export_files(filesdir, can_link=True)
+
+        try:
+            sandbox_vroot = sandbox.get_virtual_directory()
+            sandbox_build_dir = sandbox_vroot.descend(
+                self.get_variable('build-root').lstrip(os.sep).split(os.sep))
+            # Hard link files from build-root dir to buildtreedir directory
+            sandbox_build_dir.export_files(buildtreedir)
+        except VirtualDirectoryError:
+            # Directory could not be found. Pre-virtual
+            # directory behaviour was to continue silently
+            # if the directory could not be found.
+            pass
+
+        # Copy build log
+        log_filename = self._get_context().get_log_filename()
+        self._build_log_path = os.path.join(logsdir, 'build.log')
+        if log_filename:
+            shutil.copyfile(log_filename, self._build_log_path)
+
+        # Store public data
+        _yaml.dump(_yaml.node_sanitize(self.__dynamic_public), os.path.join(metadir, 'public.yaml'))
+
+        # Store result
+        build_result_dict = {"success": self.__build_result[0], "description": self.__build_result[1]}
+        if self.__build_result[2] is not None:
+            build_result_dict["detail"] = self.__build_result[2]
+        _yaml.dump(build_result_dict, os.path.join(metadir, 'build-result.yaml'))
+
+        # ensure we have cache keys
+        self._assemble_done()
+
+        # Store keys.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'strong': self._get_cache_key(),
+            'weak': self._get_cache_key(_KeyStrength.WEAK),
+        }), os.path.join(metadir, 'keys.yaml'))
+
+        # Store dependencies.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            e.name: e._get_cache_key() for e in self.dependencies(Scope.BUILD)
+        }), os.path.join(metadir, 'dependencies.yaml'))
+
+        # Store workspaced.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'workspaced': True if self._get_workspace() else False
+        }), os.path.join(metadir, 'workspaced.yaml'))
+
+        # Store workspaced-dependencies.yaml
+        _yaml.dump(_yaml.node_sanitize({
+            'workspaced-dependencies': [
+                e.name for e in self.dependencies(Scope.BUILD)
+                if e._get_workspace()
+            ]
+        }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
+
+        with self.timed_activity("Caching artifact"):
+            artifact_size = utils._get_dir_size(assembledir)
+            self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
+
+        if collect is not None and collectvdir is None:
+            raise ElementError(
+                "Directory '{}' was not found inside the sandbox, "
+                "unable to collect artifact contents"
+                .format(collect))
+
         return artifact_size
 
     def _get_build_log(self):
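The shape of the _assemble() refactor above: the artifact is now cached on both the failure path (using e.collect from the raised ElementError) and the success path (using the directory returned by assemble()), while cleanup stays in the finally clause. A minimal sketch of that control flow, with stand-in functions rather than the real Element API:

    class ElementError(Exception):
        def __init__(self, message, collect=None):
            super().__init__(message)
            self.collect = collect

    def assemble_and_cache(assemble, cache_artifact, cleanup):
        try:
            collect = assemble()
        except ElementError as e:
            cache_artifact(e.collect)        # cache the failed build too
            raise
        else:
            return cache_artifact(collect)   # success: returns the artifact size
        finally:
            cleanup()                        # runs on both paths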
=====================================
buildstream/sandbox/_sandboxremote.py
=====================================
@@ -20,15 +20,18 @@
 
 import os
 from urllib.parse import urlparse
+from functools import partial
 
 import grpc
 
 from . import Sandbox
 from ..storage._filebaseddirectory import FileBasedDirectory
 from ..storage._casbaseddirectory import CasBasedDirectory
+from .. import _signals
 from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 from .._protos.google.rpc import code_pb2
 from .._exceptions import SandboxError
+from .._protos.google.longrunning import operations_pb2, operations_pb2_grpc
 
 
 # SandboxRemote()
@@ -51,6 +54,7 @@ class SandboxRemote(Sandbox):
                                "Only plain HTTP is currenlty supported (no HTTPS).")
 
         self.server_url = '{}:{}'.format(url.hostname, url.port)
+        self.operation_name = None
 
     def run_remote_command(self, command, input_root_digest, working_directory, environment):
         # Sends an execution request to the remote execution server.
@@ -102,10 +106,13 @@ class SandboxRemote(Sandbox):
                 operation_iterator = stub.WaitExecution(request)
 
             for operation in operation_iterator:
+                if not self.operation_name:
+                    self.operation_name = operation.name
                 if operation.done:
                     return operation
                 else:
                     last_operation = operation
+
         except grpc.RpcError as e:
             status_code = e.code()
             if status_code == grpc.StatusCode.UNAVAILABLE:
@@ -125,19 +132,39 @@ class SandboxRemote(Sandbox):
 
             return last_operation
 
+        # Set up signal handler to trigger cancel_operation on SIGTERM
         operation = None
-        with self._get_context().timed_activity("Waiting for the remote build to complete"):
+        with self._get_context().timed_activity("Waiting for the remote build to complete"), \
+                _signals.terminator(partial(self.cancel_operation, channel)):
             operation = __run_remote_command(stub, execute_request=request)
             if operation is None:
                 return None
             elif operation.done:
                 return operation
-
             while operation is not None and not operation.done:
                 operation = __run_remote_command(stub, running_operation=operation)
 
         return operation
 
+    def cancel_operation(self, channel):
+        # If we don't have the name can't send request.
+        if self.operation_name is None:
+            return
+
+        stub = operations_pb2_grpc.OperationsStub(channel)
+        request = operations_pb2.CancelOperationRequest(
+            name=str(self.operation_name))
+
+        try:
+            stub.CancelOperation(request)
+        except grpc.RpcError as e:
+            if (e.code() == grpc.StatusCode.UNIMPLEMENTED or
+                    e.code() == grpc.StatusCode.INVALID_ARGUMENT):
+                pass
+            else:
+                raise SandboxError("Failed trying to send CancelOperation request: "
+                                   "{} ({})".format(e.details(), e.code().name))
+
     def process_job_output(self, output_directories, output_files):
         # Reads the remote execution server response to an execution request.
         #
@@ -182,7 +209,7 @@ class SandboxRemote(Sandbox):
         # to replace the sandbox's virtual directory with that. Creating a new virtual directory object
         # from another hash will be interesting, though...
 
-        new_dir = CasBasedDirectory(self._get_context(), ref=dir_digest)
+        new_dir = CasBasedDirectory(self._get_context().artifactcache.cas, ref=dir_digest)
         self._set_virtual_directory(new_dir)
 
     def run(self, command, flags, *, cwd=None, env=None):
@@ -191,7 +218,7 @@ class SandboxRemote(Sandbox):
 
         if isinstance(upload_vdir, FileBasedDirectory):
             # Make a new temporary directory to put source in
-            upload_vdir = CasBasedDirectory(self._get_context(), ref=None)
+            upload_vdir = CasBasedDirectory(self._get_context().artifactcache.cas, ref=None)
             upload_vdir.import_files(self.get_virtual_directory()._get_underlying_directory())
 
         upload_vdir.recalculate_hash()
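The cancellation hook above relies on the internal _signals.terminator context manager, used here to run cancel_operation if the process is asked to terminate while waiting on the remote build. A rough standalone analogue of that behaviour (an assumption for illustration; the real helper also has to deal with nesting and non-main threads):

    import signal
    from contextlib import contextmanager

    @contextmanager
    def terminator(callback):
        # Run `callback` if SIGTERM arrives while the with-block is active
        def handler(signum, frame):
            callback()
            raise SystemExit(1)

        previous = signal.signal(signal.SIGTERM, handler)
        try:
            yield
        finally:
            signal.signal(signal.SIGTERM, previous)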
=====================================
buildstream/sandbox/sandbox.py
=====================================
@@ -156,7 +156,7 @@ class Sandbox():
         """
         if self._vdir is None or self._never_cache_vdirs:
             if 'BST_CAS_DIRECTORIES' in os.environ:
-                self._vdir = CasBasedDirectory(self.__context, ref=None)
+                self._vdir = CasBasedDirectory(self.__context.artifactcache.cas, ref=None)
             else:
                 self._vdir = FileBasedDirectory(self._root)
         return self._vdir
=====================================
buildstream/storage/_casbaseddirectory.py
=====================================
@@ -249,13 +249,11 @@ class CasBasedDirectory(Directory):
     _pb2_path_sep = "/"
     _pb2_absolute_path_prefix = "/"
 
-    def __init__(self, context, ref=None, parent=None, common_name="untitled", filename=None):
-        self.context = context
-        self.cas_directory = os.path.join(context.artifactdir, 'cas')
+    def __init__(self, cas_cache, ref=None, parent=None, common_name="untitled", filename=None):
         self.filename = filename
         self.common_name = common_name
         self.pb2_directory = remote_execution_pb2.Directory()
-        self.cas_cache = context.artifactcache.cas
+        self.cas_cache = cas_cache
         if ref:
             with open(self.cas_cache.objpath(ref), 'rb') as f:
                 self.pb2_directory.ParseFromString(f.read())
@@ -270,7 +268,7 @@ class CasBasedDirectory(Directory):
         if self._directory_read:
             return
         for entry in self.pb2_directory.directories:
-            buildStreamDirectory = CasBasedDirectory(self.context, ref=entry.digest,
+            buildStreamDirectory = CasBasedDirectory(self.cas_cache, ref=entry.digest,
                                                      parent=self, filename=entry.name)
             self.index[entry.name] = IndexEntry(entry, buildstream_object=buildStreamDirectory)
         for entry in self.pb2_directory.files:
@@ -333,7 +331,7 @@ class CasBasedDirectory(Directory):
                                  .format(name, str(self), type(newdir)))
             dirnode = self._find_pb2_entry(name)
         else:
-            newdir = CasBasedDirectory(self.context, parent=self, filename=name)
+            newdir = CasBasedDirectory(self.cas_cache, parent=self, filename=name)
             dirnode = self.pb2_directory.directories.add()
 
         dirnode.name = name
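After this change a CasBasedDirectory is built from a CASCache directly rather than from a Context, as the updated callers and tests below do. For example (a sketch with a placeholder cache directory):

    from buildstream._artifactcache.cascache import CASCache
    from buildstream.storage._casbaseddirectory import CasBasedDirectory

    cas_cache = CASCache('/tmp/bst-cache')  # placeholder path
    vdir = CasBasedDirectory(cas_cache)     # no Context needed any more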
=====================================
buildstream/utils.py
=====================================
@@ -505,17 +505,19 @@ def get_bst_version():
                         .format(__version__))
 
 
-def move_atomic(source, destination, ensure_parents=True):
+def move_atomic(source, destination, *, ensure_parents=True):
     """Move the source to the destination using atomic primitives.
 
     This uses `os.rename` to move a file or directory to a new destination.
     It wraps some `OSError` thrown errors to ensure their handling is correct.
 
     The main reason for this to exist is that rename can throw different errors
-    for the same symptom (https://www.unix.com/man-page/POSIX/3posix/rename/).
+    for the same symptom (https://www.unix.com/man-page/POSIX/3posix/rename/)
+    when we are moving a directory.
 
     We are especially interested here in the case when the destination already
-    exists. In this case, either EEXIST or ENOTEMPTY are thrown.
+    exists, is a directory and is not empty. In this case, either EEXIST or
+    ENOTEMPTY can be thrown.
 
     In order to ensure consistent handling of these exceptions, this function
     should be used instead of `os.rename`
@@ -525,6 +527,10 @@ def move_atomic(source, destination, ensure_parents=True):
         destination (str or Path): destination to which to move the source
         ensure_parents (bool): Whether or not to create the parent's directories
                                of the destination (default: True)
+    Raises:
+        DirectoryExistsError: if the destination directory already exists and is
+                              not empty
+        OSError: if another filesystem level error occured
     """
     if ensure_parents:
         os.makedirs(os.path.dirname(str(destination)), exist_ok=True)
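A sketch of how callers are expected to use this, assuming DirectoryExistsError is exposed alongside move_atomic in buildstream.utils:

    from buildstream.utils import move_atomic, DirectoryExistsError

    def publish(staging_dir, destination):
        # Stage first, then move into place atomically; a non-empty
        # destination directory surfaces as DirectoryExistsError.
        try:
            move_atomic(staging_dir, destination)
        except DirectoryExistsError as e:
            print("Destination exists and is not empty: {}".format(e))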
=====================================
conftest.py
=====================================
@@ -56,6 +56,10 @@ def integration_cache(request):
         pass
 
 
-@pytest.fixture(autouse=True)
 def clean_platform_cache():
     Platform._instance = None
+
+
+@pytest.fixture(autouse=True)
+def ensure_platform_cache_is_clean():
+    clean_platform_cache()
=====================================
tests/artifactcache/push.py
=====================================
@@ -225,7 +225,7 @@ def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_d
 
     if cas.has_push_remotes():
         # Create a CasBasedDirectory from local CAS cache content
-        directory = CasBasedDirectory(context, ref=artifact_digest)
+        directory = CasBasedDirectory(context.artifactcache.cas, ref=artifact_digest)
 
         # Push the CasBasedDirectory object
         cas.push_directory(project, directory)


=====================================
tests/integration/cachedfail.py
=====================================
@@ -4,6 +4,8 @@ import pytest
 from buildstream import _yaml
 from buildstream._exceptions import ErrorDomain
 
+from conftest import clean_platform_cache
+
 from tests.testutils import cli_integration as cli, create_artifact_share
 from tests.testutils.site import IS_LINUX
 
@@ -158,3 +160,40 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
     assert cli.get_element_state(project, 'element.bst') == 'failed'
     # This element should have been pushed to the remote
     assert share.has_artifact('test', 'element.bst', cli.get_element_key(project, 'element.bst'))
+
+
+@pytest.mark.skipif(not IS_LINUX, reason='Only available on linux')
+@pytest.mark.datafiles(DATA_DIR)
+def test_host_tools_errors_are_not_cached(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    element_path = os.path.join(project, 'elements', 'element.bst')
+
+    # Write out our test target
+    element = {
+        'kind': 'script',
+        'depends': [
+            {
+                'filename': 'base.bst',
+                'type': 'build',
+            },
+        ],
+        'config': {
+            'commands': [
+                'true',
+            ],
+        },
+    }
+    _yaml.dump(element, element_path)
+
+    # Build without access to host tools, this will fail
+    result1 = cli.run(project=project, args=['build', 'element.bst'], env={'PATH': ''})
+    result1.assert_task_error(ErrorDomain.SANDBOX, 'unavailable-local-sandbox')
+    assert cli.get_element_state(project, 'element.bst') == 'buildable'
+
+    # clean the cache before running again
+    clean_platform_cache()
+
+    # When rebuilding, this should work
+    result2 = cli.run(project=project, args=['build', 'element.bst'])
+    result2.assert_success()
+    assert cli.get_element_state(project, 'element.bst') == 'cached'
14 | 14 |
|
15 | 15 |
def create_pipeline(tmpdir, basedir, target):
|
16 | 16 |
context = Context()
|
17 |
- project = Project(basedir, context)
|
|
17 |
+ context.load()
|
|
18 | 18 |
context.deploydir = os.path.join(str(tmpdir), 'deploy')
|
19 | 19 |
context.artifactdir = os.path.join(str(tmpdir), 'artifact')
|
20 |
+ project = Project(basedir, context)
|
|
20 | 21 |
|
21 | 22 |
def dummy_handler(message, context):
|
22 | 23 |
pass
|
... | ... | @@ -3,7 +3,7 @@ import pytest |
3 | 3 |
|
4 | 4 |
from buildstream._exceptions import ErrorDomain
|
5 | 5 |
|
6 |
-from buildstream._context import Context
|
|
6 |
+from buildstream._artifactcache.cascache import CASCache
|
|
7 | 7 |
from buildstream.storage._casbaseddirectory import CasBasedDirectory
|
8 | 8 |
from buildstream.storage._filebaseddirectory import FileBasedDirectory
|
9 | 9 |
|
... | ... | @@ -17,9 +17,8 @@ def setup_backend(backend_class, tmpdir): |
17 | 17 |
if backend_class == FileBasedDirectory:
|
18 | 18 |
return backend_class(os.path.join(tmpdir, "vdir"))
|
19 | 19 |
else:
|
20 |
- context = Context()
|
|
21 |
- context.artifactdir = os.path.join(tmpdir, "cas")
|
|
22 |
- return backend_class(context)
|
|
20 |
+ cas_cache = CASCache(tmpdir)
|
|
21 |
+ return backend_class(cas_cache)
|
|
23 | 22 |
|
24 | 23 |
|
25 | 24 |
@pytest.mark.parametrize("backend", [
|
=====================================
tests/storage/virtual_directory_import.py
=====================================
@@ -15,18 +15,6 @@ from buildstream import utils
 # These are comparitive tests that check that FileBasedDirectory and
 # CasBasedDirectory act identically.
 
-
-class FakeArtifactCache():
-    def __init__(self):
-        self.cas = None
-
-
-class FakeContext():
-    def __init__(self):
-        self.artifactdir = ''
-        self.artifactcache = FakeArtifactCache()
-
-
 # This is a set of example file system contents. It's a set of trees
 # which are either expected to be problematic or were found to be
 # problematic during random testing.
@@ -120,8 +108,8 @@ def file_contents_are(path, contents):
     return file_contents(path) == contents
 
 
-def create_new_casdir(root_number, fake_context, tmpdir):
-    d = CasBasedDirectory(fake_context)
+def create_new_casdir(root_number, cas_cache, tmpdir):
+    d = CasBasedDirectory(cas_cache)
     d.import_files(os.path.join(tmpdir, "content", "root{}".format(root_number)))
     assert d.ref.hash != empty_hash_ref
     return d
@@ -175,20 +163,19 @@ def directory_not_empty(path):
 
 
 def _import_test(tmpdir, original, overlay, generator_function, verify_contents=False):
-    fake_context = FakeContext()
-    fake_context.artifactcache.cas = CASCache(tmpdir)
+    cas_cache = CASCache(tmpdir)
     # Create some fake content
     generator_function(original, tmpdir)
     if original != overlay:
         generator_function(overlay, tmpdir)
 
-    d = create_new_casdir(original, fake_context, tmpdir)
+    d = create_new_casdir(original, cas_cache, tmpdir)
 
-    duplicate_cas = create_new_casdir(original, fake_context, tmpdir)
+    duplicate_cas = create_new_casdir(original, cas_cache, tmpdir)
 
     assert duplicate_cas.ref.hash == d.ref.hash
 
-    d2 = create_new_casdir(overlay, fake_context, tmpdir)
+    d2 = create_new_casdir(overlay, cas_cache, tmpdir)
     d.import_files(d2)
     export_dir = os.path.join(tmpdir, "output-{}-{}".format(original, overlay))
     roundtrip_dir = os.path.join(tmpdir, "roundtrip-{}-{}".format(original, overlay))
@@ -247,15 +234,14 @@ def test_random_cas_import(cli, tmpdir, original):
 
 
 def _listing_test(tmpdir, root, generator_function):
-    fake_context = FakeContext()
-    fake_context.artifactcache.cas = CASCache(tmpdir)
+    cas_cache = CASCache(tmpdir)
     # Create some fake content
     generator_function(root, tmpdir)
 
     d = create_new_filedir(root, tmpdir)
     filelist = list(d.list_relative_paths())
 
-    d2 = create_new_casdir(root, fake_context, tmpdir)
+    d2 = create_new_casdir(root, cas_cache, tmpdir)
     filelist2 = list(d2.list_relative_paths())
 
     assert filelist == filelist2
=====================================
tests/testutils/artifactshare.py
=====================================
@@ -11,8 +11,8 @@ from multiprocessing import Process, Queue
 import pytest_cov
 
 from buildstream import _yaml
+from buildstream._artifactcache.cascache import CASCache
 from buildstream._artifactcache.casserver import create_server
-from buildstream._context import Context
 from buildstream._exceptions import CASError
 from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
 
@@ -45,10 +45,7 @@ class ArtifactShare():
 
         os.makedirs(self.repodir)
 
-        context = Context()
-        context.artifactdir = self.repodir
-
-        self.cas = context.artifactcache.cas
+        self.cas = CASCache(self.repodir)
 
         self.total_space = total_space
         self.free_space = free_space