Angelos Evripiotis pushed to branch aevri/are_you_sure at BuildStream / buildstream
Commits:
- 3590ca8c by Abderrahim Kitouni at 2019-01-28T14:59:19Z
- 39b952dc by Abderrahim Kitouni at 2019-01-28T15:19:04Z
- 80b36d0c by Javier Jardón at 2019-01-28T17:54:52Z
- a1ab48da by Valentin David at 2019-01-28T21:30:26Z
- 2fcb4491 by Jürg Billeter at 2019-01-28T22:36:22Z
- 1c05a092 by Valentin David at 2019-01-29T05:58:17Z
- 785da59c by Jürg Billeter at 2019-01-29T06:49:10Z
- ddef91ea by Valentin David at 2019-01-29T07:23:35Z
- 6a4c8611 by Jürg Billeter at 2019-01-29T08:18:45Z
- 86c8e414 by Angelos Evripiotis at 2019-01-29T10:39:29Z
8 changed files:
- NEWS
- buildstream/_cas/casserver.py
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_scheduler/scheduler.py
- buildstream/data/userconfig.yaml
- conftest.py
- requirements/requirements.in
Changes:
NEWS:

@@ -50,6 +50,11 @@ buildstream 1.3.1
     an error message and a hint instead, to avoid bothering folks that just
     made a mistake.
 
+  o BREAKING CHANGE: The unconditional 'Are you sure?' prompts have been
+    removed. These would always ask you if you were sure when running
+    'bst workspace close --remove-dir' or 'bst workspace reset'. They got in
+    the way too often.
+
   o Failed builds are included in the cache as well.
     `bst checkout` will provide anything in `%{install-root}`.
     A build including cached fails will cause any dependant elements

@@ -87,12 +92,6 @@ buildstream 1.3.1
     instead of just a specially-formatted build-root with a `root` and `scratch`
     subdirectory.
 
-  o The buildstream.conf file learned new
-    'prompt.really-workspace-close-remove-dir' and
-    'prompt.really-workspace-reset-hard' options. These allow users to suppress
-    certain confirmation prompts, e.g. double-checking that the user meant to
-    run the command as typed.
-
   o Due to the element `build tree` being cached in the respective artifact their
     size in some cases has significantly increased. In *most* cases the build trees
     are not utilised when building targets, as such by default bst 'pull' & 'build'
buildstream/_cas/casserver.py:

@@ -324,7 +324,7 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddres
             blob_response.digest.size_bytes = digest.size_bytes
 
             if len(blob_request.data) != digest.size_bytes:
-                blob_response.status.code = grpc.StatusCode.FAILED_PRECONDITION
+                blob_response.status.code = code_pb2.FAILED_PRECONDITION
                 continue
 
             try:

@@ -335,10 +335,10 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddres
                     out.flush()
                     server_digest = self.cas.add_object(path=out.name)
                     if server_digest.hash != digest.hash:
-                        blob_response.status.code = grpc.StatusCode.FAILED_PRECONDITION
+                        blob_response.status.code = code_pb2.FAILED_PRECONDITION
 
             except ArtifactTooLargeException:
-                blob_response.status.code = grpc.StatusCode.RESOURCE_EXHAUSTED
+                blob_response.status.code = code_pb2.RESOURCE_EXHAUSTED
 
         return response
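Note: the status-code change above swaps a grpc.StatusCode enum for a code_pb2 constant. A minimal standalone sketch of the difference (not part of this branch; assumes grpcio and the google.rpc protos from googleapis-common-protos are installed):

```python
import grpc
from google.rpc import code_pb2, status_pb2

# google.rpc.Status.code is a plain int32 field; code_pb2 provides the
# matching integer constants from google/rpc/code.proto.
status = status_pb2.Status()
status.code = code_pb2.FAILED_PRECONDITION
print(status.code)  # -> 9

# grpc.StatusCode is a Python enum whose .value is a (number, name) tuple,
# so assigning it to the proto int field would raise a TypeError:
print(grpc.StatusCode.FAILED_PRECONDITION.value)  # -> (9, 'failed precondition')
# status.code = grpc.StatusCode.FAILED_PRECONDITION  # TypeError
```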
buildstream/_context.py:

@@ -121,18 +121,10 @@ class Context():
         # Whether or not to attempt to pull build trees globally
         self.pull_buildtrees = None
 
-        # Boolean, whether we double-check with the user that they meant to
-        # remove a workspace directory.
-        self.prompt_workspace_close_remove_dir = None
-
         # Boolean, whether we double-check with the user that they meant to
         # close the workspace when they're using it to access the project.
         self.prompt_workspace_close_project_inaccessible = None
 
-        # Boolean, whether we double-check with the user that they meant to do
-        # a hard reset of a workspace, potentially losing changes.
-        self.prompt_workspace_reset_hard = None
-
         # Whether elements must be rebuilt when their dependencies have changed
         self._strict_build_plan = None
 

@@ -260,16 +252,10 @@ class Context():
         prompt = _yaml.node_get(
             defaults, Mapping, 'prompt')
         _yaml.node_validate(prompt, [
-            'really-workspace-close-remove-dir',
             'really-workspace-close-project-inaccessible',
-            'really-workspace-reset-hard',
         ])
-        self.prompt_workspace_close_remove_dir = _node_get_option_str(
-            prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
         self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
             prompt, 'really-workspace-close-project-inaccessible', ['ask', 'yes']) == 'ask'
-        self.prompt_workspace_reset_hard = _node_get_option_str(
-            prompt, 'really-workspace-reset-hard', ['ask', 'yes']) == 'ask'
 
         # Load per-projects overrides
         self._project_overrides = _yaml.node_get(defaults, Mapping, 'projects', default_value={})
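Note: the surviving 'really-workspace-close-project-inaccessible' option still uses the 'ask'/'yes' pattern above. A simplified standalone sketch of that mapping (the helper name below is a hypothetical stand-in for `_node_get_option_str(...) == 'ask'`):

```python
# Sketch only: map a user-facing 'ask'/'yes' option string to the boolean
# the Context stores ("should we prompt before proceeding?").
def prompt_option_to_bool(value, valid=('ask', 'yes')):
    if value not in valid:
        raise ValueError("expected one of {}, got {!r}".format(valid, value))
    return value == 'ask'

assert prompt_option_to_bool('ask') is True    # double-check with the user
assert prompt_option_to_bool('yes') is False   # proceed without asking
```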
buildstream/_frontend/cli.py:

@@ -841,11 +841,6 @@ def workspace_close(app, remove_dir, all_, elements):
     if nonexisting:
         raise AppError("Workspace does not exist", detail="\n".join(nonexisting))
 
-    if app.interactive and remove_dir and app.context.prompt_workspace_close_remove_dir:
-        if not click.confirm('This will remove all your changes, are you sure?'):
-            click.echo('Aborting', err=True)
-            sys.exit(-1)
-
     for element_name in elements:
         app.stream.workspace_close(element_name, remove_dir=remove_dir)
 

@@ -879,11 +874,6 @@ def workspace_reset(app, soft, track_, all_, elements):
     if all_ and not app.stream.workspace_exists():
         raise AppError("No open workspaces to reset")
 
-    if app.interactive and not soft and app.context.prompt_workspace_reset_hard:
-        if not click.confirm('This will remove all your changes, are you sure?'):
-            click.echo('Aborting', err=True)
-            sys.exit(-1)
-
     if all_:
         elements = tuple(element_name for element_name, _ in app.context.get_workspaces().list())
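Note: both commands drop the same confirmation guard. For reference, a standalone sketch of the pattern being removed (the function and argument names are simplified stand-ins, not BuildStream API):

```python
import sys

import click

# Sketch only: ask for confirmation before a destructive action and abort
# the command if the user declines.
def confirm_destructive_action(interactive, destructive):
    if interactive and destructive:
        if not click.confirm('This will remove all your changes, are you sure?'):
            click.echo('Aborting', err=True)
            sys.exit(-1)
```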
buildstream/_scheduler/scheduler.py:

@@ -314,10 +314,10 @@ class Scheduler():
    #    job (Job): The job to spawn
    #
    def _spawn_job(self, job):
-       job.spawn()
        self._active_jobs.append(job)
        if self._job_start_callback:
            self._job_start_callback(job)
+       job.spawn()
 
    # Callback for the cache size job
    def _cache_size_job_complete(self, status, cache_size):
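Note: a simplified sketch (not the real Scheduler) of the new call order in _spawn_job(); the rationale in the comments is inferred from the diff, not stated in it:

```python
# Sketch only: track the job and fire the start callback before spawning,
# so listeners already know about the job by the time it starts working.
class MiniScheduler:
    def __init__(self, on_job_start=None):
        self._active_jobs = []
        self._on_job_start = on_job_start

    def _spawn_job(self, job):
        self._active_jobs.append(job)   # track the job first
        if self._on_job_start:
            self._on_job_start(job)     # then notify listeners
        job.spawn()                     # only then start the work


class FakeJob:
    def __init__(self, name):
        self.name = name

    def spawn(self):
        print("spawning", self.name)


sched = MiniScheduler(on_job_start=lambda job: print("starting", job.name))
sched._spawn_job(FakeJob("build-foo"))  # prints "starting ..." before "spawning ..."
```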
buildstream/data/userconfig.yaml:

@@ -112,14 +112,6 @@ logging:
 #
 prompt:
 
-  # Whether to really proceed with 'bst workspace close --remove-dir' removing
-  # a workspace directory, potentially losing changes.
-  #
-  #  ask - Ask the user if they are sure.
-  #  yes - Always remove, without asking.
-  #
-  really-workspace-close-remove-dir: ask
-
   # Whether to really proceed with 'bst workspace close' when doing so would
   # stop them from running bst commands in this workspace.
   #

@@ -127,11 +119,3 @@ prompt:
   #  yes - Always close, without asking.
   #
   really-workspace-close-project-inaccessible: ask
-
-  # Whether to really proceed with 'bst workspace reset' doing a hard reset of
-  # a workspace, potentially losing changes.
-  #
-  #  ask - Ask the user if they are sure.
-  #  yes - Always hard reset, without asking.
-  #
-  really-workspace-reset-hard: ask
conftest.py:

@@ -54,6 +54,7 @@ class IntegrationCache():
 
     def __init__(self, cache):
         cache = os.path.abspath(cache)
+        os.makedirs(cache, exist_ok=True)
 
         # Use the same sources every time
         self.sources = os.path.join(cache, 'sources')
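Note: for context, a minimal standalone example (hypothetical paths) of the os.makedirs(..., exist_ok=True) behaviour the new line relies on: it creates missing parent directories and is a no-op when the directory already exists, so the cache directory can be prepared unconditionally.

```python
import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    cache = os.path.join(tmp, 'integration-cache')

    os.makedirs(cache, exist_ok=True)   # creates the directory
    os.makedirs(cache, exist_ok=True)   # second call is a no-op, does not raise
    assert os.path.isdir(cache)
```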
requirements/requirements.in:

@@ -1,8 +1,8 @@
-Click
+Click >= 7.0
 grpcio >= 1.10
 Jinja2 >= 2.10
 pluginbase
-protobuf >= 3.5
+protobuf >= 3.6
 psutil
 # According to ruamel.yaml's PyPI page, we are suppose to use
 # "<=0.15" in production until 0.15 becomes API stable.