Gökçen Nurlu pushed to branch gokcennurlu/remote_url_override_push_error at BuildStream / buildstream
Commits:
-
98c15463
by Tristan Van Berkom at 2018-12-05T07:04:56Z
-
5d77b871
by Tristan Van Berkom at 2018-12-05T07:37:54Z
-
26d09661
by Gökçen Nurlu at 2018-12-05T10:06:19Z
-
c172b37a
by Gökçen Nurlu at 2018-12-05T10:06:19Z
-
eb0fe05c
by Gökçen Nurlu at 2018-12-05T10:06:19Z
-
294443af
by Gökçen Nurlu at 2018-12-05T10:06:19Z
6 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_scheduler/queues/buildqueue.py
- buildstream/_scheduler/queues/queue.py
- buildstream/_stream.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
Changes:
... | ... | @@ -110,36 +110,42 @@ class ArtifactCache(): |
110 | 110 |
# assume project and element names are not allowed to contain slashes
|
111 | 111 |
return '{0}/{1}/{2}'.format(project.name, element_name, key)
|
112 | 112 |
|
113 |
+ # get_remotes_from_projects()
|
|
114 |
+ #
|
|
115 |
+ # Generates a list of artifact caches based on project configuration
|
|
116 |
+ #
|
|
117 |
+ # Returns:
|
|
118 |
+ # (list of (list of ArtifactCacheSpec, Project)): Configurations which are
|
|
119 |
+ # ready to be consumed by `self._set_remotes()`
|
|
120 |
+ #
|
|
121 |
+ # This requires that all of the projects which are to be processed in the session
|
|
122 |
+ # have already been loaded and are observable in the Context.
|
|
123 |
+ #
|
|
124 |
+ def get_remotes_from_projects(self):
|
|
125 |
+ return [
|
|
126 |
+ (_configured_remote_artifact_cache_specs(self.context, prj), prj)
|
|
127 |
+ for prj in self.context.get_projects()
|
|
128 |
+ ]
|
|
129 |
+ |
|
113 | 130 |
# setup_remotes():
|
114 | 131 |
#
|
115 | 132 |
# Sets up which remotes to use
|
116 | 133 |
#
|
117 | 134 |
# Args:
|
118 |
- # use_config (bool): Whether to use project configuration
|
|
119 |
- # remote_url (str): Remote artifact cache URL
|
|
135 |
+ # remotes (list of (list of ArtifactCacheSpec, Project)): Configurations which are
|
|
136 |
+ # ready to be consumed by `self._set_remotes()`
|
|
120 | 137 |
#
|
121 | 138 |
# This requires that all of the projects which are to be processed in the session
|
122 | 139 |
# have already been loaded and are observable in the Context.
|
123 | 140 |
#
|
124 |
- def setup_remotes(self, *, use_config=False, remote_url=None):
|
|
125 |
- |
|
141 |
+ def setup_remotes(self, *, remotes=None):
|
|
126 | 142 |
# Ensure we do not double-initialise since this can be expensive
|
127 | 143 |
assert not self._remotes_setup
|
128 | 144 |
self._remotes_setup = True
|
129 | 145 |
|
130 |
- # Initialize remote artifact caches. We allow the commandline to override
|
|
131 |
- # the user config in some cases (for example `bst push --remote=...`).
|
|
132 |
- has_remote_caches = False
|
|
133 |
- if remote_url:
|
|
134 |
- self._set_remotes([ArtifactCacheSpec(remote_url, push=True)])
|
|
135 |
- has_remote_caches = True
|
|
136 |
- if use_config:
|
|
137 |
- for project in self.context.get_projects():
|
|
138 |
- artifact_caches = _configured_remote_artifact_cache_specs(self.context, project)
|
|
139 |
- if artifact_caches: # artifact_caches is a list of ArtifactCacheSpec instances
|
|
140 |
- self._set_remotes(artifact_caches, project=project)
|
|
141 |
- has_remote_caches = True
|
|
142 |
- if has_remote_caches:
|
|
146 |
+ if remotes:
|
|
147 |
+ for caches, project in remotes:
|
|
148 |
+ self._set_remotes(caches, project=project)
|
|
143 | 149 |
self._initialize_remotes()
|
144 | 150 |
|
145 | 151 |
# specs_from_config_node()
|
... | ... | @@ -106,10 +106,16 @@ class BuildQueue(Queue): |
106 | 106 |
|
107 | 107 |
def done(self, job, element, result, success):
|
108 | 108 |
|
109 |
- if success:
|
|
110 |
- # Inform element in main process that assembly is done
|
|
111 |
- element._assemble_done()
|
|
109 |
+ # Inform element in main process that assembly is done
|
|
110 |
+ element._assemble_done()
|
|
112 | 111 |
|
113 |
- # This has to be done after _assemble_done, such that the
|
|
114 |
- # element may register its cache key as required
|
|
112 |
+ # This has to be done after _assemble_done, such that the
|
|
113 |
+ # element may register its cache key as required
|
|
114 |
+ #
|
|
115 |
+ # FIXME: Element._assemble() does not report both the failure state and the
|
|
116 |
+ # size of the newly cached failed artifact, so we can only adjust the
|
|
117 |
+ # artifact cache size for a successful build even though we know a
|
|
118 |
+ # failed build also grows the artifact cache size.
|
|
119 |
+ #
|
|
120 |
+ if success:
|
|
115 | 121 |
self._check_cache_size(job, element, result)
|
... | ... | @@ -292,7 +292,6 @@ class Queue(): |
292 | 292 |
# See the Job object for an explanation of the call signature
|
293 | 293 |
#
|
294 | 294 |
def _job_done(self, job, element, success, result):
|
295 |
- element._update_state()
|
|
296 | 295 |
|
297 | 296 |
# Update values that need to be synchronized in the main task
|
298 | 297 |
# before calling any queue implementation
|
... | ... | @@ -28,6 +28,7 @@ import tarfile |
28 | 28 |
from contextlib import contextmanager
|
29 | 29 |
from tempfile import TemporaryDirectory
|
30 | 30 |
|
31 |
+from ._artifactcache import ArtifactCacheSpec
|
|
31 | 32 |
from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
|
32 | 33 |
from ._message import Message, MessageType
|
33 | 34 |
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
|
... | ... | @@ -305,6 +306,7 @@ class Stream(): |
305 | 306 |
selection=selection,
|
306 | 307 |
use_artifact_config=use_config,
|
307 | 308 |
artifact_remote_url=remote,
|
309 |
+ artifact_remote_can_push=False,
|
|
308 | 310 |
fetch_subprojects=True)
|
309 | 311 |
|
310 | 312 |
if not self._artifacts.has_fetch_remotes():
|
... | ... | @@ -343,6 +345,7 @@ class Stream(): |
343 | 345 |
selection=selection,
|
344 | 346 |
use_artifact_config=use_config,
|
345 | 347 |
artifact_remote_url=remote,
|
348 |
+ artifact_remote_can_push=True,
|
|
346 | 349 |
fetch_subprojects=True)
|
347 | 350 |
|
348 | 351 |
if not self._artifacts.has_push_remotes():
|
... | ... | @@ -922,7 +925,8 @@ class Stream(): |
922 | 925 |
# track_except_targets (list of str): Specified targets to except from fetching
|
923 | 926 |
# track_cross_junctions (bool): Whether tracking should cross junction boundaries
|
924 | 927 |
# use_artifact_config (bool): Whether to initialize artifacts with the config
|
925 |
- # artifact_remote_url (bool): A remote url for initializing the artifacts
|
|
928 |
+ # artifact_remote_url (str): A remote url for initializing the artifacts
|
|
929 |
+ # artifact_remote_can_push (bool): Whether `artifact_remote_url` can be used to push
|
|
926 | 930 |
# fetch_subprojects (bool): Whether to fetch subprojects while loading
|
927 | 931 |
#
|
928 | 932 |
# Returns:
|
... | ... | @@ -937,6 +941,7 @@ class Stream(): |
937 | 941 |
track_cross_junctions=False,
|
938 | 942 |
use_artifact_config=False,
|
939 | 943 |
artifact_remote_url=None,
|
944 |
+ artifact_remote_can_push=False,
|
|
940 | 945 |
fetch_subprojects=False,
|
941 | 946 |
dynamic_plan=False):
|
942 | 947 |
|
... | ... | @@ -1000,12 +1005,20 @@ class Stream(): |
1000 | 1005 |
self._pipeline.resolve_elements(track_selected)
|
1001 | 1006 |
return [], track_selected
|
1002 | 1007 |
|
1003 |
- # ArtifactCache.setup_remotes expects all projects to be fully loaded
|
|
1004 |
- for project in self._context.get_projects():
|
|
1005 |
- project.ensure_fully_loaded()
|
|
1006 |
- |
|
1008 |
+ if use_artifact_config:
|
|
1009 |
+ # ArtifactCache.get_remotes_from_projects expects all projects to be
|
|
1010 |
+ # fully loaded
|
|
1011 |
+ for project in self._context.get_projects():
|
|
1012 |
+ project.ensure_fully_loaded()
|
|
1013 |
+ remotes = self._artifacts.get_remotes_from_projects()
|
|
1014 |
+ else:
|
|
1015 |
+ # Build the ArtifactCacheSpec instance based on `--remote`
|
|
1016 |
+ remotes = [(
|
|
1017 |
+ [ArtifactCacheSpec(artifact_remote_url, push=artifact_remote_can_push)],
|
|
1018 |
+ None
|
|
1019 |
+ )]
|
|
1007 | 1020 |
# Connect to remote caches, this needs to be done before resolving element state
|
1008 |
- self._artifacts.setup_remotes(use_config=use_artifact_config, remote_url=artifact_remote_url)
|
|
1021 |
+ self._artifacts.setup_remotes(remotes=remotes)
|
|
1009 | 1022 |
|
1010 | 1023 |
# Now move on to loading primary selection.
|
1011 | 1024 |
#
|
... | ... | @@ -146,7 +146,8 @@ def _test_pull(user_config_file, project_dir, artifact_dir, |
146 | 146 |
element = project.load_elements([element_name])[0]
|
147 | 147 |
|
148 | 148 |
# Manually setup the CAS remote
|
149 |
- cas.setup_remotes(use_config=True)
|
|
149 |
+ remotes = cas.get_remotes_from_projects()
|
|
150 |
+ cas.setup_remotes(remotes=remotes)
|
|
150 | 151 |
|
151 | 152 |
if cas.has_push_remotes(element=element):
|
152 | 153 |
# Push the element's artifact
|
... | ... | @@ -284,7 +285,8 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest |
284 | 285 |
cas = artifactcache.cas
|
285 | 286 |
|
286 | 287 |
# Manually setup the CAS remote
|
287 |
- artifactcache.setup_remotes(use_config=True)
|
|
288 |
+ remotes = artifactcache.get_remotes_from_projects()
|
|
289 |
+ artifactcache.setup_remotes(remotes=remotes)
|
|
288 | 290 |
|
289 | 291 |
if artifactcache.has_push_remotes():
|
290 | 292 |
directory = remote_execution_pb2.Directory()
|
... | ... | @@ -319,7 +321,8 @@ def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest |
319 | 321 |
cas = context.artifactcache
|
320 | 322 |
|
321 | 323 |
# Manually setup the CAS remote
|
322 |
- cas.setup_remotes(use_config=True)
|
|
324 |
+ remotes = cas.get_remotes_from_projects()
|
|
325 |
+ cas.setup_remotes(remotes=remotes)
|
|
323 | 326 |
|
324 | 327 |
if cas.has_push_remotes():
|
325 | 328 |
# Pull the artifact using the Tree object
|
... | ... | @@ -125,8 +125,8 @@ def _test_push(user_config_file, project_dir, artifact_dir, |
125 | 125 |
element = project.load_elements([element_name])[0]
|
126 | 126 |
|
127 | 127 |
# Manually setup the CAS remote
|
128 |
- cas.setup_remotes(use_config=True)
|
|
129 |
- cas.initialize_remotes()
|
|
128 |
+ remotes = cas.get_remotes_from_projects()
|
|
129 |
+ cas.setup_remotes(remotes=remotes)
|
|
130 | 130 |
|
131 | 131 |
if cas.has_push_remotes(element=element):
|
132 | 132 |
# Push the element's artifact
|
... | ... | @@ -185,8 +185,8 @@ def test_push_directory(cli, tmpdir, datafiles): |
185 | 185 |
assert artifactcache.contains(element, element_key)
|
186 | 186 |
|
187 | 187 |
# Manually setup the CAS remote
|
188 |
- artifactcache.setup_remotes(use_config=True)
|
|
189 |
- artifactcache.initialize_remotes()
|
|
188 |
+ remotes = artifactcache.get_remotes_from_projects()
|
|
189 |
+ artifactcache.setup_remotes(remotes=remotes)
|
|
190 | 190 |
assert artifactcache.has_push_remotes(element=element)
|
191 | 191 |
|
192 | 192 |
# Recreate the CasBasedDirectory object from the cached artifact
|
... | ... | @@ -231,8 +231,8 @@ def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_d |
231 | 231 |
cas = context.artifactcache
|
232 | 232 |
|
233 | 233 |
# Manually setup the CAS remote
|
234 |
- cas.setup_remotes(use_config=True)
|
|
235 |
- cas.initialize_remotes()
|
|
234 |
+ remotes = cas.get_remotes_from_projects()
|
|
235 |
+ cas.setup_remotes(remotes=remotes)
|
|
236 | 236 |
|
237 | 237 |
if cas.has_push_remotes():
|
238 | 238 |
# Create a CasBasedDirectory from local CAS cache content
|
... | ... | @@ -307,8 +307,8 @@ def _test_push_message(user_config_file, project_dir, artifact_dir, queue): |
307 | 307 |
cas = context.artifactcache
|
308 | 308 |
|
309 | 309 |
# Manually setup the CAS remote
|
310 |
- cas.setup_remotes(use_config=True)
|
|
311 |
- cas.initialize_remotes()
|
|
310 |
+ remotes = cas.get_remotes_from_projects()
|
|
311 |
+ cas.setup_remotes(remotes=remotes)
|
|
312 | 312 |
|
313 | 313 |
if cas.has_push_remotes():
|
314 | 314 |
# Create an example message object
|