Jim MacArthur pushed to branch jmac/remote_exec_checkout_fix at BuildStream / buildstream
Commits:
- 335d5a45 by Adam Jones at 2018-09-19T13:00:45Z
- 667dd4ca by Javier Jardón at 2018-09-19T13:25:36Z
- 68ef69e4 by Tristan Van Berkom at 2018-09-21T05:20:46Z
- 662c729f by Tristan Van Berkom at 2018-09-21T05:59:30Z
- 461a0588 by Jim MacArthur at 2018-09-21T10:53:11Z
- aa9caaac by Jim MacArthur at 2018-09-21T10:53:11Z
- 2aae68c7 by Jim MacArthur at 2018-09-21T10:53:11Z
- ca1bb72c by Jim MacArthur at 2018-09-21T10:53:11Z

6 changed files:
- CONTRIBUTING.rst
- buildstream/_artifactcache/cascache.py
- buildstream/element.py
- buildstream/sandbox/_sandboxremote.py
- buildstream/source.py
- tests/artifactcache/push.py
Changes:

CONTRIBUTING.rst
@@ -11,10 +11,9 @@ before being considered for inclusion, we strongly recommend proposing
 in advance of commencing work.
 
 If you are experiencing an issue with BuildStream or would like to submit a small patch/feature, then
-you can open issue `here <https://gitlab.com/BuildStream/buildstream/issues/new?issue%5Bassignee_id%5D=&issue%5Bmilestone_id%5D=>`
+you can `oepn an issue <https://gitlab.com/BuildStream/buildstream/issues/new?issue%5Bassignee_id%5D=&issue%5Bmilestone_id%5D=>`_
 
-For policies on how to submit and issue and how to use our project labels, we recommend that you read the policies guide
-`here <https://gitlab.com/BuildStream/nosoftware/alignment/blob/master/BuildStream_policies.md>`
+For policies on how to submit and issue and how to use our project labels, we recommend that you read the `policies guide <https://gitlab.com/BuildStream/nosoftware/alignment/blob/master/BuildStream_policies.md>`_
 
 New features must be well documented and tested either in our main
 test suite if possible, or otherwise in the integration tests.

buildstream/_artifactcache/cascache.py
@@ -348,19 +348,29 @@ class CASCache(ArtifactCache):
         return pushed
 
     def push_directory(self, project, directory):
+        """ Push the given virtual directory to all remotes.
+
+        Args:
+            project (Project): The current project
+            directory (Directory): A virtual directory object to push.
+
+        Raises: ArtifactError if no push remotes are configured.
+        """
 
         push_remotes = [r for r in self._remotes[project] if r.spec.push]
 
+        if not push_remotes:
+            raise ArtifactError("CASCache: push_directory was called, but no remote artifact " +
+                                "servers are configured as push remotes.")
+
         if directory.ref is None:
-            return None
+            return
 
         for remote in push_remotes:
             remote.init()
 
             self._send_directory(remote, directory.ref)
 
-        return directory.ref
-
     def push_message(self, project, message):
 
         push_remotes = [r for r in self._remotes[project] if r.spec.push]
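
Note for callers: with this hunk, push_directory() no longer returns a digest, and an empty push configuration is reported up front as an ArtifactError rather than a silent no-op. Below is a minimal sketch of the new calling convention (not code from this branch; `cas`, `project` and `upload_vdir` are assumed to be in scope, as they are in the SandboxRemote hunk further below):

    # Sketch only: the new push_directory() contract.
    # Raises ArtifactError when no remote artifact servers are
    # configured as push remotes; returns nothing on success.
    cas.push_directory(project, upload_vdir)

    # The digest to verify is now read from the directory object itself
    if not cas.verify_digest_pushed(project, upload_vdir.ref):
        raise SandboxError("Failed to verify that source has been pushed "
                           "to the remote artifact cache.")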

buildstream/element.py
@@ -2137,14 +2137,11 @@ class Element(Plugin):
         project = self._get_project()
         platform = Platform.get_platform()
 
-        if self.__remote_execution_url and self.BST_VIRTUAL_DIRECTORY:
-            if not self.__artifacts.has_push_remotes(element=self):
-                # Give an early warning if remote execution will not work
-                raise ElementError("Artifact {} is configured to use remote execution but has no push remotes. "
-                                   .format(self.name) +
-                                   "The remote artifact server(s) may not be correctly configured or contactable.")
-
-            self.info("Using a remote sandbox for artifact {}".format(self.name))
+        if (directory is not None and
+                self.__remote_execution_url and
+                self.BST_VIRTUAL_DIRECTORY):
+
+            self.info("Using a remote sandbox for artifact {} with directory '{}'".format(self.name, directory))
 
             sandbox = SandboxRemote(context, project,
                                     directory,

buildstream/sandbox/_sandboxremote.py
@@ -173,8 +173,8 @@ class SandboxRemote(Sandbox):
         platform = Platform.get_platform()
         cascache = platform.artifactcache
         # Now, push that key (without necessarily needing a ref) to the remote.
-        vdir_digest = cascache.push_directory(self._get_project(), upload_vdir)
-        if not vdir_digest or not cascache.verify_digest_pushed(self._get_project(), vdir_digest):
+        cascache.push_directory(self._get_project(), upload_vdir)
+        if not cascache.verify_digest_pushed(self._get_project(), upload_vdir.ref):
             raise SandboxError("Failed to verify that source has been pushed to the remote artifact cache.")
 
         # Set up environment and working directory

buildstream/source.py
@@ -930,6 +930,38 @@ class Source(Plugin):
     #                   Local Private Methods                   #
     #############################################################
 
+    # __clone_for_uri()
+    #
+    # Clone the source with an alternative URI setup for the alias
+    # which this source uses.
+    #
+    # This is used for iteration over source mirrors.
+    #
+    # Args:
+    #    uri (str): The alternative URI for this source's alias
+    #
+    # Returns:
+    #    (Source): A new clone of this Source, with the specified URI
+    #              as the value of the alias this Source has marked as
+    #              primary with either mark_download_url() or
+    #              translate_url().
+    #
+    def __clone_for_uri(self, uri):
+        project = self._get_project()
+        context = self._get_context()
+        alias = self._get_alias()
+        source_kind = type(self)
+
+        clone = source_kind(context, project, self.__meta, alias_override=(alias, uri))
+
+        # Do the necessary post instantiation routines here
+        #
+        clone._preflight()
+        clone._load_ref()
+        clone._update_state()
+
+        return clone
+
     # Tries to call fetch for every mirror, stopping once it succeeds
     def __do_fetch(self, **kwargs):
         project = self._get_project()
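
The helper above consolidates the instantiate-and-preflight pattern previously duplicated in __do_fetch() and __do_track(), shown in the next two hunks. As a rough illustration of the resulting mirror loop (a simplified sketch, not code from this branch; the real __do_fetch() handles failures more carefully, see its FIXME):

    # Sketch only: mirror iteration using __clone_for_uri(), with
    # `project`, `alias` and `kwargs` set up as in __do_fetch() below.
    # The exception handling is deliberately simplified here.
    for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
        new_source = self.__clone_for_uri(uri)
        try:
            new_source.fetch(**kwargs)
        except SourceError:
            continue   # this mirror failed, try the next one
        return         # stop at the first mirror that succeeds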

@@ -968,12 +1000,8 @@ class Source(Plugin):
             self.fetch(**kwargs)
             return
 
-        context = self._get_context()
-        source_kind = type(self)
         for uri in project.get_alias_uris(alias, first_pass=self.__first_pass):
-            new_source = source_kind(context, project, self.__meta,
-                                     alias_override=(alias, uri))
-            new_source._preflight()
+            new_source = self.__clone_for_uri(uri)
             try:
                 new_source.fetch(**kwargs)
             # FIXME: Need to consider temporary vs. permanent failures,

@@ -1006,9 +1034,7 @@ class Source(Plugin):
         # NOTE: We are assuming here that tracking only requires substituting the
         # first alias used
         for uri in reversed(project.get_alias_uris(alias, first_pass=self.__first_pass)):
-            new_source = source_kind(context, project, self.__meta,
-                                     alias_override=(alias, uri))
-            new_source._preflight()
+            new_source = self.__clone_for_uri(uri)
             try:
                 ref = new_source.track(**kwargs)
             # FIXME: Need to consider temporary vs. permanent failures,

tests/artifactcache/push.py
@@ -228,9 +228,9 @@ def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_d
         directory = CasBasedDirectory(context, ref=artifact_digest)
 
         # Push the CasBasedDirectory object
-        directory_digest = cas.push_directory(project, directory)
+        cas.push_directory(project, directory)
 
-        queue.put(directory_digest.hash)
+        queue.put(directory.ref.hash)
     else:
         queue.put("No remote configured")
 