Tom Pollard pushed to branch tpollard/566 at BuildStream / buildstream
Commits:
- 46fd668e by Tom Pollard at 2018-12-21T13:08:55Z
5 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/element.py
- + tests/integration/pushbuildtrees.py
- tests/testutils/runcli.py
Changes:
buildstream/_artifactcache/artifactcache.py:

@@ -74,6 +74,7 @@ class ArtifactCache():

         self._has_fetch_remotes = False
         self._has_push_remotes = False
+        self._has_partial_push_remotes = False

         os.makedirs(self.extractdir, exist_ok=True)

@@ -398,6 +399,8 @@ class ArtifactCache():
            self._has_fetch_remotes = True
            if remote_spec.push:
                self._has_push_remotes = True
+           if remote_spec.partial_push:
+               self._has_partial_push_remotes = True

            remotes[remote_spec.url] = CASRemote(remote_spec)

@@ -596,6 +599,31 @@ class ArtifactCache():
         remotes_for_project = self._remotes[element._get_project()]
         return any(remote.spec.push for remote in remotes_for_project)

+    # has_partial_push_remotes():
+    #
+    # Check whether any remote repositories are available for pushing
+    # non-complete artifacts
+    #
+    # Args:
+    #     element (Element): The Element to check
+    #
+    # Returns:
+    #     (bool): True if any remote repository is configured for optional
+    #             partial pushes, False otherwise
+    #
+    def has_partial_push_remotes(self, *, element=None):
+        # If there are no partial push remotes available, we can't partial push at all
+        if not self._has_partial_push_remotes:
+            return False
+        elif element is None:
+            # At least one remote is set to allow partial pushes
+            return True
+        else:
+            # Check whether the specified element's project has push remotes configured
+            # to accept partial artifact pushes
+            remotes_for_project = self._remotes[element._get_project()]
+            return any(remote.spec.partial_push for remote in remotes_for_project)
+
     # push():
     #
     # Push committed artifact to remote repository.
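The method mirrors has_push_remotes() just above: called with no element it answers the global question, while passing an element scopes the check to that element's project. A minimal usage sketch, assuming an ArtifactCache instance `artifacts` and an Element `element` (both names illustrative):

    # Global: is any remote at all configured with allow-partial-push?
    if artifacts.has_partial_push_remotes():
        # Scoped: does this element's project have such a remote?
        can_push_partial = artifacts.has_partial_push_remotes(element=element)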
@@ -603,6 +631,8 @@ class ArtifactCache():
     # Args:
     #     element (Element): The Element whose artifact is to be pushed
     #     keys (list): The cache keys to use
+    #     partial (bool): If the artifact is cached in a partial state
+    #     subdir (string): Optional subdir to exclude from the push
     #
     # Returns:
     #     (bool): True if any remote was updated, False if no pushes were required
@@ -610,12 +640,25 @@ class ArtifactCache():
     # Raises:
     #     (ArtifactError): if there was an error
     #
-    def push(self, element, keys):
+    def push(self, element, keys, partial=False, subdir=None):
         refs = [self.get_artifact_fullname(element, key) for key in list(keys)]

         project = element._get_project()

-        push_remotes = [r for r in self._remotes[project] if r.spec.push]
+        push_remotes = []
+        partial_remotes = []
+
+        # Create the list of remotes to push to, given the current element and partial push config
+        if not partial:
+            push_remotes = [r for r in self._remotes[project] if (r.spec.push and not r.spec.partial_push)]
+
+        if self._has_partial_push_remotes:
+            # Create a specific list of the remotes expecting the artifact to be pushed in a
+            # partial state, i.e. without the optional subdir if it exists locally. There is
+            # no need to attempt pushing a partial artifact to a remote that is queued to
+            # also receive a full artifact.
+            partial_remotes = [r for r in self._remotes[project] if (r.spec.partial_push and r.spec.push) and
+                               r not in push_remotes]

         pushed = False

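The partitioning rule above is easy to misread, so here is a self-contained sketch of which remotes land in each list when the artifact is fully cached locally (plain namedtuples stand in for CASRemote objects; all names are illustrative):

    from collections import namedtuple

    Spec = namedtuple('Spec', 'url push partial_push')
    remotes = [Spec('share1', True, False),   # full-push remote
               Spec('share2', True, True),    # accepts partial pushes
               Spec('share3', False, False)]  # fetch-only remote

    partial = False  # the artifact is fully cached locally
    push_remotes = [r for r in remotes if r.push and not r.partial_push] if not partial else []
    partial_remotes = [r for r in remotes if r.partial_push and r.push and r not in push_remotes]

    assert [r.url for r in push_remotes] == ['share1']     # gets the full artifact
    assert [r.url for r in partial_remotes] == ['share2']  # gets it without the subdir

With partial=True, push_remotes stays empty and share2 still receives the partial push.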
@@ -632,6 +675,19 @@ class ArtifactCache():
                     remote.spec.url, element._get_brief_display_key()
                 ))

+        for remote in partial_remotes:
+            remote.init()
+            display_key = element._get_brief_display_key()
+            element.status("Pushing partial artifact {} -> {}".format(display_key, remote.spec.url))
+
+            if self.cas.push(refs, remote, subdir=subdir):
+                element.info("Pushed partial artifact {} -> {}".format(display_key, remote.spec.url))
+                pushed = True
+            else:
+                element.info("Remote ({}) already has {} partial cached".format(
+                    remote.spec.url, element._get_brief_display_key()
+                ))
+
         return pushed

     # pull():
@@ -659,14 +715,23 @@ class ArtifactCache():
                 element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))

                 if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
-                    element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
                     if subdir:
-                        # Attempt to extract subdir into artifact extract dir if it already exists
-                        # without containing the subdir. If the respective artifact extract dir does not
-                        # exist a complete extraction will complete.
-                        self.extract(element, key, subdir)
-                    # no need to pull from additional remotes
-                    return True
+                        if not self.contains_subdir_artifact(element, key, subdir):
+                            # The pull was expecting the specific subdir to be present; attempt
+                            # to find it in other available remotes
+                            element.info("Pulled partial artifact {} <- {}. Attempting to retrieve {} from remotes"
+                                         .format(display_key, remote.spec.url, subdir))
+                        else:
+                            element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
+                            # Attempt to extract subdir into artifact extract dir if it already exists
+                            # without containing the subdir. If the respective artifact extract dir does not
+                            # exist a complete extraction will complete.
+                            self.extract(element, key, subdir)
+                            # no need to pull from additional remotes
+                            return True
+                    else:
+                        element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
+                        return True
                 else:
                     element.info("Remote ({}) does not have {} cached".format(
                         remote.spec.url, element._get_brief_display_key()
buildstream/_artifactcache/cascache.py:

@@ -45,7 +45,8 @@ from .. import _yaml
 _MAX_PAYLOAD_BYTES = 1024 * 1024


-class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
+class CASRemoteSpec(namedtuple('CASRemoteSpec',
+                               'url push partial_push server_cert client_key client_cert instance_name')):

     # _new_from_config_node
     #
@@ -53,9 +54,13 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
     #
     @staticmethod
     def _new_from_config_node(spec_node, basedir=None):
-        _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance_name'])
+        _yaml.node_validate(spec_node,
+                            ['url', 'push', 'allow-partial-push', 'server-cert', 'client-key',
+                             'client-cert', 'instance_name'])
         url = _yaml.node_get(spec_node, str, 'url')
         push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
+        partial_push = _yaml.node_get(spec_node, bool, 'allow-partial-push', default_value=False)
+
         if not url:
             provenance = _yaml.node_get_provenance(spec_node, 'url')
             raise LoadError(LoadErrorReason.INVALID_DATA,
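For reference, the user-facing configuration that exercises the new key looks like the following, shown here in the Python dict form that the test suite later in this patch passes to cli.configure() (URLs are placeholders); in a user's buildstream.conf the same keys appear under the artifacts node. When 'allow-partial-push' is omitted it defaults to False, so existing configurations behave as before:

    config = {
        'artifacts': [
            {'url': 'https://share1.example.com', 'push': True},
            {'url': 'https://share2.example.com', 'push': True, 'allow-partial-push': True},
        ]
    }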
@@ -85,10 +90,10 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key
             raise LoadError(LoadErrorReason.INVALID_DATA,
                             "{}: 'client-cert' was specified without 'client-key'".format(provenance))

-        return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
+        return CASRemoteSpec(url, push, partial_push, server_cert, client_key, client_cert, instance_name)


-CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
+CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)


 class BlobNotFound(CASError):
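Because the new field sits in the middle of the namedtuple, the defaults tuple must grow by one entry; the defaults apply to the trailing five fields, so partial_push defaults to False for any caller that omits it. A standalone sketch of that behaviour (the URL is illustrative):

    from collections import namedtuple

    CASRemoteSpec = namedtuple('CASRemoteSpec',
                               'url push partial_push server_cert client_key client_cert instance_name')
    # Defaults cover the trailing five fields: partial_push, server_cert,
    # client_key, client_cert, instance_name
    CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)

    spec = CASRemoteSpec('https://cache.example.com', True)  # url, push
    assert spec.partial_push is False  # existing two-argument call sites are unaffected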
@@ -283,34 +288,40 @@ class CASCache():
     #     (bool): True if pull was successful, False if ref was not available
     #
     def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
-        try:
-            remote.init()
+        while True:
+            try:
+                remote.init()

-            request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
-            request.key = ref
-            response = remote.ref_storage.GetReference(request)
+                request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
+                request.key = ref
+                response = remote.ref_storage.GetReference(request)

-            tree = remote_execution_pb2.Digest()
-            tree.hash = response.digest.hash
-            tree.size_bytes = response.digest.size_bytes
+                tree = remote_execution_pb2.Digest()
+                tree.hash = response.digest.hash
+                tree.size_bytes = response.digest.size_bytes

-            # Check if the element artifact is present, if so just fetch the subdir.
-            if subdir and os.path.exists(self.objpath(tree)):
-                self._fetch_subdir(remote, tree, subdir)
-            else:
-                # Fetch artifact, excluded_subdirs determined in pullqueue
-                self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
+                # Check if the element artifact is present, if so just fetch the subdir.
+                if subdir and os.path.exists(self.objpath(tree)):
+                    self._fetch_subdir(remote, tree, subdir)
+                else:
+                    # Fetch artifact, excluded_subdirs determined in pullqueue
+                    self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)

-            self.set_ref(ref, tree)
+                self.set_ref(ref, tree)

-            return True
-        except grpc.RpcError as e:
-            if e.code() != grpc.StatusCode.NOT_FOUND:
-                raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
-            else:
-                return False
-        except BlobNotFound as e:
-            return False
+                return True
+            except grpc.RpcError as e:
+                if e.code() != grpc.StatusCode.NOT_FOUND:
+                    raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
+                else:
+                    return False
+            except BlobNotFound:
+                if not excluded_subdirs and subdir:
+                    # Could not complete a full pull, attempt partial
+                    excluded_subdirs, subdir = subdir, excluded_subdirs
+                else:
+                    return False

     # pull_tree():
     #
@@ -355,6 +366,7 @@ class CASCache():
     # Args:
     #     refs (list): The refs to push
     #     remote (CASRemote): The remote to push to
+    #     subdir (string): Optional specific subdir to exempt from the push
     #
     # Returns:
     #     (bool): True if any remote was updated, False if no pushes were required
@@ -362,7 +374,7 @@ class CASCache():
     # Raises:
     #     (CASError): if there was an error
     #
-    def push(self, refs, remote):
+    def push(self, refs, remote, subdir=None):
         skipped_remote = True
         try:
             for ref in refs:
@@ -384,7 +396,7 @@ class CASCache():
                     # Intentionally re-raise RpcError for outer except block.
                     raise

-                self._send_directory(remote, tree)
+                self._send_directory(remote, tree, excluded_dir=subdir)

                 request = buildstream_pb2.UpdateReferenceRequest(instance_name=remote.spec.instance_name)
                 request.keys.append(ref)
@@ -866,10 +878,17 @@ class CASCache():
             a += 1
             b += 1

-    def _reachable_refs_dir(self, reachable, tree, update_mtime=False):
+    def _reachable_refs_dir(self, reachable, tree, update_mtime=False, subdir=False):
         if tree.hash in reachable:
             return

+        # If looping through subdir digests, skip processing if the
+        # ref path does not exist, allowing for partial objects
+        if subdir and not os.path.exists(self.objpath(tree)):
+            return
+
+        # Raises a FileNotFound exception if the path does not exist,
+        # which should only occur on the top level digest
         if update_mtime:
             os.utime(self.objpath(tree))

@@ -886,9 +905,9 @@ class CASCache():
             reachable.add(filenode.digest.hash)

         for dirnode in directory.directories:
-            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime)
+            self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, subdir=True)

-    def _required_blobs(self, directory_digest):
+    def _required_blobs(self, directory_digest, excluded_dir=None):
         # parse directory, and recursively add blobs
         d = remote_execution_pb2.Digest()
         d.hash = directory_digest.hash
@@ -907,7 +926,8 @@ class CASCache():
             yield d

         for dirnode in directory.directories:
-            yield from self._required_blobs(dirnode.digest)
+            if dirnode.name != excluded_dir:
+                yield from self._required_blobs(dirnode.digest)

     def _fetch_blob(self, remote, digest, stream):
         resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
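Note that excluded_dir is only compared against the immediate children of the digest being walked, and the recursive call does not forward it, so only the top-level subdirectory is pruned; a nested directory that happens to share the name is still yielded. A small sketch of that behaviour using plain dicts in place of Directory messages (names illustrative):

    def required_names(directory, excluded_dir=None):
        # directory: {name: child_directory_dict}
        for name, child in directory.items():
            if name != excluded_dir:
                yield name
                yield from required_names(child)  # excluded_dir is not forwarded

    tree = {'files': {'buildtree': {}},  # nested dir sharing the excluded name
            'buildtree': {}}             # top-level dir to exclude
    assert list(required_names(tree, excluded_dir='buildtree')) == ['files', 'buildtree']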
@@ -1029,6 +1049,7 @@ class CASCache():
         objpath = self._ensure_blob(remote, dir_digest)

         directory = remote_execution_pb2.Directory()
+
         with open(objpath, 'rb') as f:
             directory.ParseFromString(f.read())

@@ -1104,9 +1125,8 @@ class CASCache():

         assert response.committed_size == digest.size_bytes

-    def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
-        required_blobs = self._required_blobs(digest)
-
+    def _send_directory(self, remote, digest, u_uid=uuid.uuid4(), excluded_dir=None):
+        required_blobs = self._required_blobs(digest, excluded_dir=excluded_dir)
         missing_blobs = dict()
         # Limit size of FindMissingBlobs request
         for required_blobs_group in _grouper(required_blobs, 512):
buildstream/element.py:

@@ -1801,13 +1801,19 @@ class Element(Plugin):
     #     (bool): True if this element does not need a push job to be created
     #
     def _skip_push(self):
+
         if not self.__artifacts.has_push_remotes(element=self):
             # No push remotes for this element's project
             return True

         # Do not push elements that aren't cached, or that are cached with a dangling buildtree
-        # artifact unless element type is expected to have an an empty buildtree directory
-        if not self._cached_buildtree():
+        # artifact unless the element type is expected to have an empty buildtree directory. Check
+        # that this default behaviour is not overridden via a remote configured to allow pushing
+        # artifacts without their corresponding buildtree.
+        if not self._cached():
+            return True
+
+        if not self._cached_buildtree() and not self.__artifacts.has_partial_push_remotes(element=self):
             return True

         # Do not push tainted artifact
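The net effect of the new checks can be summarised as a small decision function; a hedged sketch (parameter names illustrative) covering the case this change adds:

    def skip_push(has_push_remotes, cached, cached_buildtree, has_partial_push_remotes):
        if not has_push_remotes:
            return True   # nowhere to push
        if not cached:
            return True   # artifact not cached at all
        if not cached_buildtree and not has_partial_push_remotes:
            return True   # dangling buildtree and no remote accepts partial artifacts
        return False

    # New behaviour: a missing buildtree no longer forces a skip when a
    # partial-push remote is configured
    assert skip_push(True, True, False, True) is False
    assert skip_push(True, True, False, False) is True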
@@ -1818,7 +1824,8 @@ class Element(Plugin):

     # _push():
     #
-    # Push locally cached artifact to remote artifact repository.
+    # Push locally cached artifact to remote artifact repository. An attempt
+    # will be made to push partial artifacts given the current config
     #
     # Returns:
     #     (bool): True if the remote was updated, False if it already existed
@@ -1831,8 +1838,19 @@ class Element(Plugin):
             self.warn("Not pushing tainted artifact.")
             return False

-        # Push all keys used for local commit
-        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit())
+        # Push all keys used for local commit. This could be a full or partial
+        # push, given the previous _skip_push() logic. If the buildtree isn't
+        # cached, set up a partial push.
+
+        partial = False
+        subdir = 'buildtree'
+        if not self._cached_buildtree():
+            partial = True
+
+        pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit(), partial=partial, subdir=subdir)
+
+        # The artifact might be cached partially on the server, with only the top level ref
+        # existing. Check whether a push of the locally cached buildtree should be attempted
         if not pushed:
             return False

|
1 |
+import os
|
|
2 |
+import shutil
|
|
3 |
+import pytest
|
|
4 |
+ |
|
5 |
+from tests.testutils import cli_integration as cli, create_artifact_share
|
|
6 |
+from tests.testutils.integration import assert_contains
|
|
7 |
+from tests.testutils.site import HAVE_BWRAP, IS_LINUX
|
|
8 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
9 |
+ |
|
10 |
+ |
|
11 |
+DATA_DIR = os.path.join(
|
|
12 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
13 |
+ "project"
|
|
14 |
+)
|
|
15 |
+ |
|
16 |
+ |
|
17 |
+# Remove artifact cache & set cli.config value of pull-buildtrees
|
|
18 |
+# to false, which is the default user context. The cache has to be
|
|
19 |
+# cleared as just forcefully removing the refpath leaves dangling objects.
|
|
20 |
+def default_state(cli, tmpdir, share):
|
|
21 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
22 |
+ cli.configure({
|
|
23 |
+ 'artifacts': {'url': share.repo, 'push': False},
|
|
24 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
|
|
25 |
+ 'cache': {'pull-buildtrees': False},
|
|
26 |
+ })
|
|
27 |
+ |
|
28 |
+ |
|
29 |
+# Tests to capture the integration of the optionl push of buildtrees.
|
|
30 |
+# The behaviour should encompass pushing artifacts that are already cached
|
|
31 |
+# without a buildtree as well as artifacts that are cached with their buildtree.
|
|
32 |
+# This option is handled via 'allow-partial-push' on a per artifact remote config
|
|
33 |
+# node basis. Multiple remote config nodes can point to the same url and as such can
|
|
34 |
+# have different 'allow-partial-push' options, tests need to cover this
|
|
35 |
+@pytest.mark.integration
|
|
36 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
37 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
38 |
+def test_pushbuildtrees(cli, tmpdir, datafiles, integration_cache):
|
|
39 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
40 |
+ element_name = 'autotools/amhello.bst'
|
|
41 |
+ |
|
42 |
+ # Create artifact shares for pull & push testing
|
|
43 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
|
|
44 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
|
|
45 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3:
|
|
46 |
+ |
|
47 |
+ cli.configure({
|
|
48 |
+ 'artifacts': {'url': share1.repo, 'push': True},
|
|
49 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
50 |
+ })
|
|
51 |
+ |
|
52 |
+ cli.configure({'artifacts': [{'url': share1.repo, 'push': True},
|
|
53 |
+ {'url': share2.repo, 'push': True, 'allow-partial-push': True}]})
|
|
54 |
+ |
|
55 |
+ # Build autotools element, checked pushed, delete local.
|
|
56 |
+ # As share 2 has push & allow-partial-push set a true, it
|
|
57 |
+ # should have pushed the artifacts, without the cached buildtrees,
|
|
58 |
+ # to it.
|
|
59 |
+ result = cli.run(project=project, args=['build', element_name])
|
|
60 |
+ assert result.exit_code == 0
|
|
61 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
62 |
+ elementdigest = share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
63 |
+ buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
64 |
+ elementdigest.hash, 'buildtree')
|
|
65 |
+ assert os.path.isdir(buildtreedir)
|
|
66 |
+ assert element_name in result.get_partial_pushed_elements()
|
|
67 |
+ assert share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
68 |
+ assert share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
69 |
+ default_state(cli, tmpdir, share1)
|
|
70 |
+ |
|
71 |
+ # Check that after explictly pulling an artifact without it's buildtree,
|
|
72 |
+ # we can push it to another remote that is configured to accept the partial
|
|
73 |
+ # artifact
|
|
74 |
+ result = cli.run(project=project, args=['pull', element_name])
|
|
75 |
+ assert element_name in result.get_pulled_elements()
|
|
76 |
+ cli.configure({'artifacts': {'url': share3.repo, 'push': True, 'allow-partial-push': True}})
|
|
77 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
78 |
+ assert not os.path.isdir(buildtreedir)
|
|
79 |
+ result = cli.run(project=project, args=['push', element_name])
|
|
80 |
+ assert result.exit_code == 0
|
|
81 |
+ assert element_name in result.get_partial_pushed_elements()
|
|
82 |
+ assert element_name not in result.get_pushed_elements()
|
|
83 |
+ assert share3.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
84 |
+ default_state(cli, tmpdir, share3)
|
|
85 |
+ |
|
86 |
+ # Delete the local cache and pull the partial artifact from share 3,
|
|
87 |
+ # this should not include the buildtree when extracted locally, even when
|
|
88 |
+ # pull-buildtrees is given as a cli parameter as no available remotes will
|
|
89 |
+ # contain the buildtree
|
|
90 |
+ assert not os.path.isdir(buildtreedir)
|
|
91 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
92 |
+ result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
|
|
93 |
+ assert element_name in result.get_partial_pulled_elements()
|
|
94 |
+ assert not os.path.isdir(buildtreedir)
|
|
95 |
+ default_state(cli, tmpdir, share3)
|
|
96 |
+ |
|
97 |
+ # Delete the local cache and attempt to pull a 'full' artifact, including its
|
|
98 |
+ # buildtree. As with before share3 being the first listed remote will not have
|
|
99 |
+ # the buildtree available and should spawn a partial pull. Having share1 as the
|
|
100 |
+ # second available remote should allow the buildtree to be pulled thus 'completing'
|
|
101 |
+ # the artifact
|
|
102 |
+ cli.configure({'artifacts': [{'url': share3.repo, 'allow-partial-push': True},
|
|
103 |
+ {'url': share1.repo, 'push': True}]})
|
|
104 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
105 |
+ result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
|
|
106 |
+ assert element_name in result.get_partial_pulled_elements()
|
|
107 |
+ assert element_name in result.get_pulled_elements()
|
|
108 |
+ assert "Attempting to retrieve buildtree from remotes" in result.stderr
|
|
109 |
+ assert os.path.isdir(buildtreedir)
|
|
110 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
tests/testutils/runcli.py:

@@ -191,6 +191,13 @@ class Result():

         return list(pushed)

+    def get_partial_pushed_elements(self):
+        pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', self.stderr)
+        if pushed is None:
+            return []
+
+        return list(pushed)
+
     def get_pulled_elements(self):
         pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
         if pulled is None:
@@ -198,6 +205,13 @@ class Result():

         return list(pulled)

+    def get_partial_pulled_elements(self):
+        pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled partial artifact', self.stderr)
+        if pulled is None:
+            return []
+
+        return list(pulled)
+

 class Cli():

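These helpers follow the same pattern as the existing get_pushed_elements()/get_pulled_elements(), keying off the INFO lines emitted by the new push and pull paths. A quick self-check against a sample log line (the exact status-line layout shown here is an assumption for illustration):

    import re

    stderr = "[ push:autotools/amhello.bst ] INFO    Pushed partial artifact 0f3c234 -> https://share2.example.com"
    found = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', stderr)
    assert found == ['autotools/amhello.bst']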