Tom Pollard pushed to branch tpollard/566 at BuildStream / buildstream
Commits:
- f9c6338b by Tom Pollard at 2019-01-11T14:33:14Z
7 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/element.py
- doc/source/using_config.rst
- tests/artifactcache/config.py
- + tests/integration/pushbuildtrees.py
- tests/testutils/runcli.py
Changes:
| ... | ... | @@ -74,6 +74,7 @@ class ArtifactCache(): |
| 74 | 74 |
|
| 75 | 75 |
self._has_fetch_remotes = False
|
| 76 | 76 |
self._has_push_remotes = False
|
| 77 |
+ self._has_partial_push_remotes = False
|
|
| 77 | 78 |
|
| 78 | 79 |
os.makedirs(self.extractdir, exist_ok=True)
|
| 79 | 80 |
|
| ... | ... | @@ -398,6 +399,9 @@ class ArtifactCache(): |
| 398 | 399 |
self._has_fetch_remotes = True
|
| 399 | 400 |
if remote_spec.push:
|
| 400 | 401 |
self._has_push_remotes = True
|
| 402 |
+ # Partial push requires generic push option to also be set
|
|
| 403 |
+ if remote_spec.partial_push:
|
|
| 404 |
+ self._has_partial_push_remotes = True
|
|
| 401 | 405 |
|
| 402 | 406 |
remotes[remote_spec.url] = CASRemote(remote_spec)
|
| 403 | 407 |
|
| ... | ... | @@ -596,6 +600,32 @@ class ArtifactCache(): |
| 596 | 600 |
remotes_for_project = self._remotes[element._get_project()]
|
| 597 | 601 |
return any(remote.spec.push for remote in remotes_for_project)
|
| 598 | 602 |
|
| 603 |
+ # has_partial_push_remotes():
|
|
| 604 |
+ #
|
|
| 605 |
+ # Check whether any remote repositories are available for pushing
|
|
| 606 |
+ # non-complete artifacts. This option requires the generic push value
|
|
| 607 |
+ # to also be set.
|
|
| 608 |
+ #
|
|
| 609 |
+ # Args:
|
|
| 610 |
+ # element (Element): The Element to check
|
|
| 611 |
+ #
|
|
| 612 |
+ # Returns:
|
|
| 613 |
+ # (bool): True if any remote repository is configured for optional
|
|
| 614 |
+ # partial pushes, False otherwise
|
|
| 615 |
+ #
|
|
| 616 |
+ def has_partial_push_remotes(self, *, element=None):
|
|
| 617 |
+ # If there are no partial push remotes available, we can't partial push at all
|
|
| 618 |
+ if not self._has_partial_push_remotes:
|
|
| 619 |
+ return False
|
|
| 620 |
+ elif element is None:
|
|
| 621 |
+ # At least one remote is set to allow partial pushes
|
|
| 622 |
+ return True
|
|
| 623 |
+ else:
|
|
| 624 |
+ # Check whether the specified element's project has push remotes configured
|
|
| 625 |
+ # to accept partial artifact pushes
|
|
| 626 |
+ remotes_for_project = self._remotes[element._get_project()]
|
|
| 627 |
+ return any(remote.spec.partial_push for remote in remotes_for_project)
|
|
| 628 |
+ |
|
| 599 | 629 |
# push():
|
| 600 | 630 |
#
|
| 601 | 631 |
# Push committed artifact to remote repository.
|
| ... | ... | @@ -603,6 +633,8 @@ class ArtifactCache(): |
| 603 | 633 |
# Args:
|
| 604 | 634 |
# element (Element): The Element whose artifact is to be pushed
|
| 605 | 635 |
# keys (list): The cache keys to use
|
| 636 |
+ # partial (bool): Whether the artifact is cached in a partial state
|
|
| 637 |
+ # subdir (string): Optional subdir to exclude from partial pushes
|
|
| 606 | 638 |
#
|
| 607 | 639 |
# Returns:
|
| 608 | 640 |
# (bool): True if any remote was updated, False if no pushes were required
|
| ... | ... | @@ -610,12 +642,25 @@ class ArtifactCache(): |
| 610 | 642 |
# Raises:
|
| 611 | 643 |
# (ArtifactError): if there was an error
|
| 612 | 644 |
#
|
| 613 |
- def push(self, element, keys):
|
|
| 645 |
+ def push(self, element, keys, partial=False, subdir=None):
|
|
| 614 | 646 |
refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
|
| 615 | 647 |
|
| 616 | 648 |
project = element._get_project()
|
| 617 | 649 |
|
| 618 |
- push_remotes = [r for r in self._remotes[project] if r.spec.push]
|
|
| 650 |
+ push_remotes = []
|
|
| 651 |
+ partial_remotes = []
|
|
| 652 |
+ |
|
| 653 |
+ # Create list of remotes to push to, given the current element and partial push config
|
|
| 654 |
+ if not partial:
|
|
| 655 |
+ push_remotes = [r for r in self._remotes[project] if (r.spec.push and not r.spec.partial_push)]
|
|
| 656 |
+ |
|
| 657 |
+ if self._has_partial_push_remotes:
|
|
| 658 |
+ # Create a specific list of the remotes expecting the artifact to be push in a partial
|
|
| 659 |
+ # state. This list needs to be pushed in a partial state, without the optional subdir if
|
|
| 660 |
+ # exists locally. No need to attempt pushing a partial artifact to a remote that is queued to
|
|
| 661 |
+ # to also recieve a full artifact
|
|
| 662 |
+ partial_remotes = [r for r in self._remotes[project] if (r.spec.partial_push and r.spec.push) and
|
|
| 663 |
+ r not in push_remotes]
|
|
| 619 | 664 |
|
| 620 | 665 |
pushed = False
|
| 621 | 666 |
|
| ... | ... | @@ -624,7 +669,9 @@ class ArtifactCache(): |
| 624 | 669 |
display_key = element._get_brief_display_key()
|
| 625 | 670 |
element.status("Pushing artifact {} -> {}".format(display_key, remote.spec.url))
|
| 626 | 671 |
|
| 627 |
- if self.cas.push(refs, remote):
|
|
| 672 |
+ # Passing the optional subdir allows for remote artifacts that are cached in a 'partial'
|
|
| 673 |
+ # state to be completed
|
|
| 674 |
+ if self.cas.push(refs, remote, subdir=subdir):
|
|
| 628 | 675 |
element.info("Pushed artifact {} -> {}".format(display_key, remote.spec.url))
|
| 629 | 676 |
pushed = True
|
| 630 | 677 |
else:
|
| ... | ... | @@ -632,6 +679,19 @@ class ArtifactCache(): |
| 632 | 679 |
remote.spec.url, element._get_brief_display_key()
|
| 633 | 680 |
))
|
| 634 | 681 |
|
| 682 |
+ for remote in partial_remotes:
|
|
| 683 |
+ remote.init()
|
|
| 684 |
+ display_key = element._get_brief_display_key()
|
|
| 685 |
+ element.status("Pushing partial artifact {} -> {}".format(display_key, remote.spec.url))
|
|
| 686 |
+ |
|
| 687 |
+ if self.cas.push(refs, remote, excluded_subdirs=subdir):
|
|
| 688 |
+ element.info("Pushed partial artifact {} -> {}".format(display_key, remote.spec.url))
|
|
| 689 |
+ pushed = True
|
|
| 690 |
+ else:
|
|
| 691 |
+ element.info("Remote ({}) already has {} partial cached".format(
|
|
| 692 |
+ remote.spec.url, element._get_brief_display_key()
|
|
| 693 |
+ ))
|
|
| 694 |
+ |
|
| 635 | 695 |
return pushed
|
| 636 | 696 |
|
| 637 | 697 |
# pull():
|
| ... | ... | @@ -659,14 +719,23 @@ class ArtifactCache(): |
| 659 | 719 |
element.status("Pulling artifact {} <- {}".format(display_key, remote.spec.url))
|
| 660 | 720 |
|
| 661 | 721 |
if self.cas.pull(ref, remote, progress=progress, subdir=subdir, excluded_subdirs=excluded_subdirs):
|
| 662 |
- element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
|
|
| 663 | 722 |
if subdir:
|
| 664 |
- # Attempt to extract subdir into artifact extract dir if it already exists
|
|
| 665 |
- # without containing the subdir. If the respective artifact extract dir does not
|
|
| 666 |
- # exist a complete extraction will complete.
|
|
| 667 |
- self.extract(element, key, subdir)
|
|
| 668 |
- # no need to pull from additional remotes
|
|
| 669 |
- return True
|
|
| 723 |
+ if not self.contains_subdir_artifact(element, key, subdir):
|
|
| 724 |
+ # The pull was expecting the specific subdir to be present in the remote, attempt
|
|
| 725 |
+ # to find it in other available remotes
|
|
| 726 |
+ element.info("Pulled partial artifact {} <- {}. Attempting to retrieve {} from remotes"
|
|
| 727 |
+ .format(display_key, remote.spec.url, subdir))
|
|
| 728 |
+ else:
|
|
| 729 |
+ element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
|
|
| 730 |
+ # Attempt to extract subdir into artifact extract dir if it already exists
|
|
| 731 |
+ # without containing the subdir. If the respective artifact extract dir does not
|
|
| 732 |
+ # exist a complete extraction will complete.
|
|
| 733 |
+ self.extract(element, key, subdir)
|
|
| 734 |
+ # no need to pull from additional remotes
|
|
| 735 |
+ return True
|
|
| 736 |
+ else:
|
|
| 737 |
+ element.info("Pulled artifact {} <- {}".format(display_key, remote.spec.url))
|
|
| 738 |
+ return True
|
|
| 670 | 739 |
else:
|
| 671 | 740 |
element.info("Remote ({}) does not have {} cached".format(
|
| 672 | 741 |
remote.spec.url, element._get_brief_display_key()
|
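
For reference, the remote-selection rule introduced above can be condensed into a short standalone sketch. The RemoteSpec namedtuple and partition_remotes helper below are simplified stand-ins for illustration, not BuildStream's actual classes:

    from collections import namedtuple

    # Simplified stand-in for CASRemoteSpec; only the flags relevant here.
    RemoteSpec = namedtuple('RemoteSpec', 'url push partial_push')

    def partition_remotes(remotes, partial):
        # Full pushes go to push-enabled remotes that have not opted into
        # partial pushes, unless the local artifact is itself only partial.
        push_remotes = [] if partial else [
            r for r in remotes if r.push and not r.partial_push
        ]
        # Partial pushes go to remotes that opted in via allow-partial-push,
        # skipping any remote already queued for a full push.
        partial_remotes = [
            r for r in remotes
            if r.push and r.partial_push and r not in push_remotes
        ]
        return push_remotes, partial_remotes

    full = RemoteSpec('https://cache.example.com/full', True, False)
    part = RemoteSpec('https://cache.example.com/partial', True, True)
    print(partition_remotes([full, part], partial=False))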
| ... | ... | @@ -45,7 +45,8 @@ from .. import _yaml |
| 45 | 45 |
_MAX_PAYLOAD_BYTES = 1024 * 1024
|
| 46 | 46 |
|
| 47 | 47 |
|
| 48 |
-class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key client_cert instance_name')):
|
|
| 48 |
+class CASRemoteSpec(namedtuple('CASRemoteSpec',
|
|
| 49 |
+ 'url push partial_push server_cert client_key client_cert instance_name')):
|
|
| 49 | 50 |
|
| 50 | 51 |
# _new_from_config_node
|
| 51 | 52 |
#
|
| ... | ... | @@ -53,9 +54,18 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key |
| 53 | 54 |
#
|
| 54 | 55 |
@staticmethod
|
| 55 | 56 |
def _new_from_config_node(spec_node, basedir=None):
|
| 56 |
- _yaml.node_validate(spec_node, ['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name'])
|
|
| 57 |
+ _yaml.node_validate(spec_node,
|
|
| 58 |
+ ['url', 'push', 'allow-partial-push', 'server-cert', 'client-key',
|
|
| 59 |
+ 'client-cert', 'instance-name'])
|
|
| 57 | 60 |
url = _yaml.node_get(spec_node, str, 'url')
|
| 58 | 61 |
push = _yaml.node_get(spec_node, bool, 'push', default_value=False)
|
| 62 |
+ partial_push = _yaml.node_get(spec_node, bool, 'allow-partial-push', default_value=False)
|
|
| 63 |
+ |
|
| 64 |
+ # partial_push depends on push, raise error if not configured correctly
|
|
| 65 |
+ if partial_push and not push:
|
|
| 66 |
+ provenance = _yaml.node_get_provenance(spec_node, 'allow-partial-push')
|
|
| 67 |
+ raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
| 68 |
+ "{}: allow-partial-push also requires push to be set".format(provenance))
|
|
| 59 | 69 |
if not url:
|
| 60 | 70 |
provenance = _yaml.node_get_provenance(spec_node, 'url')
|
| 61 | 71 |
raise LoadError(LoadErrorReason.INVALID_DATA,
|
| ... | ... | @@ -85,10 +95,10 @@ class CASRemoteSpec(namedtuple('CASRemoteSpec', 'url push server_cert client_key |
| 85 | 95 |
raise LoadError(LoadErrorReason.INVALID_DATA,
|
| 86 | 96 |
"{}: 'client-cert' was specified without 'client-key'".format(provenance))
|
| 87 | 97 |
|
| 88 |
- return CASRemoteSpec(url, push, server_cert, client_key, client_cert, instance_name)
|
|
| 98 |
+ return CASRemoteSpec(url, push, partial_push, server_cert, client_key, client_cert, instance_name)
|
|
| 89 | 99 |
|
| 90 | 100 |
|
| 91 |
-CASRemoteSpec.__new__.__defaults__ = (None, None, None, None)
|
|
| 101 |
+CASRemoteSpec.__new__.__defaults__ = (False, None, None, None, None)
|
|
| 92 | 102 |
|
| 93 | 103 |
|
| 94 | 104 |
class BlobNotFound(CASError):
|
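
A minimal standalone illustration of the validation rule above, assuming a plain dict in place of the YAML node and ValueError in place of LoadError(LoadErrorReason.INVALID_DATA):

    def validate_remote(config):
        # allow-partial-push is only meaningful on a remote we can push to.
        push = config.get('push', False)
        partial_push = config.get('allow-partial-push', False)
        if partial_push and not push:
            raise ValueError("allow-partial-push also requires push to be set")
        return push, partial_push

    # Accepted: both flags set.
    print(validate_remote({'push': True, 'allow-partial-push': True}))
    # Rejected: allow-partial-push without push raises ValueError.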
| ... | ... | @@ -283,34 +293,47 @@ class CASCache(): |
| 283 | 293 |
# (bool): True if pull was successful, False if ref was not available
|
| 284 | 294 |
#
|
| 285 | 295 |
def pull(self, ref, remote, *, progress=None, subdir=None, excluded_subdirs=None):
|
| 286 |
- try:
|
|
| 287 |
- remote.init()
|
|
| 288 | 296 |
|
| 289 |
- request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
|
|
| 290 |
- request.key = ref
|
|
| 291 |
- response = remote.ref_storage.GetReference(request)
|
|
| 297 |
+ tree_found = False
|
|
| 292 | 298 |
|
| 293 |
- tree = remote_execution_pb2.Digest()
|
|
| 294 |
- tree.hash = response.digest.hash
|
|
| 295 |
- tree.size_bytes = response.digest.size_bytes
|
|
| 299 |
+ while True:
|
|
| 300 |
+ try:
|
|
| 301 |
+ if not tree_found:
|
|
| 302 |
+ remote.init()
|
|
| 296 | 303 |
|
| 297 |
- # Check if the element artifact is present, if so just fetch the subdir.
|
|
| 298 |
- if subdir and os.path.exists(self.objpath(tree)):
|
|
| 299 |
- self._fetch_subdir(remote, tree, subdir)
|
|
| 300 |
- else:
|
|
| 301 |
- # Fetch artifact, excluded_subdirs determined in pullqueue
|
|
| 302 |
- self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
|
|
| 304 |
+ request = buildstream_pb2.GetReferenceRequest(instance_name=remote.spec.instance_name)
|
|
| 305 |
+ request.key = ref
|
|
| 306 |
+ response = remote.ref_storage.GetReference(request)
|
|
| 303 | 307 |
|
| 304 |
- self.set_ref(ref, tree)
|
|
| 308 |
+ tree = remote_execution_pb2.Digest()
|
|
| 309 |
+ tree.hash = response.digest.hash
|
|
| 310 |
+ tree.size_bytes = response.digest.size_bytes
|
|
| 305 | 311 |
|
| 306 |
- return True
|
|
| 307 |
- except grpc.RpcError as e:
|
|
| 308 |
- if e.code() != grpc.StatusCode.NOT_FOUND:
|
|
| 309 |
- raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
|
|
| 310 |
- else:
|
|
| 311 |
- return False
|
|
| 312 |
- except BlobNotFound as e:
|
|
| 313 |
- return False
|
|
| 312 |
+ # Check if the element artifact is present, if so just fetch the subdir.
|
|
| 313 |
+ if subdir and os.path.exists(self.objpath(tree)):
|
|
| 314 |
+ self._fetch_subdir(remote, tree, subdir)
|
|
| 315 |
+ else:
|
|
| 316 |
+ # Fetch artifact, excluded_subdirs determined in pullqueue
|
|
| 317 |
+ self._fetch_directory(remote, tree, excluded_subdirs=excluded_subdirs)
|
|
| 318 |
+ |
|
| 319 |
+ self.set_ref(ref, tree)
|
|
| 320 |
+ |
|
| 321 |
+ return True
|
|
| 322 |
+ except grpc.RpcError as e:
|
|
| 323 |
+ if e.code() != grpc.StatusCode.NOT_FOUND:
|
|
| 324 |
+ raise CASError("Failed to pull ref {}: {}".format(ref, e)) from e
|
|
| 325 |
+ else:
|
|
| 326 |
+ return False
|
|
| 327 |
+ except BlobNotFound as e:
|
|
| 328 |
+ if not excluded_subdirs and subdir:
|
|
| 329 |
+ # The remote has the top level digest but could not complete a full pull,
|
|
| 330 |
+ # attempt partial without the need to initialise and check for the artifact
|
|
| 331 |
+ # digest. This default behaviour of dropping back to partial pulls could
|
|
| 332 |
+ # be made a configurable warning at the artifactcache level.
|
|
| 333 |
+ tree_found = True
|
|
| 334 |
+ excluded_subdirs, subdir = subdir, excluded_subdirs
|
|
| 335 |
+ else:
|
|
| 336 |
+ return False
|
|
| 314 | 337 |
|
| 315 | 338 |
# pull_tree():
|
| 316 | 339 |
#
|
| ... | ... | @@ -355,6 +378,8 @@ class CASCache(): |
| 355 | 378 |
# Args:
|
| 356 | 379 |
# refs (list): The refs to push
|
| 357 | 380 |
# remote (CASRemote): The remote to push to
|
| 381 |
+ # subdir (string): Optional specific subdir to include in the push
|
|
| 382 |
+ # excluded_subdirs (string): Optional subdir to exclude from the push
|
|
| 358 | 383 |
#
|
| 359 | 384 |
# Returns:
|
| 360 | 385 |
# (bool): True if any remote was updated, False if no pushes were required
|
| ... | ... | @@ -362,7 +387,7 @@ class CASCache(): |
| 362 | 387 |
# Raises:
|
| 363 | 388 |
# (CASError): if there was an error
|
| 364 | 389 |
#
|
| 365 |
- def push(self, refs, remote):
|
|
| 390 |
+ def push(self, refs, remote, *, subdir=None, excluded_subdirs=None):
|
|
| 366 | 391 |
skipped_remote = True
|
| 367 | 392 |
try:
|
| 368 | 393 |
for ref in refs:
|
| ... | ... | @@ -376,15 +401,18 @@ class CASCache(): |
| 376 | 401 |
response = remote.ref_storage.GetReference(request)
|
| 377 | 402 |
|
| 378 | 403 |
if response.digest.hash == tree.hash and response.digest.size_bytes == tree.size_bytes:
|
| 379 |
- # ref is already on the server with the same tree
|
|
| 380 |
- continue
|
|
| 404 |
+ # ref is already on the server with the same tree, however it might be partially cached.
|
|
| 405 |
+ # If the artifact is not set to be pushed partially, attempt to 'complete' the remote
|
|
| 406 |
+ # artifact if needed, else continue.
|
|
| 407 |
+ if excluded_subdirs or self.verify_digest_on_remote(remote, self._get_subdir(tree, subdir)):
|
|
| 408 |
+ continue
|
|
| 381 | 409 |
|
| 382 | 410 |
except grpc.RpcError as e:
|
| 383 | 411 |
if e.code() != grpc.StatusCode.NOT_FOUND:
|
| 384 | 412 |
# Intentionally re-raise RpcError for outer except block.
|
| 385 | 413 |
raise
|
| 386 | 414 |
|
| 387 |
- self._send_directory(remote, tree)
|
|
| 415 |
+ self._send_directory(remote, tree, excluded_dir=excluded_subdirs)
|
|
| 388 | 416 |
|
| 389 | 417 |
request = buildstream_pb2.UpdateReferenceRequest(instance_name=remote.spec.instance_name)
|
| 390 | 418 |
request.keys.append(ref)
|
| ... | ... | @@ -866,10 +894,17 @@ class CASCache(): |
| 866 | 894 |
a += 1
|
| 867 | 895 |
b += 1
|
| 868 | 896 |
|
| 869 |
- def _reachable_refs_dir(self, reachable, tree, update_mtime=False):
|
|
| 897 |
+ def _reachable_refs_dir(self, reachable, tree, update_mtime=False, subdir=False):
|
|
| 870 | 898 |
if tree.hash in reachable:
|
| 871 | 899 |
return
|
| 872 | 900 |
|
| 901 |
+ # If looping through subdir digests, skip processing if
|
|
| 902 |
+ # ref path does not exist, allowing for partial objects
|
|
| 903 |
+ if subdir and not os.path.exists(self.objpath(tree)):
|
|
| 904 |
+ return
|
|
| 905 |
+ |
|
| 906 |
+ # Raises a FileNotFoundError if the path does not exist,
|
|
| 907 |
+ # which should only be thrown on the top level digest
|
|
| 873 | 908 |
if update_mtime:
|
| 874 | 909 |
os.utime(self.objpath(tree))
|
| 875 | 910 |
|
| ... | ... | @@ -886,9 +921,9 @@ class CASCache(): |
| 886 | 921 |
reachable.add(filenode.digest.hash)
|
| 887 | 922 |
|
| 888 | 923 |
for dirnode in directory.directories:
|
| 889 |
- self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime)
|
|
| 924 |
+ self._reachable_refs_dir(reachable, dirnode.digest, update_mtime=update_mtime, subdir=True)
|
|
| 890 | 925 |
|
| 891 |
- def _required_blobs(self, directory_digest):
|
|
| 926 |
+ def _required_blobs(self, directory_digest, excluded_dir=None):
|
|
| 892 | 927 |
# parse directory, and recursively add blobs
|
| 893 | 928 |
d = remote_execution_pb2.Digest()
|
| 894 | 929 |
d.hash = directory_digest.hash
|
| ... | ... | @@ -907,7 +942,8 @@ class CASCache(): |
| 907 | 942 |
yield d
|
| 908 | 943 |
|
| 909 | 944 |
for dirnode in directory.directories:
|
| 910 |
- yield from self._required_blobs(dirnode.digest)
|
|
| 945 |
+ if dirnode.name != excluded_dir:
|
|
| 946 |
+ yield from self._required_blobs(dirnode.digest)
|
|
| 911 | 947 |
|
| 912 | 948 |
def _fetch_blob(self, remote, digest, stream):
|
| 913 | 949 |
resource_name_components = ['blobs', digest.hash, str(digest.size_bytes)]
|
| ... | ... | @@ -1029,6 +1065,7 @@ class CASCache(): |
| 1029 | 1065 |
objpath = self._ensure_blob(remote, dir_digest)
|
| 1030 | 1066 |
|
| 1031 | 1067 |
directory = remote_execution_pb2.Directory()
|
| 1068 |
+ |
|
| 1032 | 1069 |
with open(objpath, 'rb') as f:
|
| 1033 | 1070 |
directory.ParseFromString(f.read())
|
| 1034 | 1071 |
|
| ... | ... | @@ -1104,9 +1141,8 @@ class CASCache(): |
| 1104 | 1141 |
|
| 1105 | 1142 |
assert response.committed_size == digest.size_bytes
|
| 1106 | 1143 |
|
| 1107 |
- def _send_directory(self, remote, digest, u_uid=uuid.uuid4()):
|
|
| 1108 |
- required_blobs = self._required_blobs(digest)
|
|
| 1109 |
- |
|
| 1144 |
+ def _send_directory(self, remote, digest, u_uid=uuid.uuid4(), excluded_dir=None):
|
|
| 1145 |
+ required_blobs = self._required_blobs(digest, excluded_dir=excluded_dir)
|
|
| 1110 | 1146 |
missing_blobs = dict()
|
| 1111 | 1147 |
# Limit size of FindMissingBlobs request
|
| 1112 | 1148 |
for required_blobs_group in _grouper(required_blobs, 512):
|
| ... | ... | @@ -1800,13 +1800,19 @@ class Element(Plugin): |
| 1800 | 1800 |
# (bool): True if this element does not need a push job to be created
|
| 1801 | 1801 |
#
|
| 1802 | 1802 |
def _skip_push(self):
|
| 1803 |
+ |
|
| 1803 | 1804 |
if not self.__artifacts.has_push_remotes(element=self):
|
| 1804 | 1805 |
# No push remotes for this element's project
|
| 1805 | 1806 |
return True
|
| 1806 | 1807 |
|
| 1807 | 1808 |
# Do not push elements that aren't cached, or that are cached with a dangling buildtree
|
| 1808 |
- # artifact unless element type is expected to have an an empty buildtree directory
|
|
| 1809 |
- if not self._cached_buildtree():
|
|
| 1809 |
+ # artifact unless element type is expected to have an empty buildtree directory. Check
|
|
| 1810 |
+ # that this default behaviour is not overridden via a remote configured to allow pushing
|
|
| 1811 |
+ # artifacts without their corresponding buildtree.
|
|
| 1812 |
+ if not self._cached():
|
|
| 1813 |
+ return True
|
|
| 1814 |
+ |
|
| 1815 |
+ if not self._cached_buildtree() and not self.__artifacts.has_partial_push_remotes(element=self):
|
|
| 1810 | 1816 |
return True
|
| 1811 | 1817 |
|
| 1812 | 1818 |
# Do not push tainted artifact
|
| ... | ... | @@ -1817,11 +1823,14 @@ class Element(Plugin): |
| 1817 | 1823 |
|
| 1818 | 1824 |
# _push():
|
| 1819 | 1825 |
#
|
| 1820 |
- # Push locally cached artifact to remote artifact repository.
|
|
| 1826 |
+ # Push locally cached artifact to remote artifact repository. An attempt
|
|
| 1827 |
+ # will be made to push partial artifacts if the current configuration dictates.
|
|
| 1828 |
+ # If a remote set for 'full' artifact pushes is found to be cached partially
|
|
| 1829 |
+ # in the remote, an attempt will be made to 'complete' it.
|
|
| 1821 | 1830 |
#
|
| 1822 | 1831 |
# Returns:
|
| 1823 | 1832 |
# (bool): True if the remote was updated, False if it already existed
|
| 1824 |
- # and no updated was required
|
|
| 1833 |
+ # and no update was required
|
|
| 1825 | 1834 |
#
|
| 1826 | 1835 |
def _push(self):
|
| 1827 | 1836 |
self.__assert_cached()
|
| ... | ... | @@ -1830,8 +1839,17 @@ class Element(Plugin): |
| 1830 | 1839 |
self.warn("Not pushing tainted artifact.")
|
| 1831 | 1840 |
return False
|
| 1832 | 1841 |
|
| 1833 |
- # Push all keys used for local commit
|
|
| 1834 |
- pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit())
|
|
| 1842 |
+ # Push all keys used for local commit; this could be full or partial,
|
|
| 1843 |
+ # given the previous _skip_push() logic. If the buildtree isn't cached, then
|
|
| 1844 |
+ # set partial push
|
|
| 1845 |
+ |
|
| 1846 |
+ partial = False
|
|
| 1847 |
+ subdir = 'buildtree'
|
|
| 1848 |
+ if not self._cached_buildtree():
|
|
| 1849 |
+ partial = True
|
|
| 1850 |
+ |
|
| 1851 |
+ pushed = self.__artifacts.push(self, self.__get_cache_keys_for_commit(), partial=partial, subdir=subdir)
|
|
| 1852 |
+ |
|
| 1835 | 1853 |
if not pushed:
|
| 1836 | 1854 |
return False
|
| 1837 | 1855 |
|
| ... | ... | @@ -59,6 +59,15 @@ configuration: |
| 59 | 59 |
# Add another cache to pull from
|
| 60 | 60 |
- url: https://anothercache.com/artifacts:8080
|
| 61 | 61 |
server-cert: another_server.crt
|
| 62 |
+ # Add a cache to push/pull to/from, specifying
|
|
| 63 |
+ # that you wish to push artifacts in a 'partial'
|
|
| 64 |
+ # state (that is, without the respective buildtree).
|
|
| 65 |
+ # Note that allow-partial-push requires push to also
|
|
| 66 |
+ # be set.
|
|
| 67 |
+ - url: https://anothercache.com/artifacts:11003
|
|
| 68 |
+ push: true
|
|
| 69 |
+ allow-partial-push: true
|
|
| 70 |
+ |
|
| 62 | 71 |
|
| 63 | 72 |
.. note::
|
| 64 | 73 |
|
| ... | ... | @@ -86,6 +95,14 @@ configuration: |
| 86 | 95 |
# Add another cache to pull from
|
| 87 | 96 |
- url: https://ourprojectcache.com/artifacts:8080
|
| 88 | 97 |
server-cert: project_server.crt
|
| 98 |
+ # Add a cache to push/pull to/from, specifying
|
|
| 99 |
+ # that you wish to push artifacts in a 'partial'
|
|
| 100 |
+ # state (that is, without the respective buildtree).
|
|
| 101 |
+ # Note that allow-partial-push requires push to also
|
|
| 102 |
+ # be set.
|
|
| 103 |
+ - url: https://anothercache.com/artifacts:11003
|
|
| 104 |
+ push: true
|
|
| 105 |
+ allow-partial-push: true
|
|
| 89 | 106 |
|
| 90 | 107 |
|
| 91 | 108 |
.. note::
|
| ... | ... | @@ -140,3 +140,28 @@ def test_missing_certs(cli, datafiles, config_key, config_value): |
| 140 | 140 |
# This does not happen for a simple `bst show`.
|
| 141 | 141 |
result = cli.run(project=project, args=['pull', 'element.bst'])
|
| 142 | 142 |
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
|
| 143 |
+ |
|
| 144 |
+ |
|
| 145 |
+# Assert that if allow-partial-push is specified as true without push also being
|
|
| 146 |
+# set, we get a comprehensible LoadError instead of an unhandled exception.
|
|
| 147 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 148 |
+def test_partial_push_error(cli, datafiles):
|
|
| 149 |
+ project = os.path.join(datafiles.dirname, datafiles.basename, 'project', 'elements')
|
|
| 150 |
+ |
|
| 151 |
+ project_conf = {
|
|
| 152 |
+ 'name': 'test',
|
|
| 153 |
+ |
|
| 154 |
+ 'artifacts': {
|
|
| 155 |
+ 'url': 'https://cache.example.com:12345',
|
|
| 156 |
+ 'allow-partial-push': True
|
|
| 157 |
+ }
|
|
| 158 |
+ }
|
|
| 159 |
+ project_conf_file = os.path.join(project, 'project.conf')
|
|
| 160 |
+ _yaml.dump(project_conf, project_conf_file)
|
|
| 161 |
+ |
|
| 162 |
+ # Use `pull` here to ensure we try to initialize the remotes, triggering the error
|
|
| 163 |
+ #
|
|
| 164 |
+ # This does not happen for a simple `bst show`.
|
|
| 165 |
+ result = cli.run(project=project, args=['pull', 'target.bst'])
|
|
| 166 |
+ result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
|
|
| 167 |
+ assert "allow-partial-push also requires push to be set" in result.stderr
|
| 1 |
+import os
|
|
| 2 |
+import shutil
|
|
| 3 |
+import pytest
|
|
| 4 |
+import subprocess
|
|
| 5 |
+ |
|
| 6 |
+from buildstream import _yaml
|
|
| 7 |
+from tests.testutils import cli_integration as cli, create_artifact_share
|
|
| 8 |
+from tests.testutils.integration import assert_contains
|
|
| 9 |
+from tests.testutils.site import HAVE_BWRAP, IS_LINUX
|
|
| 10 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
| 11 |
+ |
|
| 12 |
+ |
|
| 13 |
+DATA_DIR = os.path.join(
|
|
| 14 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
| 15 |
+ "project"
|
|
| 16 |
+)
|
|
| 17 |
+ |
|
| 18 |
+ |
|
| 19 |
+# Remove artifact cache & set cli.config value of pull-buildtrees
|
|
| 20 |
+# to false, which is the default user context. The cache has to be
|
|
| 21 |
+# cleared as just forcefully removing the refpath leaves dangling objects.
|
|
| 22 |
+def default_state(cli, tmpdir, share):
|
|
| 23 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'artifacts'))
|
|
| 24 |
+ cli.configure({
|
|
| 25 |
+ 'artifacts': {'url': share.repo, 'push': False},
|
|
| 26 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts'),
|
|
| 27 |
+ 'cache': {'pull-buildtrees': False},
|
|
| 28 |
+ })
|
|
| 29 |
+ |
|
| 30 |
+ |
|
| 31 |
+# Tests to capture the integration of the optional push of buildtrees.
|
|
| 32 |
+# The behaviour should encompass pushing artifacts that are already cached
|
|
| 33 |
+# without a buildtree as well as artifacts that are cached with their buildtree.
|
|
| 34 |
+# This option is handled via 'allow-partial-push' on a per-artifact-remote config
|
|
| 35 |
+# node basis. Multiple remote config nodes can point to the same url and as such can
|
|
| 36 |
+# have different 'allow-partial-push' options; tests need to cover this using project
|
|
| 37 |
+# confs.
|
|
| 38 |
+@pytest.mark.integration
|
|
| 39 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 40 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
| 41 |
+def test_pushbuildtrees(cli, tmpdir, datafiles, integration_cache):
|
|
| 42 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 43 |
+ element_name = 'autotools/amhello.bst'
|
|
| 44 |
+ |
|
| 45 |
+ # Create artifact shares for pull & push testing
|
|
| 46 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'share1')) as share1,\
|
|
| 47 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share2')) as share2,\
|
|
| 48 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share3')) as share3,\
|
|
| 49 |
+ create_artifact_share(os.path.join(str(tmpdir), 'share4')) as share4:
|
|
| 50 |
+ |
|
| 51 |
+ cli.configure({
|
|
| 52 |
+ 'artifacts': {'url': share1.repo, 'push': True},
|
|
| 53 |
+ 'artifactdir': os.path.join(str(tmpdir), 'artifacts')
|
|
| 54 |
+ })
|
|
| 55 |
+ |
|
| 56 |
+ cli.configure({'artifacts': [{'url': share1.repo, 'push': True},
|
|
| 57 |
+ {'url': share2.repo, 'push': True, 'allow-partial-push': True}]})
|
|
| 58 |
+ |
|
| 59 |
+ # Build autotools element, check pushed, delete local.
|
|
| 60 |
+ # As share 2 has push & allow-partial-push set a true, it
|
|
| 61 |
+ # should have pushed the artifacts, without the cached buildtrees,
|
|
| 62 |
+ # to it.
|
|
| 63 |
+ result = cli.run(project=project, args=['build', element_name])
|
|
| 64 |
+ assert result.exit_code == 0
|
|
| 65 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
| 66 |
+ elementdigest = share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
| 67 |
+ buildtreedir = os.path.join(str(tmpdir), 'artifacts', 'extract', 'test', 'autotools-amhello',
|
|
| 68 |
+ elementdigest.hash, 'buildtree')
|
|
| 69 |
+ assert os.path.isdir(buildtreedir)
|
|
| 70 |
+ assert element_name in result.get_partial_pushed_elements()
|
|
| 71 |
+ assert element_name in result.get_pushed_elements()
|
|
| 72 |
+ assert share1.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
| 73 |
+ assert share2.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
| 74 |
+ default_state(cli, tmpdir, share1)
|
|
| 75 |
+ |
|
| 76 |
+ # Check that after explicitly pulling an artifact without its buildtree,
|
|
| 77 |
+ # we can push it to another remote that is configured to accept the partial
|
|
| 78 |
+ # artifact
|
|
| 79 |
+ result = cli.run(project=project, args=['pull', element_name])
|
|
| 80 |
+ assert element_name in result.get_pulled_elements()
|
|
| 81 |
+ cli.configure({'artifacts': {'url': share3.repo, 'push': True, 'allow-partial-push': True}})
|
|
| 82 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
| 83 |
+ assert not os.path.isdir(buildtreedir)
|
|
| 84 |
+ result = cli.run(project=project, args=['push', element_name])
|
|
| 85 |
+ assert result.exit_code == 0
|
|
| 86 |
+ assert element_name in result.get_partial_pushed_elements()
|
|
| 87 |
+ assert element_name not in result.get_pushed_elements()
|
|
| 88 |
+ assert share3.has_artifact('test', element_name, cli.get_element_key(project, element_name))
|
|
| 89 |
+ default_state(cli, tmpdir, share3)
|
|
| 90 |
+ |
|
| 91 |
+ # Delete the local cache and pull the partial artifact from share 3,
|
|
| 92 |
+ # this should not include the buildtree when extracted locally, even when
|
|
| 93 |
+ # pull-buildtrees is given as a cli parameter, as no available remotes will
|
|
| 94 |
+ # contain the buildtree
|
|
| 95 |
+ assert not os.path.isdir(buildtreedir)
|
|
| 96 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
| 97 |
+ result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
|
|
| 98 |
+ assert element_name in result.get_partial_pulled_elements()
|
|
| 99 |
+ assert not os.path.isdir(buildtreedir)
|
|
| 100 |
+ default_state(cli, tmpdir, share3)
|
|
| 101 |
+ |
|
| 102 |
+ # Delete the local cache and attempt to pull a 'full' artifact, including its
|
|
| 103 |
+ # buildtree. As before, share3 being the first listed remote will not have
|
|
| 104 |
+ # the buildtree available and should spawn a partial pull. Having share1 as the
|
|
| 105 |
+ # second available remote should allow the buildtree to be pulled, thus 'completing'
|
|
| 106 |
+ # the artifact
|
|
| 107 |
+ cli.configure({'artifacts': [{'url': share3.repo, 'push': True, 'allow-partial-push': True},
|
|
| 108 |
+ {'url': share1.repo, 'push': True}]})
|
|
| 109 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
| 110 |
+ result = cli.run(project=project, args=['--pull-buildtrees', 'pull', element_name])
|
|
| 111 |
+ assert element_name in result.get_partial_pulled_elements()
|
|
| 112 |
+ assert element_name in result.get_pulled_elements()
|
|
| 113 |
+ assert "Attempting to retrieve buildtree from remotes" in result.stderr
|
|
| 114 |
+ assert os.path.isdir(buildtreedir)
|
|
| 115 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
| 116 |
+ |
|
| 117 |
+ # Test that we are able to 'complete' an artifact on a server which is cached partially,
|
|
| 118 |
+ # but has now been configured for full artifact pushing. This should require only pushing
|
|
| 119 |
+ # the missing blobs, which should be those of just the buildtree. In this case changing
|
|
| 120 |
+ # share3 to full pushes should exercise this
|
|
| 121 |
+ cli.configure({'artifacts': {'url': share3.repo, 'push': True}})
|
|
| 122 |
+ result = cli.run(project=project, args=['push', element_name])
|
|
| 123 |
+ assert element_name in result.get_pushed_elements()
|
|
| 124 |
+
|
|
| 125 |
+ # Ensure that the same remote url can be defined multiple times with differing push
|
|
| 126 |
+ # config. BuildStream supports the same remote having different configurations, meaning
|
|
| 127 |
+ # partial pushing can differ between elements defined in a top-level project.conf and
|
|
| 128 |
+ # those from a junctioned project. Assert that elements are pushed to the same remote in
|
|
| 129 |
+ # a state defined via their respective project.confs
|
|
| 130 |
+ default_state(cli, tmpdir, share1)
|
|
| 131 |
+ cli.configure({'artifactdir': os.path.join(str(tmpdir), 'artifacts')}, reset=True)
|
|
| 132 |
+ junction = os.path.join(project, 'elements', 'junction')
|
|
| 133 |
+ os.mkdir(junction)
|
|
| 134 |
+ shutil.copy2(os.path.join(project, 'elements', element_name), junction)
|
|
| 135 |
+ |
|
| 136 |
+ junction_conf = {}
|
|
| 137 |
+ project_conf = {}
|
|
| 138 |
+ junction_conf['name'] = 'amhello'
|
|
| 139 |
+ junction_conf['artifacts'] = {'url': share4.repo, 'push': True, 'allow-partial-push': True}
|
|
| 140 |
+ _yaml.dump(junction_conf, os.path.join(junction, 'project.conf'))
|
|
| 141 |
+ project_conf['artifacts'] = {'url': share4.repo, 'push': True}
|
|
| 142 |
+ |
|
| 143 |
+ # Read project.conf, the junction project.conf and buildstream.conf
|
|
| 144 |
+ # before running bst
|
|
| 145 |
+ with open(os.path.join(project, 'project.conf'), 'r') as f:
|
|
| 146 |
+ print(f.read())
|
|
| 147 |
+ with open(os.path.join(junction, 'project.conf'), 'r') as f:
|
|
| 148 |
+ print(f.read())
|
|
| 149 |
+ with open(os.path.join(project, 'cache', 'buildstream.conf'), 'r') as f:
|
|
| 150 |
+ print(f.read())
|
|
| 151 |
+ |
|
| 152 |
+ result = cli.run(project=project, args=['build', 'junction/amhello.bst'], project_config=project_conf)
|
|
| 153 |
+ |
|
| 154 |
+ # Read project.conf, the junction project.conf and buildstream.conf
|
|
| 155 |
+ # after running bst
|
|
| 156 |
+ with open(os.path.join(project, 'project.conf'), 'r') as f:
|
|
| 157 |
+ print(f.read())
|
|
| 158 |
+ with open(os.path.join(junction, 'project.conf'), 'r') as f:
|
|
| 159 |
+ print(f.read())
|
|
| 160 |
+ with open(os.path.join(project, 'cache', 'buildstream.conf'), 'r') as f:
|
|
| 161 |
+ print(f.read())
|
|
| 162 |
+ |
|
| 163 |
+ assert 'junction/amhello.bst' in result.get_partial_pushed_elements()
|
|
| 164 |
+ assert 'base/base-alpine.bst' in result.get_pushed_elements()
|
| ... | ... | @@ -208,6 +208,13 @@ class Result(): |
| 208 | 208 |
|
| 209 | 209 |
return list(pushed)
|
| 210 | 210 |
|
| 211 |
+ def get_partial_pushed_elements(self):
|
|
| 212 |
+ pushed = re.findall(r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact', self.stderr)
|
|
| 213 |
+ if pushed is None:
|
|
| 214 |
+ return []
|
|
| 215 |
+ |
|
| 216 |
+ return list(pushed)
|
|
| 217 |
+ |
|
| 211 | 218 |
def get_pulled_elements(self):
|
| 212 | 219 |
pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled artifact', self.stderr)
|
| 213 | 220 |
if pulled is None:
|
| ... | ... | @@ -215,6 +222,13 @@ class Result(): |
| 215 | 222 |
|
| 216 | 223 |
return list(pulled)
|
| 217 | 224 |
|
| 225 |
+ def get_partial_pulled_elements(self):
|
|
| 226 |
+ pulled = re.findall(r'\[\s*pull:(\S+)\s*\]\s*INFO\s*Pulled partial artifact', self.stderr)
|
|
| 227 |
+ if pulled is None:
|
|
| 228 |
+ return []
|
|
| 229 |
+ |
|
| 230 |
+ return list(pulled)
|
|
| 231 |
+ |
|
| 218 | 232 |
|
| 219 | 233 |
class Cli():
|
| 220 | 234 |
|
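
As a quick sanity check of the pattern these helpers match, here is a hand-written approximation of the log line (not captured from a real run):

    import re

    stderr = "[ push:element.bst ] INFO    Pushed partial artifact abc123 -> https://cache"
    pattern = r'\[\s*push:(\S+)\s*\]\s*INFO\s*Pushed partial artifact'
    print(re.findall(pattern, stderr))   # ['element.bst']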
| ... | ... | @@ -235,11 +249,15 @@ class Cli(): |
| 235 | 249 |
#
|
| 236 | 250 |
# Args:
|
| 237 | 251 |
# config (dict): The user configuration to use
|
| 252 |
+ # reset (bool): Optional reset of stored config
|
|
| 238 | 253 |
#
|
| 239 |
- def configure(self, config):
|
|
| 254 |
+ def configure(self, config, reset=False):
|
|
| 240 | 255 |
if self.config is None:
|
| 241 | 256 |
self.config = {}
|
| 242 | 257 |
|
| 258 |
+ if reset:
|
|
| 259 |
+ self.config.clear()
|
|
| 260 |
+ |
|
| 243 | 261 |
for key, val in config.items():
|
| 244 | 262 |
self.config[key] = val
|
| 245 | 263 |
|
