James Ennis pushed to branch jennis/refactor_artifact_log at BuildStream / buildstream
Commits:

- 24d29a17 by Javier Jardón at 2019-02-12T12:27:42Z
- 80dcf65d by Javier Jardón at 2019-02-12T12:27:42Z
- 58b84905 by Javier Jardón at 2019-02-12T12:27:42Z
- 95d9b9ae by Javier Jardón at 2019-02-12T13:31:56Z
- b022cd75 by James Ennis at 2019-02-12T15:55:34Z
- e0553888 by James Ennis at 2019-02-12T15:55:34Z
- 91f7fd76 by James Ennis at 2019-02-12T15:55:34Z
- c81c4ecd by James Ennis at 2019-02-12T15:55:34Z
- 4a8fcce5 by James Ennis at 2019-02-12T15:55:34Z
- 0c81795d by James Ennis at 2019-02-12T15:55:34Z
- ff842a46 by James Ennis at 2019-02-12T15:55:34Z
- da377eeb by James Ennis at 2019-02-12T15:55:34Z
- f972df95 by James Ennis at 2019-02-12T15:55:34Z
- b464e46c by James Ennis at 2019-02-12T15:55:34Z
- 9d7952df by James Ennis at 2019-02-12T15:55:34Z
- 1f6ea091 by James Ennis at 2019-02-12T15:55:34Z
- cd5dd5da by James Ennis at 2019-02-12T15:55:34Z
- b7cb1ee2 by James Ennis at 2019-02-12T15:55:34Z
12 changed files:
- .gitlab-ci.yml
- buildstream/_artifactcache.py
- buildstream/_artifactelement.py (new file)
- buildstream/_cas/cascache.py
- buildstream/_exceptions.py
- buildstream/_frontend/cli.py
- buildstream/_loader/metaelement.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/element.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
Changes:
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -53,26 +53,20 @@ tests-fedora-28:
   image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:28-master-46405991
   <<: *tests
 
-tests-ubuntu-18.04:
-  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-ubuntu:18.04-master-46405991
+tests-fedora-29:
+  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:29-master-47052095
   <<: *tests
 
-tests-python-3.7-stretch:
-  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-python:3.7-stretch-master-46405991
+tests-ubuntu-18.04:
+  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-ubuntu:18.04-master-46405991
   <<: *tests
 
-  variables:
-    # Note that we explicitly specify TOXENV in this case because this
-    # image has both 3.6 and 3.7 versions. python3.6 cannot be removed because
-    # some of our base dependencies declare it as their runtime dependency.
-    TOXENV: py37
-
 tests-centos-7.6:
   <<: *tests
   image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-centos:7.6.1810-master-46405991
 
-overnight-fedora-28-aarch64:
-  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:aarch64-28-master-46405991
+overnight-fedora-29-aarch64:
+  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:aarch64-29-master-47052095
   tags:
   - aarch64
   <<: *tests
@@ -91,7 +85,7 @@ overnight-fedora-28-aarch64:
 tests-unix:
   # Use fedora here, to a) run a test on fedora and b) ensure that we
   # can get rid of ostree - this is not possible with debian-8
-  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:28-master-46405991
+  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:29-master-47052095
   <<: *tests
   variables:
     BST_FORCE_BACKEND: "unix"
@@ -109,7 +103,7 @@ tests-unix:
 
 tests-fedora-missing-deps:
   # Ensure that tests behave nicely while missing bwrap and ostree
-  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:28-master-46405991
+  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:29-master-47052095
   <<: *tests
 
   script:
@@ -128,7 +122,7 @@ tests-fedora-update-deps:
   # Check if the tests pass after updating requirements to their latest
   # allowed version.
   allow_failure: true
-  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:28-master-46405991
+  image: registry.gitlab.com/buildstream/buildstream-docker-images/testsuite-fedora:29-master-47052095
   <<: *tests
 
   script:
@@ -289,6 +283,7 @@ coverage:
   dependencies:
   - tests-debian-9
   - tests-fedora-28
+  - tests-fedora-29
   - tests-fedora-missing-deps
   - tests-ubuntu-18.04
   - tests-unix
--- a/buildstream/_artifactcache.py
+++ b/buildstream/_artifactcache.py
@@ -19,7 +19,6 @@
 
 import multiprocessing
 import os
-import string
 from collections.abc import Mapping
 
 from .types import _KeyStrength
@@ -29,6 +28,7 @@ from . import utils
 from . import _yaml
 
 from ._cas import CASRemote, CASRemoteSpec
+from .storage._casbaseddirectory import CasBasedDirectory
 
 
 CACHE_SIZE_FILE = "cache_size"
@@ -112,37 +112,6 @@ class ArtifactCache():
 
         self._calculate_cache_quota()
 
-    # get_artifact_fullname()
-    #
-    # Generate a full name for an artifact, including the
-    # project namespace, element name and cache key.
-    #
-    # This can also be used as a relative path safely, and
-    # will normalize parts of the element name such that only
-    # digits, letters and some select characters are allowed.
-    #
-    # Args:
-    #     element (Element): The Element object
-    #     key (str): The element's cache key
-    #
-    # Returns:
-    #     (str): The relative path for the artifact
-    #
-    def get_artifact_fullname(self, element, key):
-        project = element._get_project()
-
-        # Normalize ostree ref unsupported chars
-        valid_chars = string.digits + string.ascii_letters + '-._'
-        element_name = ''.join([
-            x if x in valid_chars else '_'
-            for x in element.normal_name
-        ])
-
-        assert key is not None
-
-        # assume project and element names are not allowed to contain slashes
-        return '{0}/{1}/{2}'.format(project.name, element_name, key)
-
     # setup_remotes():
     #
     # Sets up which remotes to use
@@ -241,7 +210,7 @@ class ArtifactCache():
         for key in (strong_key, weak_key):
             if key:
                 try:
-                    ref = self.get_artifact_fullname(element, key)
+                    ref = element.get_artifact_name(key)
 
                     self.cas.update_mtime(ref)
                 except CASError:
@@ -521,7 +490,7 @@ class ArtifactCache():
     # Returns: True if the artifact is in the cache, False otherwise
     #
     def contains(self, element, key):
-        ref = self.get_artifact_fullname(element, key)
+        ref = element.get_artifact_name(key)
 
         return self.cas.contains(ref)
 
@@ -538,19 +507,21 @@ class ArtifactCache():
     # Returns: True if the subdir exists & is populated in the cache, False otherwise
     #
     def contains_subdir_artifact(self, element, key, subdir):
-        ref = self.get_artifact_fullname(element, key)
+        ref = element.get_artifact_name(key)
         return self.cas.contains_subdir_artifact(ref, subdir)
 
     # list_artifacts():
     #
     # List artifacts in this cache in LRU order.
     #
+    # Args:
+    #     glob (str): An optional glob expression to be used to list artifacts satisfying the glob
+    #
     # Returns:
-    #     ([str]) - A list of artifact names as generated by
-    #               `ArtifactCache.get_artifact_fullname` in LRU order
+    #     ([str]) - A list of artifact names as generated in LRU order
     #
-    def list_artifacts(self):
-        return self.cas.list_refs()
+    def list_artifacts(self, *, glob=None):
+        return self.cas.list_refs(glob=glob)
 
     # remove():
     #
@@ -559,8 +530,7 @@ class ArtifactCache():
     #
     # Args:
     #     ref (artifact_name): The name of the artifact to remove (as
-    #                          generated by
-    #                          `ArtifactCache.get_artifact_fullname`)
+    #                          generated by `Element.get_artifact_name`)
     #
     # Returns:
     #    (int): The amount of space recovered in the cache, in bytes
@@ -606,7 +576,7 @@ class ArtifactCache():
     # Returns: path to extracted artifact
     #
     def extract(self, element, key, subdir=None):
-        ref = self.get_artifact_fullname(element, key)
+        ref = element.get_artifact_name(key)
 
         path = os.path.join(self.extractdir, element._get_project().name, element.normal_name)
 
@@ -622,7 +592,7 @@ class ArtifactCache():
     #     keys (list): The cache keys to use
     #
     def commit(self, element, content, keys):
-        refs = [self.get_artifact_fullname(element, key) for key in keys]
+        refs = [element.get_artifact_name(key) for key in keys]
 
         self.cas.commit(refs, content)
 
@@ -638,8 +608,8 @@ class ArtifactCache():
     #     subdir (str): A subdirectory to limit the comparison to
     #
     def diff(self, element, key_a, key_b, *, subdir=None):
-        ref_a = self.get_artifact_fullname(element, key_a)
-        ref_b = self.get_artifact_fullname(element, key_b)
+        ref_a = element.get_artifact_name(key_a)
+        ref_b = element.get_artifact_name(key_b)
 
         return self.cas.diff(ref_a, ref_b, subdir=subdir)
 
@@ -700,7 +670,7 @@ class ArtifactCache():
     #     (ArtifactError): if there was an error
     #
     def push(self, element, keys):
-        refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
+        refs = [element.get_artifact_name(key) for key in list(keys)]
 
         project = element._get_project()
 
@@ -738,7 +708,7 @@ class ArtifactCache():
     #     (bool): True if pull was successful, False if artifact was not available
     #
     def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
-        ref = self.get_artifact_fullname(element, key)
+        ref = element.get_artifact_name(key)
 
         project = element._get_project()
 
@@ -850,11 +820,27 @@ class ArtifactCache():
     #     newkey (str): A new cache key for the artifact
     #
     def link_key(self, element, oldkey, newkey):
-        oldref = self.get_artifact_fullname(element, oldkey)
-        newref = self.get_artifact_fullname(element, newkey)
+        oldref = element.get_artifact_name(oldkey)
+        newref = element.get_artifact_name(newkey)
 
         self.cas.link_ref(oldref, newref)
 
+    # get_artifact_logs():
+    #
+    # Get the logs of an existing artifact
+    #
+    # Args:
+    #     ref (str): The ref of the artifact
+    #
+    # Returns:
+    #     logsdir (CasBasedDirectory): A CasBasedDirectory containing the artifact's logs
+    #
+    def get_artifact_logs(self, ref):
+        descend = ["logs"]
+        cache_id = self.cas.resolve_ref(ref, update_mtime=True)
+        vdir = CasBasedDirectory(self.cas, cache_id).descend(descend)
+        return vdir
+
     ################################################
     #             Local Private Methods            #
     ################################################
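
For illustration, a minimal sketch of driving the new ArtifactCache.get_artifact_logs() from Python; the cache instance and the ref below are assumptions for the sake of the example, and the export step mirrors what the frontend does before paging:

    # `cache` is assumed to be an ArtifactCache; the ref is hypothetical
    ref = "myproject/hello/" + "0" * 64         # project/element/cache-key
    logsdir = cache.get_artifact_logs(ref)      # CasBasedDirectory of "logs"
    # Export the virtual directory to a real path before reading the files
    logsdir.export_files("/tmp/artifact-logs", can_link=True)
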
--- /dev/null
+++ b/buildstream/_artifactelement.py
@@ -0,0 +1,88 @@
+#
+# Copyright (C) 2019 Bloomberg Finance LP
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+# Authors:
+#        James Ennis <james.ennis@codethink.co.uk>
+from . import Element
+from . import _cachekey
+from ._exceptions import ArtifactElementError
+from ._loader.metaelement import MetaElement
+
+
+# ArtifactElement()
+#
+# Object to be used for directly processing an artifact
+#
+# Args:
+#    context (Context): The Context object
+#    ref (str): The artifact ref
+#
+class ArtifactElement(Element):
+    def __init__(self, context, ref):
+        _, element, key = verify_artifact_ref(ref)
+
+        self._ref = ref
+        self._key = key
+
+        project = context.get_toplevel_project()
+        meta = MetaElement(project, element)  # NOTE element has no .bst suffix
+        plugin_conf = None
+
+        super().__init__(context, project, meta, plugin_conf)
+
+    # Override Element.get_artifact_name()
+    def get_artifact_name(self, key=None):
+        return self._ref
+
+    # Dummy configure method
+    def configure(self, node):
+        pass
+
+    # Dummy preflight method
+    def preflight(self):
+        pass
+
+    # Override Element._calculate_cache_key
+    def _calculate_cache_key(self, dependencies=None):
+        return self._key
+
+
+# verify_artifact_ref()
+#
+# Verify that a ref string matches the format of an artifact
+#
+# Args:
+#    ref (str): The artifact ref
+#
+# Returns:
+#    project (str): The project's name
+#    element (str): The element's name
+#    key (str): The cache key
+#
+# Raises:
+#    ArtifactElementError if the ref string does not match
+#    the expected format
+#
+def verify_artifact_ref(ref):
+    try:
+        project, element, key = ref.split('/', 2)  # This will raise a ValueError if unable to split
+        # Explicitly raise a ValueError if the key length is not as expected
+        if len(key) != len(_cachekey.generate_key({})):
+            raise ValueError
+    except ValueError:
+        raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
+
+    return project, element, key
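
A short sketch of what verify_artifact_ref() accepts and rejects, assuming that generated cache keys are 64-character sha256 hex digests (which is what len(_cachekey.generate_key({})) measures); the refs are hypothetical:

    key = "0" * 64                              # dummy, correctly sized key
    verify_artifact_ref("test/hello/" + key)    # -> ("test", "hello", key)
    verify_artifact_ref("hello.bst")            # raises ArtifactElementError
    verify_artifact_ref("test/hello/deadbeef")  # raises: key length mismatch
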
--- a/buildstream/_cas/cascache.py
+++ b/buildstream/_cas/cascache.py
@@ -24,6 +24,7 @@ import stat
 import errno
 import uuid
 import contextlib
+from fnmatch import fnmatch
 
 import grpc
 
@@ -474,22 +475,35 @@ class CASCache():
     #
     # List refs in Least Recently Modified (LRM) order.
     #
+    # Args:
+    #     glob (str) - An optional glob expression to be used to list refs satisfying the glob
+    #
     # Returns:
     #     (list) - A list of refs in LRM order
     #
-    def list_refs(self):
+    def list_refs(self, *, glob=None):
         # string of: /path/to/repo/refs/heads
         ref_heads = os.path.join(self.casdir, 'refs', 'heads')
+        path = ref_heads
+
+        if glob is not None:
+            globdir = os.path.dirname(glob)
+            if not any(c in "*?[" for c in globdir):
+                # path prefix contains no globbing characters so
+                # append the glob to optimise the os.walk()
+                path = os.path.join(ref_heads, globdir)
 
         refs = []
         mtimes = []
 
-        for root, _, files in os.walk(ref_heads):
+        for root, _, files in os.walk(path):
             for filename in files:
                 ref_path = os.path.join(root, filename)
-                refs.append(os.path.relpath(ref_path, ref_heads))
-                # Obtain the mtime (the time a file was last modified)
-                mtimes.append(os.path.getmtime(ref_path))
+                relative_path = os.path.relpath(ref_path, ref_heads)  # Relative to refs head
+                if not glob or fnmatch(relative_path, glob):
+                    refs.append(relative_path)
+                    # Obtain the mtime (the time a file was last modified)
+                    mtimes.append(os.path.getmtime(ref_path))
 
         # NOTE: Sorted will sort from earliest to latest, thus the
         # first ref of this list will be the file modified earliest.
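
To make the walk-prefix optimisation concrete: fnmatch() does not treat '/' specially, so a single glob can span the project/element/key levels of a ref, and only a wildcard-free directory prefix of the glob is used to narrow os.walk(). A sketch with hypothetical refs:

    import os
    from fnmatch import fnmatch

    glob = "test/*"
    globdir = os.path.dirname(glob)               # "test", no wildcards,
    assert not any(c in "*?[" for c in globdir)   # so walking may start at
                                                  # refs/heads/test directly
    # Matching still happens against paths relative to refs/heads, and a
    # '*' crosses '/' boundaries in fnmatch:
    assert fnmatch("test/hello/0123abcd", "test/*")
    assert not fnmatch("other/hello/0123abcd", "test/*")
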
--- a/buildstream/_exceptions.py
+++ b/buildstream/_exceptions.py
@@ -344,3 +344,12 @@ class AppError(BstError):
 #
 class SkipJob(Exception):
     pass
+
+
+# ArtifactElementError
+#
+# Raised when errors are encountered by artifact elements
+#
+class ArtifactElementError(BstError):
+    def __init__(self, message, *, detail=None, reason=None):
+        super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason)
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -1,7 +1,6 @@
 import os
 import sys
 from contextlib import ExitStack
-from fnmatch import fnmatch
 from functools import partial
 from tempfile import TemporaryDirectory
 
@@ -901,38 +900,6 @@ def workspace_list(app):
 #############################################################
 #                     Artifact Commands                     #
 #############################################################
-def _classify_artifacts(names, cas, project_directory):
-    element_targets = []
-    artifact_refs = []
-    element_globs = []
-    artifact_globs = []
-
-    for name in names:
-        if name.endswith('.bst'):
-            if any(c in "*?[" for c in name):
-                element_globs.append(name)
-            else:
-                element_targets.append(name)
-        else:
-            if any(c in "*?[" for c in name):
-                artifact_globs.append(name)
-            else:
-                artifact_refs.append(name)
-
-    if element_globs:
-        for dirpath, _, filenames in os.walk(project_directory):
-            for filename in filenames:
-                element_path = os.path.join(dirpath, filename).lstrip(project_directory).lstrip('/')
-                if any(fnmatch(element_path, glob) for glob in element_globs):
-                    element_targets.append(element_path)
-
-    if artifact_globs:
-        artifact_refs.extend(ref for ref in cas.list_refs()
-                             if any(fnmatch(ref, glob) for glob in artifact_globs))
-
-    return element_targets, artifact_refs
-
-
 @cli.group(short_help="Manipulate cached artifacts")
 def artifact():
     """Manipulate cached artifacts"""
@@ -1111,53 +1078,24 @@ def artifact_push(app, elements, deps, remote):
 @click.pass_obj
 def artifact_log(app, artifacts):
     """Show logs of all artifacts"""
-    from .._exceptions import CASError
-    from .._message import MessageType
-    from .._pipeline import PipelineSelection
-    from ..storage._casbaseddirectory import CasBasedDirectory
-
-    with ExitStack() as stack:
-        stack.enter_context(app.initialized())
-        cache = app.context.artifactcache
-
-        elements, artifacts = _classify_artifacts(artifacts, cache.cas,
-                                                  app.project.directory)
-
-        vdirs = []
-        extractdirs = []
-        if artifacts:
-            for ref in artifacts:
-                try:
-                    cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
-                    vdir = CasBasedDirectory(cache.cas, cache_id)
-                    vdirs.append(vdir)
-                except CASError as e:
-                    app._message(MessageType.WARN, "Artifact {} is not cached".format(ref), detail=str(e))
-                    continue
-        if elements:
-            elements = app.stream.load_selection(elements, selection=PipelineSelection.NONE)
-            for element in elements:
-                if not element._cached():
-                    app._message(MessageType.WARN, "Element {} is not cached".format(element))
-                    continue
-                ref = cache.get_artifact_fullname(element, element._get_cache_key())
-                cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
-                vdir = CasBasedDirectory(cache.cas, cache_id)
-                vdirs.append(vdir)
-
-        for vdir in vdirs:
-            # NOTE: If reading the logs feels unresponsive, here would be a good place to provide progress information.
-            logsdir = vdir.descend(["logs"])
-            td = stack.enter_context(TemporaryDirectory())
-            logsdir.export_files(td, can_link=True)
-            extractdirs.append(td)
-
-        for extractdir in extractdirs:
-            for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
-                # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
-                with open(log) as f:
-                    data = f.read()
-                    click.echo_via_pager(data)
+    with app.initialized():
+        logsdirs = app.stream.artifact_log(artifacts)
+
+        with ExitStack() as stack:
+            extractdirs = []
+            for logsdir in logsdirs:
+                # NOTE: If reading the logs feels unresponsive, here would be a good place
+                # to provide progress information.
+                td = stack.enter_context(TemporaryDirectory())
+                logsdir.export_files(td, can_link=True)
+                extractdirs.append(td)
+
+            for extractdir in extractdirs:
+                for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
+                    # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
+                    with open(log) as f:
+                        data = f.read()
+                        click.echo_via_pager(data)
 
 
 ##################################################################
--- a/buildstream/_loader/metaelement.py
+++ b/buildstream/_loader/metaelement.py
@@ -38,20 +38,20 @@ class MetaElement():
     #    sandbox: Configuration specific to the sandbox environment
     #    first_pass: The element is to be loaded with first pass configuration (junction)
     #
-    def __init__(self, project, name, kind, provenance, sources, config,
-                 variables, environment, env_nocache, public, sandbox,
-                 first_pass):
+    def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
+                 variables=None, environment=None, env_nocache=None, public=None,
+                 sandbox=None, first_pass=False):
         self.project = project
         self.name = name
         self.kind = kind
         self.provenance = provenance
         self.sources = sources
-        self.config = config
-        self.variables = variables
-        self.environment = environment
-        self.env_nocache = env_nocache
-        self.public = public
-        self.sandbox = sandbox
+        self.config = config or {}
+        self.variables = variables or {}
+        self.environment = environment or {}
+        self.env_nocache = env_nocache or []
+        self.public = public or {}
+        self.sandbox = sandbox or {}
         self.build_dependencies = []
         self.dependencies = []
         self.first_pass = first_pass
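
The new keyword defaults are what allow _artifactelement.py above to construct a MetaElement from just a project and a name. A sketch of the resulting fallbacks (the project object is assumed):

    meta = MetaElement(project, "hello")   # as ArtifactElement.__init__ does
    assert meta.kind is None               # no plugin kind for a bare artifact
    assert meta.config == {}               # `config or {}` fallback
    assert meta.env_nocache == []          # `env_nocache or []` fallback
    assert meta.first_pass is False
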
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -26,6 +26,7 @@ from . import utils
 from . import _cachekey
 from . import _site
 from . import _yaml
+from ._artifactelement import ArtifactElement
 from ._profile import Topics, profile_start, profile_end
 from ._exceptions import LoadError, LoadErrorReason
 from ._options import OptionPool
@@ -255,6 +256,19 @@ class Project():
         else:
             return self.config.element_factory.create(self._context, self, meta)
 
+    # create_artifact_element()
+    #
+    # Instantiate and return an ArtifactElement
+    #
+    # Args:
+    #    ref (str): A string of the artifact ref
+    #
+    # Returns:
+    #    (ArtifactElement): A newly created ArtifactElement object of the appropriate kind
+    #
+    def create_artifact_element(self, ref):
+        return ArtifactElement(self._context, ref)
+
     # create_source()
     #
     # Instantiate and return a Source
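
A sketch of the factory in use, assuming a Project instance and a hypothetical ref; the returned ArtifactElement reports the ref verbatim as its artifact name, since it overrides get_artifact_name():

    artifact = project.create_artifact_element("test/hello/" + "0" * 64)
    assert artifact.get_artifact_name() == "test/hello/" + "0" * 64
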
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -27,8 +27,10 @@ import shutil
 import tarfile
 import tempfile
 from contextlib import contextmanager, suppress
+from fnmatch import fnmatch
 
-from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
+from ._artifactelement import verify_artifact_ref
+from ._exceptions import StreamError, ImplError, BstError, ArtifactElementError, set_last_task_error
 from ._message import Message, MessageType
 from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
 from ._pipeline import Pipeline, PipelineSelection
@@ -108,19 +110,21 @@ class Stream():
     def load_selection(self, targets, *,
                        selection=PipelineSelection.NONE,
                        except_targets=(),
-                       use_artifact_config=False):
+                       use_artifact_config=False,
+                       load_refs=False):
 
         profile_start(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
 
-        elements, _ = self._load(targets, (),
-                                 selection=selection,
-                                 except_targets=except_targets,
-                                 fetch_subprojects=False,
-                                 use_artifact_config=use_artifact_config)
+        target_objects, _ = self._load(targets, (),
+                                       selection=selection,
+                                       except_targets=except_targets,
+                                       fetch_subprojects=False,
+                                       use_artifact_config=use_artifact_config,
+                                       load_refs=load_refs)
 
         profile_end(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
 
-        return elements
+        return target_objects
 
     # shell()
     #
@@ -491,6 +495,31 @@ class Stream():
             raise StreamError("Error while staging dependencies into a sandbox"
                               ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
 
+    # artifact_log()
+    #
+    # Show the full log of an artifact
+    #
+    # Args:
+    #    targets (str): Targets to view the logs of
+    #
+    # Returns:
+    #    logsdir (list): A list of CasBasedDirectory objects containing artifact logs
+    #
+    def artifact_log(self, targets):
+        # Return list of Element and/or ArtifactElement objects
+        target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
+
+        logsdirs = []
+        for obj in target_objects:
+            ref = obj.get_artifact_name()
+            if not obj._cached():
+                self._message(MessageType.WARN, "{} is not cached".format(ref))
+                continue
+
+            logsdirs.append(self._artifacts.get_artifact_logs(ref))
+
+        return logsdirs
+
     # source_checkout()
     #
     # Checkout sources of the target element to the specified location
@@ -922,25 +951,36 @@ class Stream():
              use_artifact_config=False,
              artifact_remote_url=None,
              fetch_subprojects=False,
-             dynamic_plan=False):
+             dynamic_plan=False,
+             load_refs=False):
+
+        # Classify element and artifact strings
+        target_elements, target_artifacts = self._classify_artifacts(targets)
+
+        if target_artifacts and not load_refs:
+            detail = '\n'.join(target_artifacts)
+            raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)
 
         # Load rewritable if we have any tracking selection to make
         rewritable = False
         if track_targets:
             rewritable = True
 
-        # Load all targets
+        # Load all target elements
         elements, except_elements, track_elements, track_except_elements = \
-            self._pipeline.load([targets, except_targets, track_targets, track_except_targets],
+            self._pipeline.load([target_elements, except_targets, track_targets, track_except_targets],
                                 rewritable=rewritable,
                                 fetch_subprojects=fetch_subprojects)
 
+        # Obtain the ArtifactElement objects
+        artifacts = [self._project.create_artifact_element(ref) for ref in target_artifacts]
+
         # Optionally filter out junction elements
        if ignore_junction_targets:
             elements = [e for e in elements if e.get_kind() != 'junction']
 
         # Hold on to the targets
-        self.targets = elements
+        self.targets = elements + artifacts
 
         # Here we should raise an error if the track_elements targets
         # are not dependencies of the primary targets, this is not
@@ -997,9 +1037,9 @@ class Stream():
 
         # Now move on to loading primary selection.
         #
-        self._pipeline.resolve_elements(elements)
-        selected = self._pipeline.get_selection(elements, selection, silent=False)
-        selected = self._pipeline.except_elements(elements,
+        self._pipeline.resolve_elements(self.targets)
+        selected = self._pipeline.get_selection(self.targets, selection, silent=False)
+        selected = self._pipeline.except_elements(self.targets,
                                                   selected,
                                                   except_elements)
 
@@ -1331,3 +1371,55 @@ class Stream():
             required_list.append(element)
 
         return required_list
+
+    # _classify_artifacts()
+    #
+    # Split up a list of targets into element names and artifact refs
+    #
+    # Args:
+    #    targets (list): A list of targets
+    #
+    # Returns:
+    #    (list): element names present in the targets
+    #    (list): artifact refs present in the targets
+    #
+    def _classify_artifacts(self, targets):
+        element_targets = []
+        artifact_refs = []
+        element_globs = []
+        artifact_globs = []
+
+        for target in targets:
+            if target.endswith('.bst'):
+                if any(c in "*?[" for c in target):
+                    element_globs.append(target)
+                else:
+                    element_targets.append(target)
+            else:
+                if any(c in "*?[" for c in target):
+                    artifact_globs.append(target)
+                else:
+                    try:
+                        verify_artifact_ref(target)
+                    except ArtifactElementError:
+                        element_targets.append(target)
+                        continue
+                    artifact_refs.append(target)
+
+        if element_globs:
+            for dirpath, _, filenames in os.walk(self._project.element_path):
+                for filename in filenames:
+                    element_path = os.path.join(dirpath, filename)
+                    length = len(self._project.element_path) + 1
+                    element_path = element_path[length:]  # Strip out the element_path
+
+                    if any(fnmatch(element_path, glob) for glob in element_globs):
+                        element_targets.append(element_path)
+
+        if artifact_globs:
+            for glob in artifact_globs:
+                artifact_refs.extend(self._artifacts.list_artifacts(glob=glob))
+            if not artifact_refs:
+                self._message(MessageType.WARN, "No artifacts found for globs: {}".format(', '.join(artifact_globs)))
+
+        return element_targets, artifact_refs
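
To illustrate the classification rules with hypothetical targets: names ending in '.bst' are elements (globbed ones are expanded by walking the project's element path), strings that parse as project/element/key are artifact refs, and remaining wildcard strings are artifact globs expanded through list_artifacts():

    targets = [
        "hello.bst",                 # element name
        "elements/*.bst",            # element glob, expanded via os.walk()
        "test/hello/" + "0" * 64,    # artifact ref, verify_artifact_ref() passes
        "test/*",                    # artifact glob, expanded via list_artifacts()
    ]
    # `stream` is assumed to be a Stream instance (the method is private)
    element_targets, artifact_refs = stream._classify_artifacts(targets)
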
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -82,6 +82,7 @@ import contextlib
 from contextlib import contextmanager
 import tempfile
 import shutil
+import string
 
 from . import _yaml
 from ._variables import Variables
@@ -577,6 +578,38 @@ class Element(Plugin):
         self.__assert_cached()
         return self.__compute_splits(include, exclude, orphans)
 
+    def get_artifact_name(self, key=None):
+        """Compute and return this element's full artifact name
+
+        Generate a full name for an artifact, including the project
+        namespace, element name and cache key.
+
+        This can also be used as a relative path safely, and
+        will normalize parts of the element name such that only
+        digits, letters and some select characters are allowed.
+
+        Args:
+           key (str): The element's cache key. Defaults to None
+
+        Returns:
+           (str): The relative path for the artifact
+        """
+        project = self._get_project()
+        if key is None:
+            key = self._get_cache_key()
+
+        assert key is not None
+
+        valid_chars = string.digits + string.ascii_letters + '-._'
+        element_name = ''.join([
+            x if x in valid_chars else '_'
+            for x in self.normal_name
+        ])
+
+        # Note that project names are not allowed to contain slashes. Element names containing
+        # a '/' will have this replaced with a '-' upon Element object instantiation.
+        return '{0}/{1}/{2}'.format(project.name, element_name, key)
+
     def stage_artifact(self, sandbox, *, path=None, include=None, exclude=None, orphans=True, update_mtimes=None):
         """Stage this element's output artifact in the sandbox
 
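
The normalization inside get_artifact_name() replaces every character outside digits, letters, and '-._' with an underscore before joining the three ref components. A standalone sketch of just that step, on a hypothetical element name:

    import string

    valid_chars = string.digits + string.ascii_letters + '-._'
    name = "hello world.bst"    # hypothetical normal_name
    normalized = ''.join(x if x in valid_chars else '_' for x in name)
    assert normalized == "hello_world.bst"
    # A full artifact name is then "<project>/<normalized-name>/<cache-key>"
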
| 1118 | 1151 |
e.name for e in self.dependencies(Scope.BUILD, recurse=False)
|
| 1119 | 1152 |
]
|
| 1120 | 1153 |
|
| 1121 |
- self.__weak_cache_key = self.__calculate_cache_key(dependencies)
|
|
| 1154 |
+ self.__weak_cache_key = self._calculate_cache_key(dependencies)
|
|
| 1122 | 1155 |
|
| 1123 | 1156 |
if self.__weak_cache_key is None:
|
| 1124 | 1157 |
# Weak cache key could not be calculated yet
|
| ... | ... | @@ -1147,8 +1180,7 @@ class Element(Plugin): |
| 1147 | 1180 |
dependencies = [
|
| 1148 | 1181 |
e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
|
| 1149 | 1182 |
]
|
| 1150 |
- self.__strict_cache_key = self.__calculate_cache_key(dependencies)
|
|
| 1151 |
- |
|
| 1183 |
+ self.__strict_cache_key = self._calculate_cache_key(dependencies)
|
|
| 1152 | 1184 |
if self.__strict_cache_key is None:
|
| 1153 | 1185 |
# Strict cache key could not be calculated yet
|
| 1154 | 1186 |
return
|
| ... | ... | @@ -1190,7 +1222,7 @@ class Element(Plugin): |
| 1190 | 1222 |
dependencies = [
|
| 1191 | 1223 |
e._get_cache_key() for e in self.dependencies(Scope.BUILD)
|
| 1192 | 1224 |
]
|
| 1193 |
- self.__cache_key = self.__calculate_cache_key(dependencies)
|
|
| 1225 |
+ self.__cache_key = self._calculate_cache_key(dependencies)
|
|
| 1194 | 1226 |
|
| 1195 | 1227 |
if self.__cache_key is None:
|
| 1196 | 1228 |
# Strong cache key could not be calculated yet
|
| ... | ... | @@ -2032,41 +2064,7 @@ class Element(Plugin): |
| 2032 | 2064 |
source._fetch(previous_sources)
|
| 2033 | 2065 |
previous_sources.append(source)
|
| 2034 | 2066 |
|
| 2035 |
- #############################################################
|
|
| 2036 |
- # Private Local Methods #
|
|
| 2037 |
- #############################################################
|
|
| 2038 |
- |
|
| 2039 |
- # __update_source_state()
|
|
| 2040 |
- #
|
|
| 2041 |
- # Updates source consistency state
|
|
| 2042 |
- #
|
|
| 2043 |
- def __update_source_state(self):
|
|
| 2044 |
- |
|
| 2045 |
- # Cannot resolve source state until tracked
|
|
| 2046 |
- if self.__tracking_scheduled:
|
|
| 2047 |
- return
|
|
| 2048 |
- |
|
| 2049 |
- self.__consistency = Consistency.CACHED
|
|
| 2050 |
- workspace = self._get_workspace()
|
|
| 2051 |
- |
|
| 2052 |
- # Special case for workspaces
|
|
| 2053 |
- if workspace:
|
|
| 2054 |
- |
|
| 2055 |
- # A workspace is considered inconsistent in the case
|
|
| 2056 |
- # that its directory went missing
|
|
| 2057 |
- #
|
|
| 2058 |
- fullpath = workspace.get_absolute_path()
|
|
| 2059 |
- if not os.path.exists(fullpath):
|
|
| 2060 |
- self.__consistency = Consistency.INCONSISTENT
|
|
| 2061 |
- else:
|
|
| 2062 |
- |
|
| 2063 |
- # Determine overall consistency of the element
|
|
| 2064 |
- for source in self.__sources:
|
|
| 2065 |
- source._update_state()
|
|
| 2066 |
- source_consistency = source._get_consistency()
|
|
| 2067 |
- self.__consistency = min(self.__consistency, source_consistency)
|
|
| 2068 |
- |
|
| 2069 |
- # __calculate_cache_key():
|
|
| 2067 |
+ # _calculate_cache_key():
|
|
| 2070 | 2068 |
#
|
| 2071 | 2069 |
# Calculates the cache key
|
| 2072 | 2070 |
#
|
| ... | ... | @@ -2075,7 +2073,7 @@ class Element(Plugin): |
| 2075 | 2073 |
#
|
| 2076 | 2074 |
# None is returned if information for the cache key is missing.
|
| 2077 | 2075 |
#
|
| 2078 |
- def __calculate_cache_key(self, dependencies):
|
|
| 2076 |
+ def _calculate_cache_key(self, dependencies):
|
|
| 2079 | 2077 |
# No cache keys for dependencies which have no cache keys
|
| 2080 | 2078 |
if None in dependencies:
|
| 2081 | 2079 |
return None
|
| ... | ... | @@ -2114,6 +2112,40 @@ class Element(Plugin): |
| 2114 | 2112 |
|
| 2115 | 2113 |
return _cachekey.generate_key(cache_key_dict)
|
| 2116 | 2114 |
|
| 2115 |
+ #############################################################
|
|
| 2116 |
+ # Private Local Methods #
|
|
| 2117 |
+ #############################################################
|
|
| 2118 |
+ |
|
| 2119 |
+ # __update_source_state()
|
|
| 2120 |
+ #
|
|
| 2121 |
+ # Updates source consistency state
|
|
| 2122 |
+ #
|
|
| 2123 |
+ def __update_source_state(self):
|
|
| 2124 |
+ |
|
| 2125 |
+ # Cannot resolve source state until tracked
|
|
| 2126 |
+ if self.__tracking_scheduled:
|
|
| 2127 |
+ return
|
|
| 2128 |
+ |
|
| 2129 |
+ self.__consistency = Consistency.CACHED
|
|
| 2130 |
+ workspace = self._get_workspace()
|
|
| 2131 |
+ |
|
| 2132 |
+ # Special case for workspaces
|
|
| 2133 |
+ if workspace:
|
|
| 2134 |
+ |
|
| 2135 |
+ # A workspace is considered inconsistent in the case
|
|
| 2136 |
+ # that its directory went missing
|
|
| 2137 |
+ #
|
|
| 2138 |
+ fullpath = workspace.get_absolute_path()
|
|
| 2139 |
+ if not os.path.exists(fullpath):
|
|
| 2140 |
+ self.__consistency = Consistency.INCONSISTENT
|
|
| 2141 |
+ else:
|
|
| 2142 |
+ |
|
| 2143 |
+ # Determine overall consistency of the element
|
|
| 2144 |
+ for source in self.__sources:
|
|
| 2145 |
+ source._update_state()
|
|
| 2146 |
+ source_consistency = source._get_consistency()
|
|
| 2147 |
+ self.__consistency = min(self.__consistency, source_consistency)
|
|
| 2148 |
+ |
|
| 2117 | 2149 |
# __can_build_incrementally()
|
| 2118 | 2150 |
#
|
| 2119 | 2151 |
# Check if the element can be built incrementally, this
|
| ... | ... | @@ -2297,6 +2329,8 @@ class Element(Plugin): |
| 2297 | 2329 |
defaults['public'] = element_public
|
| 2298 | 2330 |
|
| 2299 | 2331 |
def __init_defaults(self, plugin_conf):
|
| 2332 |
+ if plugin_conf is None:
|
|
| 2333 |
+ return
|
|
| 2300 | 2334 |
|
| 2301 | 2335 |
# Defaults are loaded once per class and then reused
|
| 2302 | 2336 |
#
|
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -211,7 +211,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
     assert artifactcache.contains(element, element_key)
 
     # Retrieve the Directory object from the cached artifact
-    artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
+    artifact_ref = element.get_artifact_name(element_key)
     artifact_digest = cas.resolve_ref(artifact_ref)
 
     queue = multiprocessing.Queue()
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -190,7 +190,7 @@ def test_push_directory(cli, tmpdir, datafiles):
     assert artifactcache.has_push_remotes(element=element)
 
     # Recreate the CasBasedDirectory object from the cached artifact
-    artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
+    artifact_ref = element.get_artifact_name(element_key)
     artifact_digest = cas.resolve_ref(artifact_ref)
 
     queue = multiprocessing.Queue()