James Ennis pushed to branch jennis/refactor_artifact_log at BuildStream / buildstream
Commits:
- 36746730 by Chandan Singh at 2019-01-31T10:50:05Z
- fa4a21ce by Chandan Singh at 2019-01-31T12:15:43Z
- dd791373 by Chandan Singh at 2019-01-31T14:32:44Z
- 96c0fbd6 by Chandan Singh at 2019-01-31T15:39:19Z
- d25e2795 by Benjamin Schubert at 2019-01-31T17:06:23Z
- 2d0eebbf by Benjamin Schubert at 2019-01-31T17:06:23Z
- 583bd97d by Benjamin Schubert at 2019-02-01T10:26:37Z
- 51cec3da by Phil Dawson at 2019-02-01T14:25:44Z
- 2b38aabe by Phil Dawson at 2019-02-01T15:33:00Z
- d79ecf92 by James Ennis at 2019-02-01T17:44:39Z
- 2b11f510 by James Ennis at 2019-02-01T17:44:40Z
- 24f554f1 by James Ennis at 2019-02-01T17:44:40Z
- ced4d314 by James Ennis at 2019-02-01T17:44:40Z
- 97e47b66 by James Ennis at 2019-02-01T17:44:40Z
- 70db3209 by James Ennis at 2019-02-01T17:44:40Z
- 444c977e by James Ennis at 2019-02-01T17:44:40Z
- 8513172e by James Ennis at 2019-02-01T17:44:40Z
- 8400f66a by James Ennis at 2019-02-01T17:44:40Z
- affe4af1 by James Ennis at 2019-02-01T17:44:40Z
- 4e253996 by James Ennis at 2019-02-01T17:44:40Z
- ab5d0d8e by James Ennis at 2019-02-01T17:44:40Z
- 7db3c93c by James Ennis at 2019-02-01T17:44:40Z
- 3bb3c90c by James Ennis at 2019-02-01T17:50:08Z
21 changed files:
- buildstream/_artifactcache.py
- + buildstream/_artifactelement.py
- buildstream/_cas/cascache.py
- buildstream/_exceptions.py
- buildstream/_frontend/cli.py
- buildstream/_loader/loadelement.py
- buildstream/_loader/loader.py
- buildstream/_loader/metaelement.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/element.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
- tests/cachekey/cachekey.py
- + tests/cachekey/project/elements/key-stability/aaa.bst
- + tests/cachekey/project/elements/key-stability/t1.bst
- + tests/cachekey/project/elements/key-stability/t2.bst
- + tests/cachekey/project/elements/key-stability/top-level.bst
- + tests/cachekey/project/elements/key-stability/zzz.bst
- tests/testutils/site.py
- tox.ini
Changes:
| ... | ... | @@ -19,7 +19,6 @@ | 
| 19 | 19 |  | 
| 20 | 20 |  import multiprocessing
 | 
| 21 | 21 |  import os
 | 
| 22 | -import string
 | |
| 23 | 22 |  from collections.abc import Mapping
 | 
| 24 | 23 |  | 
| 25 | 24 |  from .types import _KeyStrength
 | 
| ... | ... | @@ -112,37 +111,6 @@ class ArtifactCache(): | 
| 112 | 111 |  | 
| 113 | 112 |          self._calculate_cache_quota()
 | 
| 114 | 113 |  | 
| 115 | -    # get_artifact_fullname()
 | |
| 116 | -    #
 | |
| 117 | -    # Generate a full name for an artifact, including the
 | |
| 118 | -    # project namespace, element name and cache key.
 | |
| 119 | -    #
 | |
| 120 | -    # This can also be used as a relative path safely, and
 | |
| 121 | -    # will normalize parts of the element name such that only
 | |
| 122 | -    # digits, letters and some select characters are allowed.
 | |
| 123 | -    #
 | |
| 124 | -    # Args:
 | |
| 125 | -    #    element (Element): The Element object
 | |
| 126 | -    #    key (str): The element's cache key
 | |
| 127 | -    #
 | |
| 128 | -    # Returns:
 | |
| 129 | -    #    (str): The relative path for the artifact
 | |
| 130 | -    #
 | |
| 131 | -    def get_artifact_fullname(self, element, key):
 | |
| 132 | -        project = element._get_project()
 | |
| 133 | - | |
| 134 | -        # Normalize ostree ref unsupported chars
 | |
| 135 | -        valid_chars = string.digits + string.ascii_letters + '-._'
 | |
| 136 | -        element_name = ''.join([
 | |
| 137 | -            x if x in valid_chars else '_'
 | |
| 138 | -            for x in element.normal_name
 | |
| 139 | -        ])
 | |
| 140 | - | |
| 141 | -        assert key is not None
 | |
| 142 | - | |
| 143 | -        # assume project and element names are not allowed to contain slashes
 | |
| 144 | -        return '{0}/{1}/{2}'.format(project.name, element_name, key)
 | |
| 145 | - | |
| 146 | 114 |      # setup_remotes():
 | 
| 147 | 115 |      #
 | 
| 148 | 116 |      # Sets up which remotes to use
 | 
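With get_artifact_fullname() removed in favour of Element.get_artifact_name(), the ref format itself is unchanged. A standalone sketch of that format, reconstructed from the removed method above (the helper name is hypothetical):

    import string

    def make_artifact_name(project_name, normal_name, key):
        # Normalize characters that are not valid in a ref,
        # exactly as the removed method did
        valid_chars = string.digits + string.ascii_letters + '-._'
        element_name = ''.join(
            x if x in valid_chars else '_'
            for x in normal_name
        )
        assert key is not None
        # project and element names are assumed to contain no slashes
        return '{0}/{1}/{2}'.format(project_name, element_name, key)

    # make_artifact_name('myproject', 'hello', 'a' * 64)
    #   -> 'myproject/hello/aaaa...'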
| ... | ... | @@ -241,7 +209,7 @@ class ArtifactCache(): | 
| 241 | 209 |              for key in (strong_key, weak_key):
 | 
| 242 | 210 |                  if key:
 | 
| 243 | 211 |                      try:
 | 
| 244 | -                        ref = self.get_artifact_fullname(element, key)
 | |
| 212 | +                        ref = element.get_artifact_name(key)
 | |
| 245 | 213 |  | 
| 246 | 214 |                          self.cas.update_mtime(ref)
 | 
| 247 | 215 |                      except CASError:
 | 
| ... | ... | @@ -521,7 +489,7 @@ class ArtifactCache(): | 
| 521 | 489 |      # Returns: True if the artifact is in the cache, False otherwise
 | 
| 522 | 490 |      #
 | 
| 523 | 491 |      def contains(self, element, key):
 | 
| 524 | -        ref = self.get_artifact_fullname(element, key)
 | |
| 492 | +        ref = element.get_artifact_name(key)
 | |
| 525 | 493 |  | 
| 526 | 494 |          return self.cas.contains(ref)
 | 
| 527 | 495 |  | 
| ... | ... | @@ -538,19 +506,21 @@ class ArtifactCache(): | 
| 538 | 506 |      # Returns: True if the subdir exists & is populated in the cache, False otherwise
 | 
| 539 | 507 |      #
 | 
| 540 | 508 |      def contains_subdir_artifact(self, element, key, subdir):
 | 
| 541 | -        ref = self.get_artifact_fullname(element, key)
 | |
| 509 | +        ref = element.get_artifact_name(key)
 | |
| 542 | 510 |          return self.cas.contains_subdir_artifact(ref, subdir)
 | 
| 543 | 511 |  | 
| 544 | 512 |      # list_artifacts():
 | 
| 545 | 513 |      #
 | 
| 546 | 514 |      # List artifacts in this cache in LRU order.
 | 
| 547 | 515 |      #
 | 
| 516 | +    # Args:
 | |
| 517 | +    #     glob (str): An optional glob expression used to filter the listed artifacts
 | |
| 518 | +    #
 | |
| 548 | 519 |      # Returns:
 | 
| 549 | -    #     ([str]) - A list of artifact names as generated by
 | |
| 550 | -    #               `ArtifactCache.get_artifact_fullname` in LRU order
 | |
| 520 | +    #     ([str]) - A list of artifact names in LRU order
 | |
| 551 | 521 |      #
 | 
| 552 | -    def list_artifacts(self):
 | |
| 553 | -        return self.cas.list_refs()
 | |
| 522 | +    def list_artifacts(self, *, glob=None):
 | |
| 523 | +        return self.cas.list_refs(glob=glob)
 | |
| 554 | 524 |  | 
| 555 | 525 |      # remove():
 | 
| 556 | 526 |      #
 | 
| ... | ... | @@ -559,8 +529,7 @@ class ArtifactCache(): | 
| 559 | 529 |      #
 | 
| 560 | 530 |      # Args:
 | 
| 561 | 531 |      #     ref (artifact_name): The name of the artifact to remove (as
 | 
| 562 | -    #                          generated by
 | |
| 563 | -    #                          `ArtifactCache.get_artifact_fullname`)
 | |
| 532 | +    #                          generated by `Element.get_artifact_name`)
 | |
| 564 | 533 |      #
 | 
| 565 | 534 |      # Returns:
 | 
| 566 | 535 |      #    (int): The amount of space recovered in the cache, in bytes
 | 
| ... | ... | @@ -606,7 +575,7 @@ class ArtifactCache(): | 
| 606 | 575 |      # Returns: path to extracted artifact
 | 
| 607 | 576 |      #
 | 
| 608 | 577 |      def extract(self, element, key, subdir=None):
 | 
| 609 | -        ref = self.get_artifact_fullname(element, key)
 | |
| 578 | +        ref = element.get_artifact_name(key)
 | |
| 610 | 579 |  | 
| 611 | 580 |          path = os.path.join(self.extractdir, element._get_project().name, element.normal_name)
 | 
| 612 | 581 |  | 
| ... | ... | @@ -622,7 +591,7 @@ class ArtifactCache(): | 
| 622 | 591 |      #     keys (list): The cache keys to use
 | 
| 623 | 592 |      #
 | 
| 624 | 593 |      def commit(self, element, content, keys):
 | 
| 625 | -        refs = [self.get_artifact_fullname(element, key) for key in keys]
 | |
| 594 | +        refs = [element.get_artifact_name(key) for key in keys]
 | |
| 626 | 595 |  | 
| 627 | 596 |          self.cas.commit(refs, content)
 | 
| 628 | 597 |  | 
| ... | ... | @@ -638,8 +607,8 @@ class ArtifactCache(): | 
| 638 | 607 |      #     subdir (str): A subdirectory to limit the comparison to
 | 
| 639 | 608 |      #
 | 
| 640 | 609 |      def diff(self, element, key_a, key_b, *, subdir=None):
 | 
| 641 | -        ref_a = self.get_artifact_fullname(element, key_a)
 | |
| 642 | -        ref_b = self.get_artifact_fullname(element, key_b)
 | |
| 610 | +        ref_a = element.get_artifact_name(key_a)
 | |
| 611 | +        ref_b = element.get_artifact_name(key_b)
 | |
| 643 | 612 |  | 
| 644 | 613 |          return self.cas.diff(ref_a, ref_b, subdir=subdir)
 | 
| 645 | 614 |  | 
| ... | ... | @@ -700,7 +669,7 @@ class ArtifactCache(): | 
| 700 | 669 |      #   (ArtifactError): if there was an error
 | 
| 701 | 670 |      #
 | 
| 702 | 671 |      def push(self, element, keys):
 | 
| 703 | -        refs = [self.get_artifact_fullname(element, key) for key in list(keys)]
 | |
| 672 | +        refs = [element.get_artifact_name(key) for key in list(keys)]
 | |
| 704 | 673 |  | 
| 705 | 674 |          project = element._get_project()
 | 
| 706 | 675 |  | 
| ... | ... | @@ -738,7 +707,7 @@ class ArtifactCache(): | 
| 738 | 707 |      #   (bool): True if pull was successful, False if artifact was not available
 | 
| 739 | 708 |      #
 | 
| 740 | 709 |      def pull(self, element, key, *, progress=None, subdir=None, excluded_subdirs=None):
 | 
| 741 | -        ref = self.get_artifact_fullname(element, key)
 | |
| 710 | +        ref = element.get_artifact_name(key)
 | |
| 742 | 711 |  | 
| 743 | 712 |          project = element._get_project()
 | 
| 744 | 713 |  | 
| ... | ... | @@ -850,11 +819,25 @@ class ArtifactCache(): | 
| 850 | 819 |      #     newkey (str): A new cache key for the artifact
 | 
| 851 | 820 |      #
 | 
| 852 | 821 |      def link_key(self, element, oldkey, newkey):
 | 
| 853 | -        oldref = self.get_artifact_fullname(element, oldkey)
 | |
| 854 | -        newref = self.get_artifact_fullname(element, newkey)
 | |
| 822 | +        oldref = element.get_artifact_name(oldkey)
 | |
| 823 | +        newref = element.get_artifact_name(newkey)
 | |
| 855 | 824 |  | 
| 856 | 825 |          self.cas.link_ref(oldref, newref)
 | 
| 857 | 826 |  | 
| 827 | +    # get_artifact_logs():
 | |
| 828 | +    #
 | |
| 829 | +    # Get the logs of an existing artifact
 | |
| 830 | +    #
 | |
| 831 | +    # Args:
 | |
| 832 | +    #     ref (str): The ref of the artifact
 | |
| 833 | +    #
 | |
| 834 | +    # Returns:
 | |
| 835 | +    #     logsdir (CasBasedDirectory): A CasBasedDirectory containing the artifact's logs
 | |
| 836 | +    #
 | |
| 837 | +    def get_artifact_logs(self, ref):
 | |
| 838 | +        descend = ["logs"]
 | |
| 839 | +        return self.cas.get_toplevel_dir(ref, descend)
 | |
| 840 | + | |
| 858 | 841 |      ################################################
 | 
| 859 | 842 |      #               Local Private Methods          #
 | 
| 860 | 843 |      ################################################
 | 
| 1 | +#
 | |
| 2 | +#  Copyright (C) 2019 Bloomberg Finance LP
 | |
| 3 | +#
 | |
| 4 | +#  This program is free software; you can redistribute it and/or
 | |
| 5 | +#  modify it under the terms of the GNU Lesser General Public
 | |
| 6 | +#  License as published by the Free Software Foundation; either
 | |
| 7 | +#  version 2 of the License, or (at your option) any later version.
 | |
| 8 | +#
 | |
| 9 | +#  This library is distributed in the hope that it will be useful,
 | |
| 10 | +#  but WITHOUT ANY WARRANTY; without even the implied warranty of
 | |
| 11 | +#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the GNU
 | |
| 12 | +#  Lesser General Public License for more details.
 | |
| 13 | +#
 | |
| 14 | +#  You should have received a copy of the GNU Lesser General Public
 | |
| 15 | +#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
 | |
| 16 | +#
 | |
| 17 | +#  Authors:
 | |
| 18 | +#        James Ennis <james.ennis@codethink.co.uk>
 | |
| 19 | +from . import Element
 | |
| 20 | +from ._exceptions import ArtifactElementError
 | |
| 21 | +from ._loader.metaelement import MetaElement
 | |
| 22 | + | |
| 23 | + | |
| 24 | +# ArtifactElement()
 | |
| 25 | +#
 | |
| 26 | +# Object to be used for directly processing an artifact
 | |
| 27 | +#
 | |
| 28 | +# Args:
 | |
| 29 | +#    context (Context): The Context object
 | |
| 30 | +#    ref (str): The artifact ref
 | |
| 31 | +#
 | |
| 32 | +class ArtifactElement(Element):
 | |
| 33 | +    def __init__(self, context, ref):
 | |
| 34 | +        self._ref = ref
 | |
| 35 | + | |
| 36 | +        # Ensure that the provided ref matches the form of an artifact
 | |
| 37 | +        try:
 | |
| 38 | +            _, element, key = ref.split('/', 2)
 | |
| 39 | +        except ValueError:
 | |
| 40 | +            raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
 | |
| 41 | +        if len(key) != 64:
 | |
| 42 | +            raise ArtifactElementError("Artifact: {} is not of the expected format".format(ref))
 | |
| 43 | + | |
| 44 | +        self._key = key
 | |
| 45 | + | |
| 46 | +        project = context.get_toplevel_project()
 | |
| 47 | +        meta = MetaElement(project, element)  # NOTE element has no .bst suffix
 | |
| 48 | +        plugin_conf = None
 | |
| 49 | + | |
| 50 | +        super().__init__(context, project, meta, plugin_conf)
 | |
| 51 | + | |
| 52 | +    # Override Element.get_artifact_name()
 | |
| 53 | +    def get_artifact_name(self, key=None):
 | |
| 54 | +        return self._ref
 | |
| 55 | + | |
| 56 | +    # Dummy configure method
 | |
| 57 | +    def configure(self, node):
 | |
| 58 | +        pass
 | |
| 59 | + | |
| 60 | +    # Dummy preflight method
 | |
| 61 | +    def preflight(self):
 | |
| 62 | +        pass
 | |
| 63 | + | |
| 64 | +    # Override Element._calculate_cache_key
 | |
| 65 | +    def _calculate_cache_key(self, dependencies=None):
 | |
| 66 | +        return self._key | 
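For reference, a standalone sketch of the ref validation ArtifactElement.__init__() performs (the function name is hypothetical; cache keys are assumed to be 64-character hex digests):

    def parse_artifact_ref(ref):
        # A ref takes the form <project>/<element>/<key>
        try:
            project, element, key = ref.split('/', 2)
        except ValueError:
            raise ValueError("Artifact: {} is not of the expected format".format(ref))
        # Reject refs whose key is not a 64-character digest
        if len(key) != 64:
            raise ValueError("Artifact: {} is not of the expected format".format(ref))
        return project, element, key

    # parse_artifact_ref('myproject/hello/' + 'a' * 64)
    #   -> ('myproject', 'hello', 'aaaa...')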
| ... | ... | @@ -24,6 +24,7 @@ import stat | 
| 24 | 24 |  import errno
 | 
| 25 | 25 |  import uuid
 | 
| 26 | 26 |  import contextlib
 | 
| 27 | +from fnmatch import fnmatch
 | |
| 27 | 28 |  | 
| 28 | 29 |  import grpc
 | 
| 29 | 30 |  | 
| ... | ... | @@ -32,6 +33,7 @@ from .._protos.buildstream.v2 import buildstream_pb2 | 
| 32 | 33 |  | 
| 33 | 34 |  from .. import utils
 | 
| 34 | 35 |  from .._exceptions import CASCacheError
 | 
| 36 | +from ..storage._casbaseddirectory import CasBasedDirectory
 | |
| 35 | 37 |  | 
| 36 | 38 |  from .casremote import BlobNotFound, _CASBatchRead, _CASBatchUpdate
 | 
| 37 | 39 |  | 
| ... | ... | @@ -472,22 +474,35 @@ class CASCache(): | 
| 472 | 474 |      #
 | 
| 473 | 475 |      # List refs in Least Recently Modified (LRM) order.
 | 
| 474 | 476 |      #
 | 
| 477 | +    # Args:
 | |
| 478 | +    #     glob (str) - An optional glob expression used to filter the listed refs
 | |
| 479 | +    #
 | |
| 475 | 480 |      # Returns:
 | 
| 476 | 481 |      #     (list) - A list of refs in LRM order
 | 
| 477 | 482 |      #
 | 
| 478 | -    def list_refs(self):
 | |
| 483 | +    def list_refs(self, *, glob=None):
 | |
| 479 | 484 |          # string of: /path/to/repo/refs/heads
 | 
| 480 | 485 |          ref_heads = os.path.join(self.casdir, 'refs', 'heads')
 | 
| 486 | +        path = ref_heads
 | |
| 487 | + | |
| 488 | +        if glob is not None:
 | |
| 489 | +            globdir = os.path.dirname(glob)
 | |
| 490 | +            if not any(c in "*?[" for c in globdir):
 | |
| 491 | +                # path prefix contains no globbing characters so
 | |
| 492 | +                # append the glob to optimise the os.walk()
 | |
| 493 | +                path = os.path.join(ref_heads, globdir)
 | |
| 481 | 494 |  | 
| 482 | 495 |          refs = []
 | 
| 483 | 496 |          mtimes = []
 | 
| 484 | 497 |  | 
| 485 | -        for root, _, files in os.walk(ref_heads):
 | |
| 498 | +        for root, _, files in os.walk(path):
 | |
| 486 | 499 |              for filename in files:
 | 
| 487 | 500 |                  ref_path = os.path.join(root, filename)
 | 
| 488 | -                refs.append(os.path.relpath(ref_path, ref_heads))
 | |
| 489 | -                # Obtain the mtime (the time a file was last modified)
 | |
| 490 | -                mtimes.append(os.path.getmtime(ref_path))
 | |
| 501 | +                relative_path = os.path.relpath(ref_path, ref_heads)  # Relative to refs head
 | |
| 502 | +                if not glob or fnmatch(relative_path, glob):
 | |
| 503 | +                    refs.append(relative_path)
 | |
| 504 | +                    # Obtain the mtime (the time a file was last modified)
 | |
| 505 | +                    mtimes.append(os.path.getmtime(ref_path))
 | |
| 491 | 506 |  | 
| 492 | 507 |          # NOTE: Sorted will sort from earliest to latest, thus the
 | 
| 493 | 508 |          # first ref of this list will be the file modified earliest.
 | 
| ... | ... | @@ -587,6 +602,22 @@ class CASCache(): | 
| 587 | 602 |          reachable = set()
 | 
| 588 | 603 |          self._reachable_refs_dir(reachable, tree, update_mtime=True)
 | 
| 589 | 604 |  | 
| 605 | +    # get_toplevel_dir()
 | |
| 606 | +    #
 | |
| 607 | +    # Return a CasBasedDirectory object for the specified subdirectories
 | |
| 608 | +    #
 | |
| 609 | +    # Args:
 | |
| 610 | +    #     ref (str): The artifact ref
 | |
| 611 | +    #     descend (list): A list of strings of artifact subdirectories
 | |
| 612 | +    #
 | |
| 613 | +    # Returns:
 | |
| 614 | +    #     (CasBasedDirectory): The CasBasedDirectory object
 | |
| 615 | +    #
 | |
| 616 | +    def get_toplevel_dir(self, ref, descend):
 | |
| 617 | +        cache_id = self.resolve_ref(ref, update_mtime=True)
 | |
| 618 | +        vdir = CasBasedDirectory(self, cache_id).descend(descend)
 | |
| 619 | +        return vdir
 | |
| 620 | + | |
| 590 | 621 |      ################################################
 | 
| 591 | 622 |      #             Local Private Methods            #
 | 
| 592 | 623 |      ################################################
 | 
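The new glob support in list_refs() combines a walk-root optimisation with fnmatch filtering. A standalone sketch of the same logic (helper names are hypothetical):

    import os
    from fnmatch import fnmatch

    def narrow_walk_root(ref_heads, glob):
        # If the directory part of the glob contains no globbing
        # characters, the os.walk() can start deeper in the tree
        globdir = os.path.dirname(glob)
        if not any(c in "*?[" for c in globdir):
            return os.path.join(ref_heads, globdir)
        return ref_heads

    def matching_refs(relative_paths, glob):
        # Refs are matched relative to refs/heads, as in list_refs()
        return [p for p in relative_paths if fnmatch(p, glob)]

    # narrow_walk_root('/cas/refs/heads', 'myproject/hello/*')
    #   -> '/cas/refs/heads/myproject/hello'
    # narrow_walk_root('/cas/refs/heads', 'my*/hello/*')
    #   -> '/cas/refs/heads'

Since fnmatch is still applied to every candidate, the narrowed walk root only reduces the amount of filesystem traversal, never the result set.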
| ... | ... | @@ -344,3 +344,12 @@ class AppError(BstError): | 
| 344 | 344 |  #
 | 
| 345 | 345 |  class SkipJob(Exception):
 | 
| 346 | 346 |      pass
 | 
| 347 | + | |
| 348 | + | |
| 349 | +# ArtifactElementError
 | |
| 350 | +#
 | |
| 351 | +# Raised when errors are encountered by artifact elements
 | |
| 352 | +#
 | |
| 353 | +class ArtifactElementError(BstError):
 | |
| 354 | +    def __init__(self, message, *, detail=None, reason=None, temporary=False):
 | |
| 355 | +        super().__init__(message, detail=detail, domain=ErrorDomain.ELEMENT, reason=reason, temporary=temporary) | 
| 1 | 1 |  import os
 | 
| 2 | 2 |  import sys
 | 
| 3 | 3 |  from contextlib import ExitStack
 | 
| 4 | -from fnmatch import fnmatch
 | |
| 5 | 4 |  from functools import partial
 | 
| 6 | 5 |  from tempfile import TemporaryDirectory
 | 
| 7 | 6 |  | 
| ... | ... | @@ -895,38 +894,6 @@ def workspace_list(app): | 
| 895 | 894 |  #############################################################
 | 
| 896 | 895 |  #                     Artifact Commands                     #
 | 
| 897 | 896 |  #############################################################
 | 
| 898 | -def _classify_artifacts(names, cas, project_directory):
 | |
| 899 | -    element_targets = []
 | |
| 900 | -    artifact_refs = []
 | |
| 901 | -    element_globs = []
 | |
| 902 | -    artifact_globs = []
 | |
| 903 | - | |
| 904 | -    for name in names:
 | |
| 905 | -        if name.endswith('.bst'):
 | |
| 906 | -            if any(c in "*?[" for c in name):
 | |
| 907 | -                element_globs.append(name)
 | |
| 908 | -            else:
 | |
| 909 | -                element_targets.append(name)
 | |
| 910 | -        else:
 | |
| 911 | -            if any(c in "*?[" for c in name):
 | |
| 912 | -                artifact_globs.append(name)
 | |
| 913 | -            else:
 | |
| 914 | -                artifact_refs.append(name)
 | |
| 915 | - | |
| 916 | -    if element_globs:
 | |
| 917 | -        for dirpath, _, filenames in os.walk(project_directory):
 | |
| 918 | -            for filename in filenames:
 | |
| 919 | -                element_path = os.path.join(dirpath, filename).lstrip(project_directory).lstrip('/')
 | |
| 920 | -                if any(fnmatch(element_path, glob) for glob in element_globs):
 | |
| 921 | -                    element_targets.append(element_path)
 | |
| 922 | - | |
| 923 | -    if artifact_globs:
 | |
| 924 | -        artifact_refs.extend(ref for ref in cas.list_refs()
 | |
| 925 | -                             if any(fnmatch(ref, glob) for glob in artifact_globs))
 | |
| 926 | - | |
| 927 | -    return element_targets, artifact_refs
 | |
| 928 | - | |
| 929 | - | |
| 930 | 897 |  @cli.group(short_help="Manipulate cached artifacts")
 | 
| 931 | 898 |  def artifact():
 | 
| 932 | 899 |      """Manipulate cached artifacts"""
 | 
| ... | ... | @@ -1105,53 +1072,30 @@ def artifact_push(app, elements, deps, remote): | 
| 1105 | 1072 |  @click.pass_obj
 | 
| 1106 | 1073 |  def artifact_log(app, artifacts):
 | 
| 1107 | 1074 |      """Show logs of all artifacts"""
 | 
| 1108 | -    from .._exceptions import CASError
 | |
| 1109 | -    from .._message import MessageType
 | |
| 1110 | -    from .._pipeline import PipelineSelection
 | |
| 1111 | -    from ..storage._casbaseddirectory import CasBasedDirectory
 | |
| 1112 | - | |
| 1113 | -    with ExitStack() as stack:
 | |
| 1114 | -        stack.enter_context(app.initialized())
 | |
| 1115 | -        cache = app.context.artifactcache
 | |
| 1075 | +    # Guess the element if we're in a workspace
 | |
| 1076 | +    if not artifacts:
 | |
| 1077 | +        guessed_target = app.context.guess_element()
 | |
| 1078 | +        if guessed_target:
 | |
| 1079 | +            artifacts = [guessed_target]
 | |
| 1116 | 1080 |  | 
| 1117 | -        elements, artifacts = _classify_artifacts(artifacts, cache.cas,
 | |
| 1118 | -                                                  app.project.directory)
 | |
| 1119 | - | |
| 1120 | -        vdirs = []
 | |
| 1121 | -        extractdirs = []
 | |
| 1122 | -        if artifacts:
 | |
| 1123 | -            for ref in artifacts:
 | |
| 1124 | -                try:
 | |
| 1125 | -                    cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
 | |
| 1126 | -                    vdir = CasBasedDirectory(cache.cas, cache_id)
 | |
| 1127 | -                    vdirs.append(vdir)
 | |
| 1128 | -                except CASError as e:
 | |
| 1129 | -                    app._message(MessageType.WARN, "Artifact {} is not cached".format(ref), detail=str(e))
 | |
| 1130 | -                    continue
 | |
| 1131 | -        if elements:
 | |
| 1132 | -            elements = app.stream.load_selection(elements, selection=PipelineSelection.NONE)
 | |
| 1133 | -            for element in elements:
 | |
| 1134 | -                if not element._cached():
 | |
| 1135 | -                    app._message(MessageType.WARN, "Element {} is not cached".format(element))
 | |
| 1136 | -                    continue
 | |
| 1137 | -                ref = cache.get_artifact_fullname(element, element._get_cache_key())
 | |
| 1138 | -                cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
 | |
| 1139 | -                vdir = CasBasedDirectory(cache.cas, cache_id)
 | |
| 1140 | -                vdirs.append(vdir)
 | |
| 1141 | - | |
| 1142 | -        for vdir in vdirs:
 | |
| 1143 | -            # NOTE: If reading the logs feels unresponsive, here would be a good place to provide progress information.
 | |
| 1144 | -            logsdir = vdir.descend(["logs"])
 | |
| 1145 | -            td = stack.enter_context(TemporaryDirectory())
 | |
| 1146 | -            logsdir.export_files(td, can_link=True)
 | |
| 1147 | -            extractdirs.append(td)
 | |
| 1148 | - | |
| 1149 | -        for extractdir in extractdirs:
 | |
| 1150 | -            for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
 | |
| 1151 | -                # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
 | |
| 1152 | -                with open(log) as f:
 | |
| 1153 | -                    data = f.read()
 | |
| 1154 | -                    click.echo_via_pager(data)
 | |
| 1081 | +    with app.initialized():
 | |
| 1082 | +        logsdirs = app.stream.artifact_log(artifacts)
 | |
| 1083 | + | |
| 1084 | +        with ExitStack() as stack:
 | |
| 1085 | +            extractdirs = []
 | |
| 1086 | +            for logsdir in logsdirs:
 | |
| 1087 | +                # NOTE: If reading the logs feels unresponsive, here would be a good place
 | |
| 1088 | +                # to provide progress information.
 | |
| 1089 | +                td = stack.enter_context(TemporaryDirectory())
 | |
| 1090 | +                logsdir.export_files(td, can_link=True)
 | |
| 1091 | +                extractdirs.append(td)
 | |
| 1092 | + | |
| 1093 | +            for extractdir in extractdirs:
 | |
| 1094 | +                for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
 | |
| 1095 | +                    # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
 | |
| 1096 | +                    with open(log) as f:
 | |
| 1097 | +                        data = f.read()
 | |
| 1098 | +                        click.echo_via_pager(data)
 | |
| 1155 | 1099 |  | 
| 1156 | 1100 |  | 
| 1157 | 1101 |  ##################################################################
 | 
| ... | ... | @@ -39,6 +39,20 @@ from .types import Symbol, Dependency | 
| 39 | 39 |  #    loader (Loader): The Loader object for this element
 | 
| 40 | 40 |  #
 | 
| 41 | 41 |  class LoadElement():
 | 
| 42 | +    # Dependency():
 | |
| 43 | +    #
 | |
| 44 | +    # A link from a LoadElement to its dependencies.
 | |
| 45 | +    #
 | |
| 46 | +    # Keeps a link to one of the current Element's dependencies, together with
 | |
| 47 | +    # its dependency type.
 | |
| 48 | +    #
 | |
| 49 | +    # Args:
 | |
| 50 | +    #    element (LoadElement): a LoadElement on which there is a dependency
 | |
| 51 | +    #    dep_type (str): the type of dependency this dependency link is
 | |
| 52 | +    class Dependency:
 | |
| 53 | +        def __init__(self, element, dep_type):
 | |
| 54 | +            self.element = element
 | |
| 55 | +            self.dep_type = dep_type
 | |
| 42 | 56 |  | 
| 43 | 57 |      def __init__(self, node, filename, loader):
 | 
| 44 | 58 |  | 
| ... | ... | @@ -74,8 +88,11 @@ class LoadElement(): | 
| 74 | 88 |              'build-depends', 'runtime-depends',
 | 
| 75 | 89 |          ])
 | 
| 76 | 90 |  | 
| 77 | -        # Extract the Dependencies
 | |
| 78 | -        self.deps = _extract_depends_from_node(self.node)
 | |
| 91 | +        self.dependencies = []
 | |
| 92 | + | |
| 93 | +    @property
 | |
| 94 | +    def junction(self):
 | |
| 95 | +        return self._loader.project.junction
 | |
| 79 | 96 |  | 
| 80 | 97 |      # depends():
 | 
| 81 | 98 |      #
 | 
| ... | ... | @@ -101,8 +118,8 @@ class LoadElement(): | 
| 101 | 118 |              return
 | 
| 102 | 119 |  | 
| 103 | 120 |          self._dep_cache = {}
 | 
| 104 | -        for dep in self.deps:
 | |
| 105 | -            elt = self._loader.get_element_for_dep(dep)
 | |
| 121 | +        for dep in self.dependencies:
 | |
| 122 | +            elt = dep.element
 | |
| 106 | 123 |  | 
| 107 | 124 |              # Ensure the cache of the element we depend on
 | 
| 108 | 125 |              elt._ensure_depends_cache()
 | 
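With name-based lookups removed (see the loader changes below), each dependency now links directly to its LoadElement. A standalone sketch of the link structure, using stand-in classes:

    class Dependency:
        def __init__(self, element, dep_type):
            self.element = element    # a resolved element, not a name
            self.dep_type = dep_type  # e.g. 'build' or 'runtime'

    class StubElement:
        def __init__(self, name):
            self.name = name
            self.dependencies = []

    app = StubElement('app.bst')
    lib = StubElement('lib.bst')
    app.dependencies.append(Dependency(lib, 'build'))

    # Traversal follows the link directly; no loader lookup by name:
    assert app.dependencies[0].element is lib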
| ... | ... | @@ -19,7 +19,6 @@ | 
| 19 | 19 |  | 
| 20 | 20 |  import os
 | 
| 21 | 21 |  from functools import cmp_to_key
 | 
| 22 | -from collections import namedtuple
 | |
| 23 | 22 |  from collections.abc import Mapping
 | 
| 24 | 23 |  import tempfile
 | 
| 25 | 24 |  import shutil
 | 
| ... | ... | @@ -32,8 +31,8 @@ from .._profile import Topics, profile_start, profile_end | 
| 32 | 31 |  from .._includes import Includes
 | 
| 33 | 32 |  from .._yamlcache import YamlCache
 | 
| 34 | 33 |  | 
| 35 | -from .types import Symbol, Dependency
 | |
| 36 | -from .loadelement import LoadElement
 | |
| 34 | +from .types import Symbol
 | |
| 35 | +from .loadelement import LoadElement, _extract_depends_from_node
 | |
| 37 | 36 |  from . import MetaElement
 | 
| 38 | 37 |  from . import MetaSource
 | 
| 39 | 38 |  from ..types import CoreWarnings
 | 
| ... | ... | @@ -112,7 +111,7 @@ class Loader(): | 
| 112 | 111 |  | 
| 113 | 112 |          # First pass, recursively load files and populate our table of LoadElements
 | 
| 114 | 113 |          #
 | 
| 115 | -        deps = []
 | |
| 114 | +        target_elements = []
 | |
| 116 | 115 |  | 
| 117 | 116 |          # XXX This will need to be changed to the context's top-level project if this method
 | 
| 118 | 117 |          # is ever used for subprojects
 | 
| ... | ... | @@ -122,10 +121,10 @@ class Loader(): | 
| 122 | 121 |          with YamlCache.open(self._context, cache_file) as yaml_cache:
 | 
| 123 | 122 |              for target in targets:
 | 
| 124 | 123 |                  profile_start(Topics.LOAD_PROJECT, target)
 | 
| 125 | -                junction, name, loader = self._parse_name(target, rewritable, ticker,
 | |
| 126 | -                                                          fetch_subprojects=fetch_subprojects)
 | |
| 127 | -                loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
 | |
| 128 | -                deps.append(Dependency(name, junction=junction))
 | |
| 124 | +                _junction, name, loader = self._parse_name(target, rewritable, ticker,
 | |
| 125 | +                                                           fetch_subprojects=fetch_subprojects)
 | |
| 126 | +                element = loader._load_file(name, rewritable, ticker, fetch_subprojects, yaml_cache)
 | |
| 127 | +                target_elements.append(element)
 | |
| 129 | 128 |                  profile_end(Topics.LOAD_PROJECT, target)
 | 
| 130 | 129 |  | 
| 131 | 130 |          #
 | 
| ... | ... | @@ -134,29 +133,29 @@ class Loader(): | 
| 134 | 133 |  | 
| 135 | 134 |          # Set up a dummy element that depends on all top-level targets
 | 
| 136 | 135 |          # to resolve potential circular dependencies between them
 | 
| 137 | -        DummyTarget = namedtuple('DummyTarget', ['name', 'full_name', 'deps'])
 | |
| 138 | - | |
| 139 | -        dummy = DummyTarget(name='', full_name='', deps=deps)
 | |
| 140 | -        self._elements[''] = dummy
 | |
| 136 | +        dummy_target = LoadElement("", "", self)
 | |
| 137 | +        dummy_target.dependencies.extend(
 | |
| 138 | +            LoadElement.Dependency(element, Symbol.RUNTIME)
 | |
| 139 | +            for element in target_elements
 | |
| 140 | +        )
 | |
| 141 | 141 |  | 
| 142 | 142 |          profile_key = "_".join(t for t in targets)
 | 
| 143 | 143 |          profile_start(Topics.CIRCULAR_CHECK, profile_key)
 | 
| 144 | -        self._check_circular_deps('')
 | |
| 144 | +        self._check_circular_deps(dummy_target)
 | |
| 145 | 145 |          profile_end(Topics.CIRCULAR_CHECK, profile_key)
 | 
| 146 | 146 |  | 
| 147 | 147 |          ret = []
 | 
| 148 | 148 |          #
 | 
| 149 | 149 |          # Sort direct dependencies of elements by their dependency ordering
 | 
| 150 | 150 |          #
 | 
| 151 | -        for target in targets:
 | |
| 152 | -            profile_start(Topics.SORT_DEPENDENCIES, target)
 | |
| 153 | -            junction, name, loader = self._parse_name(target, rewritable, ticker,
 | |
| 154 | -                                                      fetch_subprojects=fetch_subprojects)
 | |
| 155 | -            loader._sort_dependencies(name)
 | |
| 156 | -            profile_end(Topics.SORT_DEPENDENCIES, target)
 | |
| 151 | +        for element in target_elements:
 | |
| 152 | +            loader = element._loader
 | |
| 153 | +            profile_start(Topics.SORT_DEPENDENCIES, element.name)
 | |
| 154 | +            loader._sort_dependencies(element)
 | |
| 155 | +            profile_end(Topics.SORT_DEPENDENCIES, element.name)
 | |
| 157 | 156 |              # Finally, wrap what we have into LoadElements and return the target
 | 
| 158 | 157 |              #
 | 
| 159 | -            ret.append(loader._collect_element(name))
 | |
| 158 | +            ret.append(loader._collect_element(element))
 | |
| 160 | 159 |  | 
| 161 | 160 |          return ret
 | 
| 162 | 161 |  | 
| ... | ... | @@ -184,22 +183,6 @@ class Loader(): | 
| 184 | 183 |              if os.path.exists(self._tempdir):
 | 
| 185 | 184 |                  shutil.rmtree(self._tempdir)
 | 
| 186 | 185 |  | 
| 187 | -    # get_element_for_dep():
 | |
| 188 | -    #
 | |
| 189 | -    # Gets a cached LoadElement by Dependency object
 | |
| 190 | -    #
 | |
| 191 | -    # This is used by LoadElement
 | |
| 192 | -    #
 | |
| 193 | -    # Args:
 | |
| 194 | -    #    dep (Dependency): The dependency to search for
 | |
| 195 | -    #
 | |
| 196 | -    # Returns:
 | |
| 197 | -    #    (LoadElement): The cached LoadElement
 | |
| 198 | -    #
 | |
| 199 | -    def get_element_for_dep(self, dep):
 | |
| 200 | -        loader = self._get_loader_for_dep(dep)
 | |
| 201 | -        return loader._elements[dep.name]
 | |
| 202 | - | |
| 203 | 186 |      ###########################################
 | 
| 204 | 187 |      #            Private Methods              #
 | 
| 205 | 188 |      ###########################################
 | 
| ... | ... | @@ -272,8 +255,10 @@ class Loader(): | 
| 272 | 255 |  | 
| 273 | 256 |          self._elements[filename] = element
 | 
| 274 | 257 |  | 
| 258 | +        dependencies = _extract_depends_from_node(node)
 | |
| 259 | + | |
| 275 | 260 |          # Load all dependency files for the new LoadElement
 | 
| 276 | -        for dep in element.deps:
 | |
| 261 | +        for dep in dependencies:
 | |
| 277 | 262 |              if dep.junction:
 | 
| 278 | 263 |                  self._load_file(dep.junction, rewritable, ticker, fetch_subprojects, yaml_cache)
 | 
| 279 | 264 |                  loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
 | 
| ... | ... | @@ -288,7 +273,9 @@ class Loader(): | 
| 288 | 273 |                                  "{}: Cannot depend on junction"
 | 
| 289 | 274 |                                  .format(dep.provenance))
 | 
| 290 | 275 |  | 
| 291 | -        deps_names = [dep.name for dep in element.deps]
 | |
| 276 | +            element.dependencies.append(LoadElement.Dependency(dep_element, dep.dep_type))
 | |
| 277 | + | |
| 278 | +        deps_names = [dep.name for dep in dependencies]
 | |
| 292 | 279 |          self._warn_invalid_elements(deps_names)
 | 
| 293 | 280 |  | 
| 294 | 281 |          return element
 | 
| ... | ... | @@ -299,12 +286,12 @@ class Loader(): | 
| 299 | 286 |      # dependencies already resolved.
 | 
| 300 | 287 |      #
 | 
| 301 | 288 |      # Args:
 | 
| 302 | -    #    element_name (str): The element-path relative element name to check
 | |
| 289 | +    #    element (LoadElement): The element to check
 | |
| 303 | 290 |      #
 | 
| 304 | 291 |      # Raises:
 | 
| 305 | 292 |      #    (LoadError): In case there was a circular dependency error
 | 
| 306 | 293 |      #
 | 
| 307 | -    def _check_circular_deps(self, element_name, check_elements=None, validated=None, sequence=None):
 | |
| 294 | +    def _check_circular_deps(self, element, check_elements=None, validated=None, sequence=None):
 | |
| 308 | 295 |  | 
| 309 | 296 |          if check_elements is None:
 | 
| 310 | 297 |              check_elements = {}
 | 
| ... | ... | @@ -313,38 +300,31 @@ class Loader(): | 
| 313 | 300 |          if sequence is None:
 | 
| 314 | 301 |              sequence = []
 | 
| 315 | 302 |  | 
| 316 | -        element = self._elements[element_name]
 | |
| 317 | - | |
| 318 | -        # element name must be unique across projects
 | |
| 319 | -        # to be usable as key for the check_elements and validated dicts
 | |
| 320 | -        element_name = element.full_name
 | |
| 321 | - | |
| 322 | 303 |          # Skip already validated branches
 | 
| 323 | -        if validated.get(element_name) is not None:
 | |
| 304 | +        if validated.get(element) is not None:
 | |
| 324 | 305 |              return
 | 
| 325 | 306 |  | 
| 326 | -        if check_elements.get(element_name) is not None:
 | |
| 307 | +        if check_elements.get(element) is not None:
 | |
| 327 | 308 |              # Create `chain`, the loop of element dependencies from this
 | 
| 328 | 309 |              # element back to itself, by trimming everything before this
 | 
| 329 | 310 |              # element from the sequence under consideration.
 | 
| 330 | -            chain = sequence[sequence.index(element_name):]
 | |
| 331 | -            chain.append(element_name)
 | |
| 311 | +            chain = sequence[sequence.index(element.full_name):]
 | |
| 312 | +            chain.append(element.full_name)
 | |
| 332 | 313 |              raise LoadError(LoadErrorReason.CIRCULAR_DEPENDENCY,
 | 
| 333 | 314 |                              ("Circular dependency detected at element: {}\n" +
 | 
| 334 | 315 |                               "Dependency chain: {}")
 | 
| 335 | -                            .format(element.name, " -> ".join(chain)))
 | |
| 316 | +                            .format(element.full_name, " -> ".join(chain)))
 | |
| 336 | 317 |  | 
| 337 | 318 |          # Push / Check each dependency / Pop
 | 
| 338 | -        check_elements[element_name] = True
 | |
| 339 | -        sequence.append(element_name)
 | |
| 340 | -        for dep in element.deps:
 | |
| 341 | -            loader = self._get_loader_for_dep(dep)
 | |
| 342 | -            loader._check_circular_deps(dep.name, check_elements, validated, sequence)
 | |
| 343 | -        del check_elements[element_name]
 | |
| 319 | +        check_elements[element] = True
 | |
| 320 | +        sequence.append(element.full_name)
 | |
| 321 | +        for dep in element.dependencies:
 | |
| 322 | +            dep.element._loader._check_circular_deps(dep.element, check_elements, validated, sequence)
 | |
| 323 | +        del check_elements[element]
 | |
| 344 | 324 |          sequence.pop()
 | 
| 345 | 325 |  | 
| 346 | 326 |          # Eliminate duplicate paths
 | 
| 347 | -        validated[element_name] = True
 | |
| 327 | +        validated[element] = True
 | |
| 348 | 328 |  | 
| 349 | 329 |      # _sort_dependencies():
 | 
| 350 | 330 |      #
 | 
| ... | ... | @@ -357,28 +337,21 @@ class Loader(): | 
| 357 | 337 |      # sorts throughout the build process.
 | 
| 358 | 338 |      #
 | 
| 359 | 339 |      # Args:
 | 
| 360 | -    #    element_name (str): The element-path relative element name to sort
 | |
| 340 | +    #    element (LoadElement): The element to sort
 | |
| 361 | 341 |      #
 | 
| 362 | -    def _sort_dependencies(self, element_name, visited=None):
 | |
| 342 | +    def _sort_dependencies(self, element, visited=None):
 | |
| 363 | 343 |          if visited is None:
 | 
| 364 | -            visited = {}
 | |
| 344 | +            visited = set()
 | |
| 365 | 345 |  | 
| 366 | -        element = self._elements[element_name]
 | |
| 367 | - | |
| 368 | -        # element name must be unique across projects
 | |
| 369 | -        # to be usable as key for the visited dict
 | |
| 370 | -        element_name = element.full_name
 | |
| 371 | - | |
| 372 | -        if visited.get(element_name) is not None:
 | |
| 346 | +        if element in visited:
 | |
| 373 | 347 |              return
 | 
| 374 | 348 |  | 
| 375 | -        for dep in element.deps:
 | |
| 376 | -            loader = self._get_loader_for_dep(dep)
 | |
| 377 | -            loader._sort_dependencies(dep.name, visited=visited)
 | |
| 349 | +        for dep in element.dependencies:
 | |
| 350 | +            dep.element._loader._sort_dependencies(dep.element, visited=visited)
 | |
| 378 | 351 |  | 
| 379 | 352 |          def dependency_cmp(dep_a, dep_b):
 | 
| 380 | -            element_a = self.get_element_for_dep(dep_a)
 | |
| 381 | -            element_b = self.get_element_for_dep(dep_b)
 | |
| 353 | +            element_a = dep_a.element
 | |
| 354 | +            element_b = dep_b.element
 | |
| 382 | 355 |  | 
| 383 | 356 |              # Sort on inter element dependency first
 | 
| 384 | 357 |              if element_a.depends(element_b):
 | 
| ... | ... | @@ -395,21 +368,21 @@ class Loader(): | 
| 395 | 368 |                      return -1
 | 
| 396 | 369 |  | 
| 397 | 370 |              # All things being equal, string comparison.
 | 
| 398 | -            if dep_a.name > dep_b.name:
 | |
| 371 | +            if element_a.name > element_b.name:
 | |
| 399 | 372 |                  return 1
 | 
| 400 | -            elif dep_a.name < dep_b.name:
 | |
| 373 | +            elif element_a.name < element_b.name:
 | |
| 401 | 374 |                  return -1
 | 
| 402 | 375 |  | 
| 403 | 376 |              # Sort local elements before junction elements
 | 
| 404 | 377 |              # and use string comparison between junction elements
 | 
| 405 | -            if dep_a.junction and dep_b.junction:
 | |
| 406 | -                if dep_a.junction > dep_b.junction:
 | |
| 378 | +            if element_a.junction and element_b.junction:
 | |
| 379 | +                if element_a.junction > element_b.junction:
 | |
| 407 | 380 |                      return 1
 | 
| 408 | -                elif dep_a.junction < dep_b.junction:
 | |
| 381 | +                elif element_a.junction < element_b.junction:
 | |
| 409 | 382 |                      return -1
 | 
| 410 | -            elif dep_a.junction:
 | |
| 383 | +            elif element_a.junction:
 | |
| 411 | 384 |                  return -1
 | 
| 412 | -            elif dep_b.junction:
 | |
| 385 | +            elif element_b.junction:
 | |
| 413 | 386 |                  return 1
 | 
| 414 | 387 |  | 
| 415 | 388 |              # This won't ever happen
 | 
| ... | ... | @@ -418,26 +391,23 @@ class Loader(): | 
| 418 | 391 |          # Now dependency sort, we ensure that if any direct dependency
 | 
| 419 | 392 |          # directly or indirectly depends on another direct dependency,
 | 
| 420 | 393 |          # it is found later in the list.
 | 
| 421 | -        element.deps.sort(key=cmp_to_key(dependency_cmp))
 | |
| 394 | +        element.dependencies.sort(key=cmp_to_key(dependency_cmp))
 | |
| 422 | 395 |  | 
| 423 | -        visited[element_name] = True
 | |
| 396 | +        visited.add(element)
 | |
| 424 | 397 |  | 
| 425 | 398 |      # _collect_element()
 | 
| 426 | 399 |      #
 | 
| 427 | 400 |      # Collect the toplevel elements we have
 | 
| 428 | 401 |      #
 | 
| 429 | 402 |      # Args:
 | 
| 430 | -    #    element_name (str): The element-path relative element name to sort
 | |
| 403 | +    #    element (LoadElement): The element for which to load a MetaElement
 | |
| 431 | 404 |      #
 | 
| 432 | 405 |      # Returns:
 | 
| 433 | 406 |      #    (MetaElement): A recursively loaded MetaElement
 | 
| 434 | 407 |      #
 | 
| 435 | -    def _collect_element(self, element_name):
 | |
| 436 | - | |
| 437 | -        element = self._elements[element_name]
 | |
| 438 | - | |
| 408 | +    def _collect_element(self, element):
 | |
| 439 | 409 |          # Return the already built one, if we already built it
 | 
| 440 | -        meta_element = self._meta_elements.get(element_name)
 | |
| 410 | +        meta_element = self._meta_elements.get(element.name)
 | |
| 441 | 411 |          if meta_element:
 | 
| 442 | 412 |              return meta_element
 | 
| 443 | 413 |  | 
| ... | ... | @@ -461,10 +431,10 @@ class Loader(): | 
| 461 | 431 |                  del source[Symbol.DIRECTORY]
 | 
| 462 | 432 |  | 
| 463 | 433 |              index = sources.index(source)
 | 
| 464 | -            meta_source = MetaSource(element_name, index, element_kind, kind, source, directory)
 | |
| 434 | +            meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
 | |
| 465 | 435 |              meta_sources.append(meta_source)
 | 
| 466 | 436 |  | 
| 467 | -        meta_element = MetaElement(self.project, element_name, element_kind,
 | |
| 437 | +        meta_element = MetaElement(self.project, element.name, element_kind,
 | |
| 468 | 438 |                                     elt_provenance, meta_sources,
 | 
| 469 | 439 |                                     _yaml.node_get(node, Mapping, Symbol.CONFIG, default_value={}),
 | 
| 470 | 440 |                                     _yaml.node_get(node, Mapping, Symbol.VARIABLES, default_value={}),
 | 
| ... | ... | @@ -475,12 +445,12 @@ class Loader(): | 
| 475 | 445 |                                     element_kind == 'junction')
 | 
| 476 | 446 |  | 
| 477 | 447 |          # Cache it now, make sure it's already there before recursing
 | 
| 478 | -        self._meta_elements[element_name] = meta_element
 | |
| 448 | +        self._meta_elements[element.name] = meta_element
 | |
| 479 | 449 |  | 
| 480 | 450 |          # Descend
 | 
| 481 | -        for dep in element.deps:
 | |
| 482 | -            loader = self._get_loader_for_dep(dep)
 | |
| 483 | -            meta_dep = loader._collect_element(dep.name)
 | |
| 451 | +        for dep in element.dependencies:
 | |
| 452 | +            loader = dep.element._loader
 | |
| 453 | +            meta_dep = loader._collect_element(dep.element)
 | |
| 484 | 454 |              if dep.dep_type != 'runtime':
 | 
| 485 | 455 |                  meta_element.build_dependencies.append(meta_dep)
 | 
| 486 | 456 |              if dep.dep_type != 'build':
 | 
| ... | ... | @@ -539,7 +509,7 @@ class Loader(): | 
| 539 | 509 |                  return None
 | 
| 540 | 510 |  | 
| 541 | 511 |          # meta junction element
 | 
| 542 | -        meta_element = self._collect_element(filename)
 | |
| 512 | +        meta_element = self._collect_element(self._elements[filename])
 | |
| 543 | 513 |          if meta_element.kind != 'junction':
 | 
| 544 | 514 |              raise LoadError(LoadErrorReason.INVALID_DATA,
 | 
| 545 | 515 |                              "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
 | 
| ... | ... | @@ -601,23 +571,6 @@ class Loader(): | 
| 601 | 571 |  | 
| 602 | 572 |          return loader
 | 
| 603 | 573 |  | 
| 604 | -    # _get_loader_for_dep():
 | |
| 605 | -    #
 | |
| 606 | -    # Gets the appropriate Loader for a Dependency object
 | |
| 607 | -    #
 | |
| 608 | -    # Args:
 | |
| 609 | -    #    dep (Dependency): A Dependency object
 | |
| 610 | -    #
 | |
| 611 | -    # Returns:
 | |
| 612 | -    #    (Loader): The Loader object to use for this Dependency
 | |
| 613 | -    #
 | |
| 614 | -    def _get_loader_for_dep(self, dep):
 | |
| 615 | -        if dep.junction:
 | |
| 616 | -            # junction dependency, delegate to appropriate loader
 | |
| 617 | -            return self._loaders[dep.junction]
 | |
| 618 | -        else:
 | |
| 619 | -            return self
 | |
| 620 | - | |
| 621 | 574 |      # _parse_name():
 | 
| 622 | 575 |      #
 | 
| 623 | 576 |      # Get junction and base name of element along with loader for the sub-project
 | 
| ... | ... | @@ -38,20 +38,20 @@ class MetaElement(): | 
| 38 | 38 |      #    sandbox: Configuration specific to the sandbox environment
 | 
| 39 | 39 |      #    first_pass: The element is to be loaded with first pass configuration (junction)
 | 
| 40 | 40 |      #
 | 
| 41 | -    def __init__(self, project, name, kind, provenance, sources, config,
 | |
| 42 | -                 variables, environment, env_nocache, public, sandbox,
 | |
| 43 | -                 first_pass):
 | |
| 41 | +    def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
 | |
| 42 | +                 variables=None, environment=None, env_nocache=None, public=None,
 | |
| 43 | +                 sandbox=None, first_pass=False):
 | |
| 44 | 44 |          self.project = project
 | 
| 45 | 45 |          self.name = name
 | 
| 46 | 46 |          self.kind = kind
 | 
| 47 | 47 |          self.provenance = provenance
 | 
| 48 | 48 |          self.sources = sources
 | 
| 49 | -        self.config = config
 | |
| 50 | -        self.variables = variables
 | |
| 51 | -        self.environment = environment
 | |
| 52 | -        self.env_nocache = env_nocache
 | |
| 53 | -        self.public = public
 | |
| 54 | -        self.sandbox = sandbox
 | |
| 49 | +        self.config = config or {}
 | |
| 50 | +        self.variables = variables or {}
 | |
| 51 | +        self.environment = environment or {}
 | |
| 52 | +        self.env_nocache = env_nocache or []
 | |
| 53 | +        self.public = public or {}
 | |
| 54 | +        self.sandbox = sandbox or {}
 | |
| 55 | 55 |          self.build_dependencies = []
 | 
| 56 | 56 |          self.dependencies = []
 | 
| 57 | 57 |          self.first_pass = first_pass | 
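These keyword defaults allow a bare MetaElement to be built from just a project and a name, which is what ArtifactElement relies on above. A minimal illustration, assuming this branch's MetaElement:

    from buildstream._loader.metaelement import MetaElement

    project = object()  # stand-in; a real Project object is expected
    meta = MetaElement(project, 'hello')
    assert meta.kind is None and meta.config == {}
    assert meta.build_dependencies == [] and meta.dependencies == []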
| ... | ... | @@ -26,6 +26,7 @@ from . import utils | 
| 26 | 26 |  from . import _cachekey
 | 
| 27 | 27 |  from . import _site
 | 
| 28 | 28 |  from . import _yaml
 | 
| 29 | +from ._artifactelement import ArtifactElement
 | |
| 29 | 30 |  from ._profile import Topics, profile_start, profile_end
 | 
| 30 | 31 |  from ._exceptions import LoadError, LoadErrorReason
 | 
| 31 | 32 |  from ._options import OptionPool
 | 
| ... | ... | @@ -255,6 +256,19 @@ class Project(): | 
| 255 | 256 |          else:
 | 
| 256 | 257 |              return self.config.element_factory.create(self._context, self, meta)
 | 
| 257 | 258 |  | 
| 259 | +    # create_artifact_element()
 | |
| 260 | +    #
 | |
| 261 | +    # Instantiate and return an ArtifactElement
 | |
| 262 | +    #
 | |
| 263 | +    # Args:
 | |
| 264 | +    #    ref (str): The artifact ref
 | |
| 265 | +    #
 | |
| 266 | +    # Returns:
 | |
| 267 | +    #    (ArtifactElement): A newly created ArtifactElement object
 | |
| 268 | +    #
 | |
| 269 | +    def create_artifact_element(self, ref):
 | |
| 270 | +        return ArtifactElement(self._context, ref)
 | |
| 271 | + | |
| 258 | 272 |      # create_source()
 | 
| 259 | 273 |      #
 | 
| 260 | 274 |      # Instantiate and return a Source
 | 
| ... | ... | @@ -27,8 +27,9 @@ import shutil | 
| 27 | 27 |  import tarfile
 | 
| 28 | 28 |  import tempfile
 | 
| 29 | 29 |  from contextlib import contextmanager, suppress
 | 
| 30 | +from fnmatch import fnmatch
 | |
| 30 | 31 |  | 
| 31 | -from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
 | |
| 32 | +from ._exceptions import StreamError, ImplError, BstError, ArtifactElementError, set_last_task_error
 | |
| 32 | 33 |  from ._message import Message, MessageType
 | 
| 33 | 34 |  from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
 | 
| 34 | 35 |  from ._pipeline import Pipeline, PipelineSelection
 | 
| ... | ... | @@ -108,19 +109,21 @@ class Stream(): | 
| 108 | 109 |      def load_selection(self, targets, *,
 | 
| 109 | 110 |                         selection=PipelineSelection.NONE,
 | 
| 110 | 111 |                         except_targets=(),
 | 
| 111 | -                       use_artifact_config=False):
 | |
| 112 | +                       use_artifact_config=False,
 | |
| 113 | +                       load_refs=False):
 | |
| 112 | 114 |  | 
| 113 | 115 |          profile_start(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
 | 
| 114 | 116 |  | 
| 115 | -        elements, _ = self._load(targets, (),
 | |
| 116 | -                                 selection=selection,
 | |
| 117 | -                                 except_targets=except_targets,
 | |
| 118 | -                                 fetch_subprojects=False,
 | |
| 119 | -                                 use_artifact_config=use_artifact_config)
 | |
| 117 | +        target_objects, _ = self._load(targets, (),
 | |
| 118 | +                                       selection=selection,
 | |
| 119 | +                                       except_targets=except_targets,
 | |
| 120 | +                                       fetch_subprojects=False,
 | |
| 121 | +                                       use_artifact_config=use_artifact_config,
 | |
| 122 | +                                       load_refs=load_refs)
 | |
| 120 | 123 |  | 
| 121 | 124 |          profile_end(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, '-') for t in targets))
 | 
| 122 | 125 |  | 
| 123 | -        return elements
 | |
| 126 | +        return target_objects
 | |
| 124 | 127 |  | 
| 125 | 128 |      # shell()
 | 
| 126 | 129 |      #
 | 
| ... | ... | @@ -481,6 +484,31 @@ class Stream(): | 
| 481 | 484 |              raise StreamError("Error while staging dependencies into a sandbox"
 | 
| 482 | 485 |                                ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
 | 
| 483 | 486 |  | 
| 487 | +    # artifact_log()
 | |
| 488 | +    #
 | |
| 489 | +    # Show the full logs of the given artifacts
 | |
| 490 | +    #
 | |
| 491 | +    # Args:
 | |
| 492 | +    #    targets (list): Targets to view the logs of
 | |
| 493 | +    #
 | |
| 494 | +    # Returns:
 | |
| 495 | +    #    logsdirs (list): A list of CasBasedDirectory objects containing artifact logs
 | |
| 496 | +    #
 | |
| 497 | +    def artifact_log(self, targets):
 | |
| 498 | +        # Return list of Element and/or ArtifactElement objects
 | |
| 499 | +        target_objects = self.load_selection(targets, selection=PipelineSelection.NONE, load_refs=True)
 | |
| 500 | + | |
| 501 | +        logsdirs = []
 | |
| 502 | +        for obj in target_objects:
 | |
| 503 | +            ref = obj.get_artifact_name()
 | |
| 504 | +            if not obj._cached():
 | |
| 505 | +                self._message(MessageType.WARN, "{} is not cached".format(ref))
 | |
| 506 | +                continue
 | |
| 507 | + | |
| 508 | +            logsdirs.append(self._artifacts.get_artifact_logs(ref))
 | |
| 509 | + | |
| 510 | +        return logsdirs
 | |
| 511 | + | |
| 484 | 512 |      # source_checkout()
 | 
| 485 | 513 |      #
 | 
| 486 | 514 |      # Checkout sources of the target element to the specified location
 | 
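Together with the cli.py changes above, the flow is: the frontend passes raw target strings to Stream.artifact_log(), which returns one CasBasedDirectory of logs per cached target. A sketch of a caller (hypothetical variable names, assuming an initialised Stream):

    # 'stream' is assumed to be an initialised Stream instance
    logsdirs = stream.artifact_log(['hello.bst', 'myproject/hello/' + 'a' * 64])
    for logsdir in logsdirs:
        # Each entry is a CasBasedDirectory; export its files to read
        # the logs from the filesystem, as the frontend does above
        logsdir.export_files('/tmp/artifact-logs', can_link=True)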
| ... | ... | @@ -912,25 +940,36 @@ class Stream(): | 
| 912 | 940 |                use_artifact_config=False,
 | 
| 913 | 941 |                artifact_remote_url=None,
 | 
| 914 | 942 |                fetch_subprojects=False,
 | 
| 915 | -              dynamic_plan=False):
 | |
| 943 | +              dynamic_plan=False,
 | |
| 944 | +              load_refs=False):
 | |
| 945 | + | |
| 946 | +        # Classify element and artifact strings
 | |
| 947 | +        target_elements, target_artifacts = self._classify_artifacts(targets)
 | |
| 948 | + | |
| 949 | +        if target_artifacts and not load_refs:
 | |
| 950 | +            detail = '\n'.join(target_artifacts)
 | |
| 951 | +            raise ArtifactElementError("Cannot perform this operation with artifact refs:", detail=detail)
 | |
| 916 | 952 |  | 
| 917 | 953 |          # Load rewritable if we have any tracking selection to make
 | 
| 918 | 954 |          rewritable = False
 | 
| 919 | 955 |          if track_targets:
 | 
| 920 | 956 |              rewritable = True
 | 
| 921 | 957 |  | 
| 922 | -        # Load all targets
 | |
| 958 | +        # Load all target elements
 | |
| 923 | 959 |          elements, except_elements, track_elements, track_except_elements = \
 | 
| 924 | -            self._pipeline.load([targets, except_targets, track_targets, track_except_targets],
 | |
| 960 | +            self._pipeline.load([target_elements, except_targets, track_targets, track_except_targets],
 | |
| 925 | 961 |                                  rewritable=rewritable,
 | 
| 926 | 962 |                                  fetch_subprojects=fetch_subprojects)
 | 
| 927 | 963 |  | 
| 964 | +        # Obtain the ArtifactElement objects
 | |
| 965 | +        artifacts = [self._project.create_artifact_element(ref) for ref in target_artifacts]
 | |
| 966 | + | |
| 928 | 967 |          # Optionally filter out junction elements
 | 
| 929 | 968 |          if ignore_junction_targets:
 | 
| 930 | 969 |              elements = [e for e in elements if e.get_kind() != 'junction']
 | 
| 931 | 970 |  | 
| 932 | 971 |          # Hold on to the targets
 | 
| 933 | -        self.targets = elements
 | |
| 972 | +        self.targets = elements + artifacts
 | |
| 934 | 973 |  | 
| 935 | 974 |          # Here we should raise an error if the track_elements targets
 | 
| 936 | 975 |          # are not dependencies of the primary targets, this is not
 | 
| ... | ... | @@ -987,9 +1026,9 @@ class Stream(): | 
| 987 | 1026 |  | 
| 988 | 1027 |          # Now move on to loading primary selection.
 | 
| 989 | 1028 |          #
 | 
| 990 | -        self._pipeline.resolve_elements(elements)
 | |
| 991 | -        selected = self._pipeline.get_selection(elements, selection, silent=False)
 | |
| 992 | -        selected = self._pipeline.except_elements(elements,
 | |
| 1029 | +        self._pipeline.resolve_elements(self.targets)
 | |
| 1030 | +        selected = self._pipeline.get_selection(self.targets, selection, silent=False)
 | |
| 1031 | +        selected = self._pipeline.except_elements(self.targets,
 | |
| 993 | 1032 |                                                    selected,
 | 
| 994 | 1033 |                                                    except_elements)
 | 
| 995 | 1034 |  | 
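A short sketch of the new load_refs gating in _load(), under the assumption that the caller goes through load_selection(); the target strings are illustrative:

    # Element names and artifact refs may be mixed in one invocation only
    # when the caller opts in with load_refs=True; otherwise the artifact
    # refs trigger an ArtifactElementError before any loading happens.
    ref = 'myproject/app/' + '0' * 64   # a ref with a 64-character key
    stream.load_selection(['app.bst', ref],
                          selection=PipelineSelection.NONE,
                          load_refs=True)
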
| ... | ... | @@ -1321,3 +1360,59 @@ class Stream(): | 
| 1321 | 1360 |                  required_list.append(element)
 | 
| 1322 | 1361 |  | 
| 1323 | 1362 |          return required_list
 | 
| 1363 | + | |
| 1364 | +    # _classify_artifacts()
 | |
| 1365 | +    #
 | |
| 1366 | +    # Split up a list of targets into element names and artifact refs
 | |
| 1367 | +    #
 | |
| 1368 | +    # Args:
 | |
| 1369 | +    #    targets (list): A list of targets
 | |
| 1370 | +    #
 | |
| 1371 | +    # Returns:
 | |
| 1372 | +    #    (list): element names present in the targets
 | |
| 1373 | +    #    (list): artifact refs present in the targets
 | |
| 1374 | +    #
 | |
| 1375 | +    def _classify_artifacts(self, targets):
 | |
| 1376 | +        element_targets = []
 | |
| 1377 | +        artifact_refs = []
 | |
| 1378 | +        element_globs = []
 | |
| 1379 | +        artifact_globs = []
 | |
| 1380 | + | |
| 1381 | +        for target in targets:
 | |
| 1382 | +            if target.endswith('.bst'):
 | |
| 1383 | +                if any(c in "*?[" for c in target):
 | |
| 1384 | +                    element_globs.append(target)
 | |
| 1385 | +                else:
 | |
| 1386 | +                    element_targets.append(target)
 | |
| 1387 | +            else:
 | |
| 1388 | +                if any(c in "*?[" for c in target):
 | |
| 1389 | +                    artifact_globs.append(target)
 | |
| 1390 | +                else:
 | |
| 1391 | +                    try:
 | |
| 1392 | +                        ref = target.split('/', 2)
 | |
| 1393 | +                        key = ref[2]
 | |
| 1394 | +                    except IndexError:
 | |
| 1395 | +                        element_targets.append(target)
 | |
| 1396 | +                        continue
 | |
| 1397 | +                    if len(key) != 64:
 | |
| 1398 | +                        element_targets.append(target)
 | |
| 1399 | +                        continue
 | |
| 1400 | +                    artifact_refs.append(target)
 | |
| 1401 | + | |
| 1402 | +        if element_globs:
 | |
| 1403 | +            for dirpath, _, filenames in os.walk(self._project.element_path):
 | |
| 1404 | +                for filename in filenames:
 | |
| 1405 | +                    element_path = os.path.join(dirpath, filename)
 | |
| 1406 | +                    length = len(self._project.element_path) + 1
 | |
| 1407 | +                    element_path = element_path[length:]  # Strip out the element_path
 | |
| 1408 | + | |
| 1409 | +                    if any(fnmatch(element_path, glob) for glob in element_globs):
 | |
| 1410 | +                        element_targets.append(element_path)
 | |
| 1411 | + | |
| 1412 | +        if artifact_globs:
 | |
| 1413 | +            for glob in artifact_globs:
 | |
| 1414 | +                artifact_refs.extend(self._artifacts.list_artifacts(glob=glob))
 | |
| 1415 | +            if not artifact_refs:
 | |
| 1416 | +                self._message(MessageType.WARN, "No artifacts found for globs: {}".format(', '.join(artifact_globs)))
 | |
| 1417 | + | |
| 1418 | +        return element_targets, artifact_refs | 
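A worked example of the classification rules above (the project and element names are purely illustrative):

    targets = [
        'app.bst',                    # plain element name       -> element_targets
        'core/*.bst',                 # glob ending in .bst      -> element_globs
        'myproject/app/' + 'a' * 64,  # project/element/64-char key -> artifact_refs
        'myproject/app/1234',         # third field not 64 chars -> element_targets
        'myproject/app/*',            # non-.bst glob            -> artifact_globs
    ]
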
| ... | ... | @@ -82,6 +82,7 @@ import contextlib | 
| 82 | 82 |  from contextlib import contextmanager
 | 
| 83 | 83 |  import tempfile
 | 
| 84 | 84 |  import shutil
 | 
| 85 | +import string
 | |
| 85 | 86 |  | 
| 86 | 87 |  from . import _yaml
 | 
| 87 | 88 |  from ._variables import Variables
 | 
| ... | ... | @@ -577,6 +578,38 @@ class Element(Plugin): | 
| 577 | 578 |          self.__assert_cached()
 | 
| 578 | 579 |          return self.__compute_splits(include, exclude, orphans)
 | 
| 579 | 580 |  | 
| 581 | +    def get_artifact_name(self, key=None):
 | |
| 582 | +        """Compute and return this element's full artifact name
 | |
| 583 | + | |
| 584 | +        Generate a full name for an artifact, including the project
 | |
| 585 | +        namespace, element name and cache key.
 | |
| 586 | + | |
| 587 | +        This can also be used as a relative path safely, and
 | |
| 588 | +        will normalize parts of the element name such that only
 | |
| 589 | +        digits, letters and some select characters are allowed.
 | |
| 590 | + | |
| 591 | +        Args:
 | |
| 592 | +           key (str): The element's cache key. Defaults to None; if not given, the element's current cache key is used
 | |
| 593 | + | |
| 594 | +        Returns:
 | |
| 595 | +           (str): The relative path for the artifact
 | |
| 596 | +        """
 | |
| 597 | +        project = self._get_project()
 | |
| 598 | +        if key is None:
 | |
| 599 | +            key = self._get_cache_key()
 | |
| 600 | + | |
| 601 | +        assert key is not None
 | |
| 602 | + | |
| 603 | +        valid_chars = string.digits + string.ascii_letters + '-._'
 | |
| 604 | +        element_name = ''.join([
 | |
| 605 | +            x if x in valid_chars else '_'
 | |
| 606 | +            for x in self.normal_name
 | |
| 607 | +        ])
 | |
| 608 | + | |
| 609 | +        # Note that project names are not allowed to contain slashes. Element names containing
 | |
| 610 | +        # a '/' will have this replaced with a '-' upon Element object instantiation.
 | |
| 611 | +        return '{0}/{1}/{2}'.format(project.name, element_name, key)
 | |
| 612 | + | |
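For illustration, the shape of the returned ref (project and element names hypothetical, assuming normal_name drops the .bst extension as usual):

    ref = element.get_artifact_name()
    # -> 'myproject/app/<64-character cache key>'
    # Only digits, letters and '-._' survive in the element part, so the
    # ref is safe to use as a relative path.
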
| 580 | 613 |      def stage_artifact(self, sandbox, *, path=None, include=None, exclude=None, orphans=True, update_mtimes=None):
 | 
| 581 | 614 |          """Stage this element's output artifact in the sandbox
 | 
| 582 | 615 |  | 
| ... | ... | @@ -1118,7 +1151,7 @@ class Element(Plugin): | 
| 1118 | 1151 |                      e.name for e in self.dependencies(Scope.BUILD, recurse=False)
 | 
| 1119 | 1152 |                  ]
 | 
| 1120 | 1153 |  | 
| 1121 | -            self.__weak_cache_key = self.__calculate_cache_key(dependencies)
 | |
| 1154 | +            self.__weak_cache_key = self._calculate_cache_key(dependencies)
 | |
| 1122 | 1155 |  | 
| 1123 | 1156 |              if self.__weak_cache_key is None:
 | 
| 1124 | 1157 |                  # Weak cache key could not be calculated yet
 | 
| ... | ... | @@ -1147,8 +1180,7 @@ class Element(Plugin): | 
| 1147 | 1180 |              dependencies = [
 | 
| 1148 | 1181 |                  e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
 | 
| 1149 | 1182 |              ]
 | 
| 1150 | -            self.__strict_cache_key = self.__calculate_cache_key(dependencies)
 | |
| 1151 | - | |
| 1183 | +            self.__strict_cache_key = self._calculate_cache_key(dependencies)
 | |
| 1152 | 1184 |              if self.__strict_cache_key is None:
 | 
| 1153 | 1185 |                  # Strict cache key could not be calculated yet
 | 
| 1154 | 1186 |                  return
 | 
| ... | ... | @@ -1190,7 +1222,7 @@ class Element(Plugin): | 
| 1190 | 1222 |                  dependencies = [
 | 
| 1191 | 1223 |                      e._get_cache_key() for e in self.dependencies(Scope.BUILD)
 | 
| 1192 | 1224 |                  ]
 | 
| 1193 | -                self.__cache_key = self.__calculate_cache_key(dependencies)
 | |
| 1225 | +                self.__cache_key = self._calculate_cache_key(dependencies)
 | |
| 1194 | 1226 |  | 
| 1195 | 1227 |              if self.__cache_key is None:
 | 
| 1196 | 1228 |                  # Strong cache key could not be calculated yet
 | 
| ... | ... | @@ -2032,41 +2064,7 @@ class Element(Plugin): | 
| 2032 | 2064 |                  source._fetch(previous_sources)
 | 
| 2033 | 2065 |              previous_sources.append(source)
 | 
| 2034 | 2066 |  | 
| 2035 | -    #############################################################
 | |
| 2036 | -    #                   Private Local Methods                   #
 | |
| 2037 | -    #############################################################
 | |
| 2038 | - | |
| 2039 | -    # __update_source_state()
 | |
| 2040 | -    #
 | |
| 2041 | -    # Updates source consistency state
 | |
| 2042 | -    #
 | |
| 2043 | -    def __update_source_state(self):
 | |
| 2044 | - | |
| 2045 | -        # Cannot resolve source state until tracked
 | |
| 2046 | -        if self.__tracking_scheduled:
 | |
| 2047 | -            return
 | |
| 2048 | - | |
| 2049 | -        self.__consistency = Consistency.CACHED
 | |
| 2050 | -        workspace = self._get_workspace()
 | |
| 2051 | - | |
| 2052 | -        # Special case for workspaces
 | |
| 2053 | -        if workspace:
 | |
| 2054 | - | |
| 2055 | -            # A workspace is considered inconsistent in the case
 | |
| 2056 | -            # that its directory went missing
 | |
| 2057 | -            #
 | |
| 2058 | -            fullpath = workspace.get_absolute_path()
 | |
| 2059 | -            if not os.path.exists(fullpath):
 | |
| 2060 | -                self.__consistency = Consistency.INCONSISTENT
 | |
| 2061 | -        else:
 | |
| 2062 | - | |
| 2063 | -            # Determine overall consistency of the element
 | |
| 2064 | -            for source in self.__sources:
 | |
| 2065 | -                source._update_state()
 | |
| 2066 | -                source_consistency = source._get_consistency()
 | |
| 2067 | -                self.__consistency = min(self.__consistency, source_consistency)
 | |
| 2068 | - | |
| 2069 | -    # __calculate_cache_key():
 | |
| 2067 | +    # _calculate_cache_key():
 | |
| 2070 | 2068 |      #
 | 
| 2071 | 2069 |      # Calculates the cache key
 | 
| 2072 | 2070 |      #
 | 
| ... | ... | @@ -2075,7 +2073,7 @@ class Element(Plugin): | 
| 2075 | 2073 |      #
 | 
| 2076 | 2074 |      # None is returned if information for the cache key is missing.
 | 
| 2077 | 2075 |      #
 | 
| 2078 | -    def __calculate_cache_key(self, dependencies):
 | |
| 2076 | +    def _calculate_cache_key(self, dependencies):
 | |
| 2079 | 2077 |          # No cache keys for dependencies which have no cache keys
 | 
| 2080 | 2078 |          if None in dependencies:
 | 
| 2081 | 2079 |              return None
 | 
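A note on this rename: the double-underscore form is class-private via Python name mangling, so it cannot be reached from a subclass; presumably the new ArtifactElement subclass needs to call it. A minimal illustration of the mangling:

    class Base:
        def __key(self):        # mangled to _Base__key
            return 42

    class Sub(Base):
        def get(self):
            return self.__key() # looks up _Sub__key -> AttributeError
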
| ... | ... | @@ -2114,6 +2112,40 @@ class Element(Plugin): | 
| 2114 | 2112 |  | 
| 2115 | 2113 |          return _cachekey.generate_key(cache_key_dict)
 | 
| 2116 | 2114 |  | 
| 2115 | +    #############################################################
 | |
| 2116 | +    #                   Private Local Methods                   #
 | |
| 2117 | +    #############################################################
 | |
| 2118 | + | |
| 2119 | +    # __update_source_state()
 | |
| 2120 | +    #
 | |
| 2121 | +    # Updates source consistency state
 | |
| 2122 | +    #
 | |
| 2123 | +    def __update_source_state(self):
 | |
| 2124 | + | |
| 2125 | +        # Cannot resolve source state until tracked
 | |
| 2126 | +        if self.__tracking_scheduled:
 | |
| 2127 | +            return
 | |
| 2128 | + | |
| 2129 | +        self.__consistency = Consistency.CACHED
 | |
| 2130 | +        workspace = self._get_workspace()
 | |
| 2131 | + | |
| 2132 | +        # Special case for workspaces
 | |
| 2133 | +        if workspace:
 | |
| 2134 | + | |
| 2135 | +            # A workspace is considered inconsistent in the case
 | |
| 2136 | +            # that its directory went missing
 | |
| 2137 | +            #
 | |
| 2138 | +            fullpath = workspace.get_absolute_path()
 | |
| 2139 | +            if not os.path.exists(fullpath):
 | |
| 2140 | +                self.__consistency = Consistency.INCONSISTENT
 | |
| 2141 | +        else:
 | |
| 2142 | + | |
| 2143 | +            # Determine overall consistency of the element
 | |
| 2144 | +            for source in self.__sources:
 | |
| 2145 | +                source._update_state()
 | |
| 2146 | +                source_consistency = source._get_consistency()
 | |
| 2147 | +                self.__consistency = min(self.__consistency, source_consistency)
 | |
| 2148 | + | |
| 2117 | 2149 |      # __can_build_incrementally()
 | 
| 2118 | 2150 |      #
 | 
| 2119 | 2151 |      # Check if the element can be built incrementally, this
 | 
| ... | ... | @@ -2297,6 +2329,8 @@ class Element(Plugin): | 
| 2297 | 2329 |          defaults['public'] = element_public
 | 
| 2298 | 2330 |  | 
| 2299 | 2331 |      def __init_defaults(self, plugin_conf):
 | 
| 2332 | +        if plugin_conf is None:
 | |
| 2333 | +            return
 | |
| 2300 | 2334 |  | 
| 2301 | 2335 |          # Defaults are loaded once per class and then reused
 | 
| 2302 | 2336 |          #
 | 
| ... | ... | @@ -210,7 +210,7 @@ def test_pull_tree(cli, tmpdir, datafiles): | 
| 210 | 210 |          assert artifactcache.contains(element, element_key)
 | 
| 211 | 211 |  | 
| 212 | 212 |          # Retrieve the Directory object from the cached artifact
 | 
| 213 | -        artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
 | |
| 213 | +        artifact_ref = element.get_artifact_name(element_key)
 | |
| 214 | 214 |          artifact_digest = cas.resolve_ref(artifact_ref)
 | 
| 215 | 215 |  | 
| 216 | 216 |          queue = multiprocessing.Queue()
 | 
| ... | ... | @@ -190,7 +190,7 @@ def test_push_directory(cli, tmpdir, datafiles): | 
| 190 | 190 |          assert artifactcache.has_push_remotes(element=element)
 | 
| 191 | 191 |  | 
| 192 | 192 |          # Recreate the CasBasedDirectory object from the cached artifact
 | 
| 193 | -        artifact_ref = artifactcache.get_artifact_fullname(element, element_key)
 | |
| 193 | +        artifact_ref = element.get_artifact_name(element_key)
 | |
| 194 | 194 |          artifact_digest = cas.resolve_ref(artifact_ref)
 | 
| 195 | 195 |  | 
| 196 | 196 |          queue = multiprocessing.Queue()
 | 
| ... | ... | @@ -214,3 +214,41 @@ def test_cache_key_fatal_warnings(cli, tmpdir, first_warnings, second_warnings, | 
| 214 | 214 |      second_keys = run_get_cache_key("second", second_warnings)
 | 
| 215 | 215 |  | 
| 216 | 216 |      assert compare_cache_keys(first_keys, second_keys) == identical_keys
 | 
| 217 | + | |
| 218 | + | |
| 219 | +@pytest.mark.datafiles(DATA_DIR)
 | |
| 220 | +def test_keys_stable_over_targets(cli, datafiles):
 | |
| 221 | +    root_element = 'elements/key-stability/top-level.bst'
 | |
| 222 | +    target1 = 'elements/key-stability/t1.bst'
 | |
| 223 | +    target2 = 'elements/key-stability/t2.bst'
 | |
| 224 | + | |
| 225 | +    project = os.path.join(datafiles.dirname, datafiles.basename)
 | |
| 226 | +    full_graph_result = cli.run(project=project, args=[
 | |
| 227 | +        'show',
 | |
| 228 | +        '--format', '%{name}::%{full-key}',
 | |
| 229 | +        root_element
 | |
| 230 | +    ])
 | |
| 231 | +    full_graph_result.assert_success()
 | |
| 232 | +    all_cache_keys = parse_output_keys(full_graph_result.output)
 | |
| 233 | + | |
| 234 | +    ordering1_result = cli.run(project=project, args=[
 | |
| 235 | +        'show',
 | |
| 236 | +        '--format', '%{name}::%{full-key}',
 | |
| 237 | +        target1,
 | |
| 238 | +        target2
 | |
| 239 | +    ])
 | |
| 240 | +    ordering1_result.assert_success()
 | |
| 241 | +    ordering1_cache_keys = parse_output_keys(ordering1_result.output)
 | |
| 242 | + | |
| 243 | +    ordering2_result = cli.run(project=project, args=[
 | |
| 244 | +        'show',
 | |
| 245 | +        '--format', '%{name}::%{full-key}',
 | |
| 246 | +        target2,
 | |
| 247 | +        target1
 | |
| 248 | +    ])
 | |
| 249 | +    ordering2_result.assert_success()
 | |
| 250 | +    ordering2_cache_keys = parse_output_keys(ordering2_result.output)
 | |
| 251 | + | |
| 252 | +    for element in ordering1_cache_keys:
 | |
| 253 | +        assert ordering1_cache_keys[element] == ordering2_cache_keys[element]
 | |
| 254 | +        assert ordering1_cache_keys[element] == all_cache_keys[element] | 
| 1 | +kind: import
 | |
| 2 | +sources:
 | |
| 3 | +- kind: local
 | |
| 4 | +  path: elements/key-stability/aaa.bst | 
| 1 | +kind: import
 | |
| 2 | +sources:
 | |
| 3 | +- kind: local
 | |
| 4 | +  path: elements/key-stability/t1.bst
 | |
| 5 | +depends:
 | |
| 6 | +- elements/key-stability/zzz.bst | 
| 1 | +kind: import
 | |
| 2 | +sources:
 | |
| 3 | +- kind: local
 | |
| 4 | +  path: elements/key-stability/t2.bst
 | |
| 5 | +depends:
 | |
| 6 | +- elements/key-stability/aaa.bst
 | |
| 7 | +- elements/key-stability/zzz.bst | 
| 1 | +kind: import
 | |
| 2 | +sources:
 | |
| 3 | +- kind: local
 | |
| 4 | +  path: elements/key-stability/top-level.bst
 | |
| 5 | +depends:
 | |
| 6 | +- elements/key-stability/t1.bst
 | |
| 7 | +- elements/key-stability/t2.bst | 
| 1 | +kind: import
 | |
| 2 | +sources:
 | |
| 3 | +- kind: local
 | |
| 4 | +  path: elements/key-stability/zzz.bst | 
| ... | ... | @@ -18,7 +18,7 @@ try: | 
| 18 | 18 |      utils.get_host_tool('git')
 | 
| 19 | 19 |      HAVE_GIT = True
 | 
| 20 | 20 |      out = str(subprocess.check_output(['git', '--version']), "utf-8")
 | 
| 21 | -    version = tuple(int(x) for x in out.split(' ', 2)[2].split('.'))
 | |
| 21 | +    version = tuple(int(x) for x in out.split(' ')[2].split('.'))
 | |
| 22 | 22 |      HAVE_OLD_GIT = version < (1, 8, 5)
 | 
| 23 | 23 |  except ProgramNotFoundError:
 | 
| 24 | 24 |      HAVE_GIT = False
 | 
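A quick illustration of why the maxsplit argument was dropped; the version strings below are examples, and some git builds append a suffix to the `git --version` output:

    out = "git version 2.24.3 (Apple Git-128)\n"
    out.split(' ', 2)[2]  # '2.24.3 (Apple Git-128)\n' -> int() fails on '3 (Apple'
    out.split(' ')[2]     # '2.24.3' -> parses cleanly to (2, 24, 3)
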
| ... | ... | @@ -88,5 +88,5 @@ whitelist_externals = | 
| 88 | 88 |  commands =
 | 
| 89 | 89 |      python3 setup.py --command-packages=click_man.commands man_pages
 | 
| 90 | 90 |  deps =
 | 
| 91 | -    click-man
 | |
| 91 | +    click-man >= 0.3.0
 | |
| 92 | 92 |      -rrequirements/requirements.txt | 
