[Notes] [Git][BuildStream/buildstream][jmac/cache_artifacts_with_vdir] 25 commits: source.py: Add new delegate method validate_cache()



Title: GitLab

Jim MacArthur pushed to branch jmac/cache_artifacts_with_vdir at BuildStream / buildstream

Commits:

30 changed files:

Changes:

  • NEWS
    ... ... @@ -2,6 +2,10 @@
    2 2
     buildstream 1.3.1
    
    3 3
     =================
    
    4 4
     
    
    5
    +  o BREAKING CHANGE: Default strip-commands have been removed as they are too
    
    6
    +    specific. Recommendation if you are building on Linux is to use the
    
    7
    +    ones being used in freedesktop-sdk project, for example
    
    8
    +
    
    5 9
       o All elements must now be suffixed with `.bst`
    
    6 10
         Attempting to use an element that does not have the `.bst` extension,
    
    7 11
         will result in a warning.
    

  • buildstream/_artifactcache/cascache.py
    ... ... @@ -39,6 +39,7 @@ from .. import utils
    39 39
     from .._exceptions import CASError, LoadError, LoadErrorReason
    
    40 40
     from .. import _yaml
    
    41 41
     
    
    42
    +from ..storage._casbaseddirectory import CasBasedDirectory
    
    42 43
     
    
    43 44
     # The default limit for gRPC messages is 4 MiB.
    
    44 45
     # Limit payload to 1 MiB to leave sufficient headroom for metadata.
    
    ... ... @@ -768,6 +769,9 @@ class CASCache():
    768 769
         #     (Digest): Digest object for the directory added.
    
    769 770
         #
    
    770 771
         def _commit_directory(self, path, *, dir_digest=None):
    
    772
    +        if isinstance(path, CasBasedDirectory):
    
    773
    +            return self.add_object(digest=dir_digest, buffer=path.pb2_directory.SerializeToString())
    
    774
    +
    
    771 775
             directory = remote_execution_pb2.Directory()
    
    772 776
     
    
    773 777
             for name in sorted(os.listdir(path)):
    

  • buildstream/_stream.py
    ... ... @@ -1199,7 +1199,7 @@ class Stream():
    1199 1199
                 element_source_dir = self._get_element_dirname(directory, element)
    
    1200 1200
                 if list(element.sources()):
    
    1201 1201
                     os.makedirs(element_source_dir)
    
    1202
    -                element._stage_sources_at(element_source_dir)
    
    1202
    +                element._stage_sources_at(element_source_dir, mount_workspaces=False)
    
    1203 1203
     
    
    1204 1204
         # Write a master build script to the sandbox
    
    1205 1205
         def _write_build_script(self, directory, elements):
    

  • buildstream/_versions.py
    ... ... @@ -23,7 +23,7 @@
    23 23
     # This version is bumped whenever enhancements are made
    
    24 24
     # to the `project.conf` format or the core element format.
    
    25 25
     #
    
    26
    -BST_FORMAT_VERSION = 19
    
    26
    +BST_FORMAT_VERSION = 20
    
    27 27
     
    
    28 28
     
    
    29 29
     # The base BuildStream artifact version
    

  • buildstream/_yaml.py
    ... ... @@ -352,6 +352,7 @@ _sentinel = object()
    352 352
     #    key (str): The key to get a value for in node
    
    353 353
     #    indices (list of ints): Optionally decend into lists of lists
    
    354 354
     #    default_value: Optionally return this value if the key is not found
    
    355
    +#    allow_none: (bool): Allow None to be a valid value
    
    355 356
     #
    
    356 357
     # Returns:
    
    357 358
     #    The value if found in node, otherwise default_value is returned
    
    ... ... @@ -362,7 +363,7 @@ _sentinel = object()
    362 363
     # Note:
    
    363 364
     #    Returned strings are stripped of leading and trailing whitespace
    
    364 365
     #
    
    365
    -def node_get(node, expected_type, key, indices=None, default_value=_sentinel):
    
    366
    +def node_get(node, expected_type, key, indices=None, *, default_value=_sentinel, allow_none=False):
    
    366 367
         value = node.get(key, default_value)
    
    367 368
         provenance = node_get_provenance(node)
    
    368 369
         if value is _sentinel:
    
    ... ... @@ -377,8 +378,8 @@ def node_get(node, expected_type, key, indices=None, default_value=_sentinel):
    377 378
                 value = value[index]
    
    378 379
                 path += '[{:d}]'.format(index)
    
    379 380
     
    
    380
    -    # We want to allow None as a valid value for any type
    
    381
    -    if value is None:
    
    381
    +    # Optionally allow None as a valid value for any type
    
    382
    +    if value is None and (allow_none or default_value is None):
    
    382 383
             return None
    
    383 384
     
    
    384 385
         if not isinstance(value, expected_type):
    

  • buildstream/buildelement.py
    ... ... @@ -35,6 +35,14 @@ This section will give a brief summary of how some of the common features work,
    35 35
     some of them or the variables they use will be further detailed in the following
    
    36 36
     sections.
    
    37 37
     
    
    38
    +The `strip-binaries` variable
    
    39
    +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    
    40
    +The `strip-binaries` variable is by default **empty**. You need to use the
    
    41
    +appropriate commands depending on the system you are building.
    
    42
    +If you are targeting Linux, ones known to work are the ones used by the
    
    43
    +`freedesktop-sdk <https://freedesktop-sdk.io/>`_, you can take a look at them in their
    
    44
    +`project.conf <https://gitlab.com/freedesktop-sdk/freedesktop-sdk/blob/freedesktop-sdk-18.08.21/project.conf#L74>`_
    
    45
    +
    
    38 46
     Location for running commands
    
    39 47
     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    
    40 48
     The ``command-subdir`` variable sets where the build commands will be executed,
    

  • buildstream/data/projectconfig.yaml
    ... ... @@ -44,38 +44,8 @@ variables:
    44 44
       # Indicates the build installation directory in the sandbox
    
    45 45
       install-root: /buildstream-install
    
    46 46
     
    
    47
    -  # Arguments for tooling used when stripping debug symbols
    
    48
    -  objcopy-link-args: --add-gnu-debuglink
    
    49
    -  objcopy-extract-args: |
    
    50
    -
    
    51
    -    --only-keep-debug --compress-debug-sections
    
    52
    -
    
    53
    -  strip-args: |
    
    54
    -
    
    55
    -    --remove-section=.comment --remove-section=.note --strip-unneeded
    
    56
    -
    
    57
    -  # Generic implementation for stripping debugging symbols
    
    58
    -  strip-binaries: |
    
    59
    -
    
    60
    -    cd "%{install-root}" && find -type f \
    
    61
    -      '(' -perm -111 -o -name '*.so*' \
    
    62
    -          -o -name '*.cmxs' -o -name '*.node' ')' \
    
    63
    -      -exec sh -ec \
    
    64
    -      'read -n4 hdr <"$1" # check for elf header
    
    65
    -       case "$1" in
    
    66
    -         %{install-root}%{debugdir}/*)
    
    67
    -           exit 0
    
    68
    -           ;;
    
    69
    -       esac
    
    70
    -       if [ "$hdr" != "$(printf \\x7fELF)" ]; then
    
    71
    -           exit 0
    
    72
    -       fi
    
    73
    -       debugfile="%{install-root}%{debugdir}/$1"
    
    74
    -       mkdir -p "$(dirname "$debugfile")"
    
    75
    -       objcopy %{objcopy-extract-args} "$1" "$debugfile"
    
    76
    -       chmod 644 "$debugfile"
    
    77
    -       strip %{strip-args} "$1"
    
    78
    -       objcopy %{objcopy-link-args} "$debugfile" "$1"' - {} ';'
    
    47
    +  # You need to override this with the commands specific for your system
    
    48
    +  strip-binaries: ""
    
    79 49
     
    
    80 50
       # Generic implementation for reproducible python builds
    
    81 51
       fix-pyc-timestamps: |
    
    ... ... @@ -196,4 +166,4 @@ shell:
    196 166
     
    
    197 167
       # Command to run when `bst shell` does not provide a command
    
    198 168
       #
    
    199
    -  command: [ 'sh', '-i' ]
    \ No newline at end of file
    169
    +  command: [ 'sh', '-i' ]

  • buildstream/element.py
    ... ... @@ -102,6 +102,7 @@ from .types import _KeyStrength, CoreWarnings
    102 102
     
    
    103 103
     from .storage.directory import Directory
    
    104 104
     from .storage._filebaseddirectory import FileBasedDirectory
    
    105
    +from .storage._casbaseddirectory import CasBasedDirectory
    
    105 106
     from .storage.directory import VirtualDirectoryError
    
    106 107
     
    
    107 108
     
    
    ... ... @@ -1634,35 +1635,38 @@ class Element(Plugin):
    1634 1635
                     # No collect directory existed
    
    1635 1636
                     collectvdir = None
    
    1636 1637
     
    
    1638
    +        assemblevdir = CasBasedDirectory(cas_cache=self._get_context().artifactcache.cas, ref=None)
    
    1639
    +        logsvdir = assemblevdir.descend("logs", create=True)
    
    1640
    +        metavdir = assemblevdir.descend("meta", create=True)
    
    1641
    +
    
    1637 1642
             # Create artifact directory structure
    
    1638 1643
             assembledir = os.path.join(rootdir, 'artifact')
    
    1639
    -        filesdir = os.path.join(assembledir, 'files')
    
    1640 1644
             logsdir = os.path.join(assembledir, 'logs')
    
    1641 1645
             metadir = os.path.join(assembledir, 'meta')
    
    1642
    -        buildtreedir = os.path.join(assembledir, 'buildtree')
    
    1643 1646
             os.mkdir(assembledir)
    
    1644
    -        if collect is not None and collectvdir is not None:
    
    1645
    -            os.mkdir(filesdir)
    
    1646 1647
             os.mkdir(logsdir)
    
    1647 1648
             os.mkdir(metadir)
    
    1648
    -        os.mkdir(buildtreedir)
    
    1649 1649
     
    
    1650
    -        # Hard link files from collect dir to files directory
    
    1651 1650
             if collect is not None and collectvdir is not None:
    
    1652
    -            collectvdir.export_files(filesdir, can_link=True)
    
    1651
    +            if isinstance(collectvdir, CasBasedDirectory):
    
    1652
    +                assemblevdir.fast_directory_import("files", collectvdir)
    
    1653
    +            else:
    
    1654
    +                filesvdir = assemblevdir.descend("files", create=True)
    
    1655
    +                filesvdir.import_files(collectvdir, can_link=True)
    
    1653 1656
     
    
    1657
    +        sandbox_vroot = sandbox.get_virtual_directory()
    
    1654 1658
             try:
    
    1655
    -            sandbox_vroot = sandbox.get_virtual_directory()
    
    1656 1659
                 sandbox_build_dir = sandbox_vroot.descend(
    
    1657 1660
                     self.get_variable('build-root').lstrip(os.sep).split(os.sep))
    
    1658
    -            # Hard link files from build-root dir to buildtreedir directory
    
    1659
    -            sandbox_build_dir.export_files(buildtreedir)
    
    1661
    +            assemblevdir.fast_directory_import("buildtree", sandbox_build_dir)
    
    1660 1662
             except VirtualDirectoryError:
    
    1661 1663
                 # Directory could not be found. Pre-virtual
    
    1662 1664
                 # directory behaviour was to continue silently
    
    1663
    -            # if the directory could not be found.
    
    1664
    -            pass
    
    1665
    +            # if the directory could not be found, but we must create
    
    1666
    +            # the directory.
    
    1667
    +            assemblevdir.descend("buildtree", create=True)
    
    1665 1668
     
    
    1669
    +        # Write some logs out to normal directories: logsdir and metadir
    
    1666 1670
             # Copy build log
    
    1667 1671
             log_filename = self._get_context().get_log_filename()
    
    1668 1672
             self._build_log_path = os.path.join(logsdir, 'build.log')
    
    ... ... @@ -1705,9 +1709,12 @@ class Element(Plugin):
    1705 1709
                 ]
    
    1706 1710
             }), os.path.join(metadir, 'workspaced-dependencies.yaml'))
    
    1707 1711
     
    
    1708
    -        with self.timed_activity("Caching artifact"):
    
    1709
    -            artifact_size = utils._get_dir_size(assembledir)
    
    1710
    -            self.__artifacts.commit(self, assembledir, self.__get_cache_keys_for_commit())
    
    1712
    +        metavdir.import_files(metadir)
    
    1713
    +        logsvdir.import_files(logsdir)
    
    1714
    +
    
    1715
    +        artifact_size = assemblevdir.get_size()
    
    1716
    +        with self.timed_activity("Caching artifact of size {}".format(artifact_size)):
    
    1717
    +            self.__artifacts.commit(self, assemblevdir, self.__get_cache_keys_for_commit())
    
    1711 1718
     
    
    1712 1719
             if collect is not None and collectvdir is None:
    
    1713 1720
                 raise ElementError(
    

  • buildstream/plugin.py
    ... ... @@ -323,7 +323,7 @@ class Plugin():
    323 323
             provenance = _yaml.node_get_provenance(node, key=member_name)
    
    324 324
             return str(provenance)
    
    325 325
     
    
    326
    -    def node_get_member(self, node, expected_type, member_name, default=_yaml._sentinel):
    
    326
    +    def node_get_member(self, node, expected_type, member_name, default=_yaml._sentinel, *, allow_none=False):
    
    327 327
             """Fetch the value of a node member, raising an error if the value is
    
    328 328
             missing or incorrectly typed.
    
    329 329
     
    
    ... ... @@ -332,6 +332,7 @@ class Plugin():
    332 332
                expected_type (type): The expected type of the node member
    
    333 333
                member_name (str): The name of the member to fetch
    
    334 334
                default (expected_type): A value to return when *member_name* is not specified in *node*
    
    335
    +           allow_none (bool): Allow explicitly set None values in the YAML (*Since: 1.4*)
    
    335 336
     
    
    336 337
             Returns:
    
    337 338
                The value of *member_name* in *node*, otherwise *default*
    
    ... ... @@ -352,7 +353,7 @@ class Plugin():
    352 353
               # Fetch an optional integer
    
    353 354
               level = self.node_get_member(node, int, 'level', -1)
    
    354 355
             """
    
    355
    -        return _yaml.node_get(node, expected_type, member_name, default_value=default)
    
    356
    +        return _yaml.node_get(node, expected_type, member_name, default_value=default, allow_none=allow_none)
    
    356 357
     
    
    357 358
         def node_get_project_path(self, node, key, *,
    
    358 359
                                   check_is_file=False, check_is_dir=False):
    

  • buildstream/plugins/sources/git.py
    ... ... @@ -133,7 +133,22 @@ details on common configuration options for sources.
    133 133
     
    
    134 134
     This plugin provides the following :ref:`configurable warnings <configurable_warnings>`:
    
    135 135
     
    
    136
    -- ``git:inconsistent-submodule`` - A submodule was found to be missing from the underlying git repository.
    
    136
    +- ``git:inconsistent-submodule`` - A submodule present in the git repository's .gitmodules was never
    
    137
    +  added with `git submodule add`.
    
    138
    +
    
    139
    +- ``git:unlisted-submodule`` - A submodule is present in the git repository but was not specified in
    
    140
    +  the source configuration and was not disabled for checkout.
    
    141
    +
    
    142
    +  .. note::
    
    143
    +
    
    144
    +     The ``git:unlisted-submodule`` warning is available since :ref:`format version 20 <project_format_version>`
    
    145
    +
    
    146
    +- ``git:invalid-submodule`` - A submodule is specified in the source configuration but does not exist
    
    147
    +  in the repository.
    
    148
    +
    
    149
    +  .. note::
    
    150
    +
    
    151
    +     The ``git:invalid-submodule`` warning is available since :ref:`format version 20 <project_format_version>`
    
    137 152
     
    
    138 153
     This plugin also utilises the following configurable :class:`core warnings <buildstream.types.CoreWarnings>`:
    
    139 154
     
    
    ... ... @@ -158,6 +173,8 @@ GIT_MODULES = '.gitmodules'
    158 173
     
    
    159 174
     # Warnings
    
    160 175
     WARN_INCONSISTENT_SUBMODULE = "inconsistent-submodule"
    
    176
    +WARN_UNLISTED_SUBMODULE = "unlisted-submodule"
    
    177
    +WARN_INVALID_SUBMODULE = "invalid-submodule"
    
    161 178
     
    
    162 179
     
    
    163 180
     # Because of handling of submodules, we maintain a GitMirror
    
    ... ... @@ -305,7 +322,7 @@ class GitMirror(SourceFetcher):
    305 322
     
    
    306 323
             return ref, list(tags)
    
    307 324
     
    
    308
    -    def stage(self, directory, track=None):
    
    325
    +    def stage(self, directory):
    
    309 326
             fullpath = os.path.join(directory, self.path)
    
    310 327
     
    
    311 328
             # Using --shared here avoids copying the objects into the checkout, in any
    
    ... ... @@ -324,11 +341,7 @@ class GitMirror(SourceFetcher):
    324 341
     
    
    325 342
             self._rebuild_git(fullpath)
    
    326 343
     
    
    327
    -        # Check that the user specified ref exists in the track if provided & not already tracked
    
    328
    -        if track:
    
    329
    -            self.assert_ref_in_track(fullpath, track)
    
    330
    -
    
    331
    -    def init_workspace(self, directory, track=None):
    
    344
    +    def init_workspace(self, directory):
    
    332 345
             fullpath = os.path.join(directory, self.path)
    
    333 346
             url = self.source.translate_url(self.url)
    
    334 347
     
    
    ... ... @@ -344,10 +357,6 @@ class GitMirror(SourceFetcher):
    344 357
                              fail="Failed to checkout git ref {}".format(self.ref),
    
    345 358
                              cwd=fullpath)
    
    346 359
     
    
    347
    -        # Check that the user specified ref exists in the track if provided & not already tracked
    
    348
    -        if track:
    
    349
    -            self.assert_ref_in_track(fullpath, track)
    
    350
    -
    
    351 360
         # List the submodules (path/url tuples) present at the given ref of this repo
    
    352 361
         def submodule_list(self):
    
    353 362
             modules = "{}:{}".format(self.ref, GIT_MODULES)
    
    ... ... @@ -413,28 +422,6 @@ class GitMirror(SourceFetcher):
    413 422
     
    
    414 423
                 return None
    
    415 424
     
    
    416
    -    # Assert that ref exists in track, if track has been specified.
    
    417
    -    def assert_ref_in_track(self, fullpath, track):
    
    418
    -        _, branch = self.source.check_output([self.source.host_git, 'branch', '--list', track,
    
    419
    -                                              '--contains', self.ref],
    
    420
    -                                             cwd=fullpath,)
    
    421
    -        if branch:
    
    422
    -            return
    
    423
    -        else:
    
    424
    -            _, tag = self.source.check_output([self.source.host_git, 'tag', '--list', track,
    
    425
    -                                               '--contains', self.ref],
    
    426
    -                                              cwd=fullpath,)
    
    427
    -            if tag:
    
    428
    -                return
    
    429
    -
    
    430
    -        detail = "The ref provided for the element does not exist locally in the provided track branch / tag " + \
    
    431
    -                 "'{}'.\nYou may wish to track the element to update the ref from '{}' ".format(track, track) + \
    
    432
    -                 "with `bst track`,\nor examine the upstream at '{}' for the specific ref.".format(self.url)
    
    433
    -
    
    434
    -        self.source.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
    
    435
    -                         .format(self.source, self.ref, track, self.url),
    
    436
    -                         detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
    
    437
    -
    
    438 425
         def _rebuild_git(self, fullpath):
    
    439 426
             if not self.tags:
    
    440 427
                 return
    
    ... ... @@ -563,7 +550,6 @@ class GitSource(Source):
    563 550
                     self.submodule_checkout_overrides[path] = checkout
    
    564 551
     
    
    565 552
             self.mark_download_url(self.original_url)
    
    566
    -        self.tracked = False
    
    567 553
     
    
    568 554
         def preflight(self):
    
    569 555
             # Check if git is installed, get the binary at the same time
    
    ... ... @@ -653,8 +639,6 @@ class GitSource(Source):
    653 639
                 # Update self.mirror.ref and node.ref from the self.tracking branch
    
    654 640
                 ret = self.mirror.latest_commit_with_tags(self.tracking, self.track_tags)
    
    655 641
     
    
    656
    -        # Set tracked attribute, parameter for if self.mirror.assert_ref_in_track is needed
    
    657
    -        self.tracked = True
    
    658 642
             return ret
    
    659 643
     
    
    660 644
         def init_workspace(self, directory):
    
    ... ... @@ -662,7 +646,7 @@ class GitSource(Source):
    662 646
             self.refresh_submodules()
    
    663 647
     
    
    664 648
             with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
    
    665
    -            self.mirror.init_workspace(directory, track=(self.tracking if not self.tracked else None))
    
    649
    +            self.mirror.init_workspace(directory)
    
    666 650
                 for mirror in self.submodules:
    
    667 651
                     mirror.init_workspace(directory)
    
    668 652
     
    
    ... ... @@ -678,15 +662,9 @@ class GitSource(Source):
    678 662
             # Stage the main repo in the specified directory
    
    679 663
             #
    
    680 664
             with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
    
    681
    -            self.mirror.stage(directory, track=(self.tracking if not self.tracked else None))
    
    665
    +            self.mirror.stage(directory)
    
    682 666
                 for mirror in self.submodules:
    
    683
    -                if mirror.path in self.submodule_checkout_overrides:
    
    684
    -                    checkout = self.submodule_checkout_overrides[mirror.path]
    
    685
    -                else:
    
    686
    -                    checkout = self.checkout_submodules
    
    687
    -
    
    688
    -                if checkout:
    
    689
    -                    mirror.stage(directory)
    
    667
    +                mirror.stage(directory)
    
    690 668
     
    
    691 669
         def get_source_fetchers(self):
    
    692 670
             yield self.mirror
    
    ... ... @@ -694,6 +672,74 @@ class GitSource(Source):
    694 672
             for submodule in self.submodules:
    
    695 673
                 yield submodule
    
    696 674
     
    
    675
    +    def validate_cache(self):
    
    676
    +        discovered_submodules = {}
    
    677
    +        unlisted_submodules = []
    
    678
    +        invalid_submodules = []
    
    679
    +
    
    680
    +        for path, url in self.mirror.submodule_list():
    
    681
    +            discovered_submodules[path] = url
    
    682
    +            if self.ignore_submodule(path):
    
    683
    +                continue
    
    684
    +
    
    685
    +            override_url = self.submodule_overrides.get(path)
    
    686
    +            if not override_url:
    
    687
    +                unlisted_submodules.append((path, url))
    
    688
    +
    
    689
    +        # Warn about submodules which are explicitly configured but do not exist
    
    690
    +        for path, url in self.submodule_overrides.items():
    
    691
    +            if path not in discovered_submodules:
    
    692
    +                invalid_submodules.append((path, url))
    
    693
    +
    
    694
    +        if invalid_submodules:
    
    695
    +            detail = []
    
    696
    +            for path, url in invalid_submodules:
    
    697
    +                detail.append("  Submodule URL '{}' at path '{}'".format(url, path))
    
    698
    +
    
    699
    +            self.warn("{}: Invalid submodules specified".format(self),
    
    700
    +                      warning_token=WARN_INVALID_SUBMODULE,
    
    701
    +                      detail="The following submodules are specified in the source "
    
    702
    +                      "description but do not exist according to the repository\n\n" +
    
    703
    +                      "\n".join(detail))
    
    704
    +
    
    705
    +        # Warn about submodules which exist but have not been explicitly configured
    
    706
    +        if unlisted_submodules:
    
    707
    +            detail = []
    
    708
    +            for path, url in unlisted_submodules:
    
    709
    +                detail.append("  Submodule URL '{}' at path '{}'".format(url, path))
    
    710
    +
    
    711
    +            self.warn("{}: Unlisted submodules exist".format(self),
    
    712
    +                      warning_token=WARN_UNLISTED_SUBMODULE,
    
    713
    +                      detail="The following submodules exist but are not specified " +
    
    714
    +                      "in the source description\n\n" +
    
    715
    +                      "\n".join(detail))
    
    716
    +
    
    717
    +        # Assert that the ref exists in the track tag/branch, if track has been specified.
    
    718
    +        ref_in_track = False
    
    719
    +        if self.tracking:
    
    720
    +            _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
    
    721
    +                                           '--contains', self.mirror.ref],
    
    722
    +                                          cwd=self.mirror.mirror)
    
    723
    +            if branch:
    
    724
    +                ref_in_track = True
    
    725
    +            else:
    
    726
    +                _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
    
    727
    +                                            '--contains', self.mirror.ref],
    
    728
    +                                           cwd=self.mirror.mirror)
    
    729
    +                if tag:
    
    730
    +                    ref_in_track = True
    
    731
    +
    
    732
    +            if not ref_in_track:
    
    733
    +                detail = "The ref provided for the element does not exist locally " + \
    
    734
    +                         "in the provided track branch / tag '{}'.\n".format(self.tracking) + \
    
    735
    +                         "You may wish to track the element to update the ref from '{}' ".format(self.tracking) + \
    
    736
    +                         "with `bst track`,\n" + \
    
    737
    +                         "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
    
    738
    +
    
    739
    +                self.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
    
    740
    +                          .format(self, self.mirror.ref, self.tracking, self.mirror.url),
    
    741
    +                          detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
    
    742
    +
    
    697 743
         ###########################################################
    
    698 744
         #                     Local Functions                     #
    
    699 745
         ###########################################################
    
    ... ... @@ -718,12 +764,12 @@ class GitSource(Source):
    718 764
             self.mirror.ensure()
    
    719 765
             submodules = []
    
    720 766
     
    
    721
    -        # XXX Here we should issue a warning if either:
    
    722
    -        #   A.) A submodule exists but is not defined in the element configuration
    
    723
    -        #   B.) The element configuration configures submodules which dont exist at the current ref
    
    724
    -        #
    
    725 767
             for path, url in self.mirror.submodule_list():
    
    726 768
     
    
    769
    +            # Completely ignore submodules which are disabled for checkout
    
    770
    +            if self.ignore_submodule(path):
    
    771
    +                continue
    
    772
    +
    
    727 773
                 # Allow configuration to override the upstream
    
    728 774
                 # location of the submodules.
    
    729 775
                 override_url = self.submodule_overrides.get(path)
    
    ... ... @@ -747,6 +793,16 @@ class GitSource(Source):
    747 793
                 tags.append((tag, commit_ref, annotated))
    
    748 794
             return tags
    
    749 795
     
    
    796
    +    # Checks whether the plugin configuration has explicitly
    
    797
    +    # configured this submodule to be ignored
    
    798
    +    def ignore_submodule(self, path):
    
    799
    +        try:
    
    800
    +            checkout = self.submodule_checkout_overrides[path]
    
    801
    +        except KeyError:
    
    802
    +            checkout = self.checkout_submodules
    
    803
    +
    
    804
    +        return not checkout
    
    805
    +
    
    750 806
     
    
    751 807
     # Plugin entry point
    
    752 808
     def setup():
    

  • buildstream/source.py
    ... ... @@ -102,6 +102,11 @@ these methods are mandatory to implement.
    102 102
       submodules). For details on how to define a SourceFetcher, see
    
    103 103
       :ref:`SourceFetcher <core_source_fetcher>`.
    
    104 104
     
    
    105
    +* :func:`Source.validate_cache() <buildstream.source.Source.validate_cache>`
    
    106
    +
    
    107
    +  Perform any validations which require the sources to be cached.
    
    108
    +
    
    109
    +  **Optional**: This is completely optional and will do nothing if left unimplemented.
    
    105 110
     
    
    106 111
     Accessing previous sources
    
    107 112
     --------------------------
    
    ... ... @@ -480,9 +485,22 @@ class Source(Plugin):
    480 485
     
    
    481 486
             *Since: 1.2*
    
    482 487
             """
    
    483
    -
    
    484 488
             return []
    
    485 489
     
    
    490
    +    def validate_cache(self):
    
    491
    +        """Implement any validations once we know the sources are cached
    
    492
    +
    
    493
    +        This is guaranteed to be called only once for a given session
    
    494
    +        once the sources are known to be
    
    495
    +        :attr:`Consistency.CACHED <buildstream.types.Consistency.CACHED>`,
    
    496
    +        if source tracking is enabled in the session for this source,
    
    497
    +        then this will only be called if the sources become cached after
    
    498
    +        tracking completes.
    
    499
    +
    
    500
    +        *Since: 1.4*
    
    501
    +        """
    
    502
    +        pass
    
    503
    +
    
    486 504
         #############################################################
    
    487 505
         #                       Public Methods                      #
    
    488 506
         #############################################################
    
    ... ... @@ -659,6 +677,11 @@ class Source(Plugin):
    659 677
                 with context.silence():
    
    660 678
                     self.__consistency = self.get_consistency()  # pylint: disable=assignment-from-no-return
    
    661 679
     
    
    680
    +                # Give the Source an opportunity to validate the cached
    
    681
    +                # sources as soon as the Source becomes Consistency.CACHED.
    
    682
    +                if self.__consistency == Consistency.CACHED:
    
    683
    +                    self.validate_cache()
    
    684
    +
    
    662 685
         # Return cached consistency
    
    663 686
         #
    
    664 687
         def _get_consistency(self):
    

  • buildstream/storage/_casbaseddirectory.py
    ... ... @@ -350,10 +350,13 @@ class CasBasedDirectory(Directory):
    350 350
             filenode.is_executable = is_executable
    
    351 351
             self.index[filename] = IndexEntry(filenode, modified=modified or filename in self.index)
    
    352 352
     
    
    353
    -    def _copy_link_from_filesystem(self, basename, filename):
    
    354
    -        self._add_new_link_direct(filename, os.readlink(os.path.join(basename, filename)))
    
    353
    +    def _copy_link_from_filesystem(self, filesystem_path, relative_path, destination_name):
    
    354
    +        # filesystem_path should be a full path point to the source symlink.
    
    355
    +        # relative_path should be the path we're importing to, which is used to turn absolute paths into relative ones.
    
    356
    +        # destination_name should be the destination name in this directory.
    
    357
    +        self._add_new_link_direct(relative_path, destination_name, os.readlink(filesystem_path))
    
    355 358
     
    
    356
    -    def _add_new_link_direct(self, name, target):
    
    359
    +    def _add_new_link_direct(self, relative_path, name, target):
    
    357 360
             existing_link = self._find_pb2_entry(name)
    
    358 361
             if existing_link:
    
    359 362
                 symlinknode = existing_link
    
    ... ... @@ -361,8 +364,15 @@ class CasBasedDirectory(Directory):
    361 364
                 symlinknode = self.pb2_directory.symlinks.add()
    
    362 365
             assert isinstance(symlinknode, remote_execution_pb2.SymlinkNode)
    
    363 366
             symlinknode.name = name
    
    364
    -        # A symlink node has no digest.
    
    367
    +
    
    368
    +        absolute = target.startswith(CasBasedDirectory._pb2_absolute_path_prefix)
    
    369
    +        if absolute:
    
    370
    +            distance_to_root = len(relative_path.split(CasBasedDirectory._pb2_path_sep))
    
    371
    +            target = CasBasedDirectory._pb2_path_sep.join([".."] * distance_to_root + [target[1:]])
    
    365 372
             symlinknode.target = target
    
    373
    +
    
    374
    +        # A symlink node has no digest.
    
    375
    +
    
    366 376
             self.index[name] = IndexEntry(symlinknode, modified=(existing_link is not None))
    
    367 377
     
    
    368 378
         def delete_entry(self, name):
    
    ... ... @@ -527,7 +537,7 @@ class CasBasedDirectory(Directory):
    527 537
                     result.combine(subdir_result)
    
    528 538
                 elif os.path.islink(import_file):
    
    529 539
                     if self._check_replacement(entry, path_prefix, result):
    
    530
    -                    self._copy_link_from_filesystem(source_directory, entry)
    
    540
    +                    self._copy_link_from_filesystem(os.path.join(source_directory, entry), path_prefix, entry)
    
    531 541
                         result.files_written.append(relative_pathname)
    
    532 542
                 elif os.path.isdir(import_file):
    
    533 543
                     # A plain directory which already exists isn't a problem; just ignore it.
    
    ... ... @@ -602,7 +612,7 @@ class CasBasedDirectory(Directory):
    602 612
                             self.index[f] = IndexEntry(filenode, modified=True)
    
    603 613
                         else:
    
    604 614
                             assert isinstance(item, remote_execution_pb2.SymlinkNode)
    
    605
    -                        self._add_new_link_direct(name=f, target=item.target)
    
    615
    +                        self._add_new_link_direct(path_prefix, name=f, target=item.target)
    
    606 616
                     else:
    
    607 617
                         result.ignored.append(os.path.join(path_prefix, f))
    
    608 618
             return result
    
    ... ... @@ -637,7 +647,7 @@ class CasBasedDirectory(Directory):
    637 647
                     files = external_pathspec.list_relative_paths()
    
    638 648
     
    
    639 649
             if isinstance(external_pathspec, FileBasedDirectory):
    
    640
    -            source_directory = external_pathspec.get_underlying_directory()
    
    650
    +            source_directory = external_pathspec._get_underlying_directory()
    
    641 651
                 result = self._import_files_from_directory(source_directory, files=files)
    
    642 652
             elif isinstance(external_pathspec, str):
    
    643 653
                 source_directory = external_pathspec
    
    ... ... @@ -836,6 +846,27 @@ class CasBasedDirectory(Directory):
    836 846
             self._recalculate_recursing_up()
    
    837 847
             self._recalculate_recursing_down()
    
    838 848
     
    
    849
    +    def get_size(self):
    
    850
    +        total = len(self.pb2_directory.SerializeToString())
    
    851
    +        for i in self.index.values():
    
    852
    +            if isinstance(i.buildstream_object, CasBasedDirectory):
    
    853
    +                total += i.buildstream_object.get_size()
    
    854
    +            elif isinstance(i.pb_object, remote_execution_pb2.FileNode):
    
    855
    +                src_name = self.cas_cache.objpath(i.pb_object.digest)
    
    856
    +                filesize = os.stat(src_name).st_size
    
    857
    +                total += filesize
    
    858
    +            # Symlink nodes are encoded as part of the directory serialization.
    
    859
    +        return total
    
    860
    +
    
    861
    +    def fast_directory_import(self, dirname, other_directory):
    
    862
    +        assert dirname not in self.index
    
    863
    +        if isinstance(other_directory, CasBasedDirectory):
    
    864
    +            self.index[dirname] = IndexEntry(other_directory.pb_object,
    
    865
    +                                             buildstream_object=other_directory.buildstream_object)
    
    866
    +        else:
    
    867
    +            subdir = self.descend(dirname, create=True)
    
    868
    +            subdir.import_files(other_directory, can_link=True)
    
    869
    +
    
    839 870
         def _get_identifier(self):
    
    840 871
             path = ""
    
    841 872
             if self.parent:
    

  • buildstream/storage/_filebaseddirectory.py
    ... ... @@ -30,6 +30,7 @@ See also: :ref:`sandboxing`.
    30 30
     import os
    
    31 31
     import time
    
    32 32
     from .directory import Directory, VirtualDirectoryError
    
    33
    +from .. import utils
    
    33 34
     from ..utils import link_files, copy_files, list_relative_paths, _get_link_mtime, _magic_timestamp
    
    34 35
     from ..utils import _set_deterministic_user, _set_deterministic_mtime
    
    35 36
     
    
    ... ... @@ -125,6 +126,13 @@ class FileBasedDirectory(Directory):
    125 126
             self._mark_changed()
    
    126 127
             return import_result
    
    127 128
     
    
    129
    +    def fast_directory_import(self, dirname, other_directory):
    
    130
    +        # We can't do a fast import into a FileBasedDirectory, so this
    
    131
    +        # falls back to import_files.
    
    132
    +        assert dirname not in self.index
    
    133
    +        subdir = self.descend(dirname, create=True)
    
    134
    +        subdir.import_files(other_directory, can_link=True)
    
    135
    +
    
    128 136
         def _mark_changed(self):
    
    129 137
             self._directory_read = False
    
    130 138
     
    
    ... ... @@ -201,6 +209,9 @@ class FileBasedDirectory(Directory):
    201 209
     
    
    202 210
             return list_relative_paths(self.external_directory)
    
    203 211
     
    
    212
    +    def get_size(self):
    
    213
    +        return utils._get_dir_size(self.external_directory)
    
    214
    +
    
    204 215
         def __str__(self):
    
    205 216
             # This returns the whole path (since we don't know where the directory started)
    
    206 217
             # which exposes the sandbox directory; we will have to assume for the time being
    

  • buildstream/storage/directory.py
    ... ... @@ -99,6 +99,30 @@ class Directory():
    99 99
     
    
    100 100
             raise NotImplementedError()
    
    101 101
     
    
    102
    +    def fast_directory_import(self, dirname, other_directory):
    
    103
    +        """Import other_directory as a new directory in this one.
    
    104
    +
    
    105
    +        This is a potentially faster method than import_directory with
    
    106
    +        fewer options. dirname must not already exist, and all files
    
    107
    +        are imported unconditionally. It is assumed that it is
    
    108
    +        acceptable to use filesystem hard links to files in
    
    109
    +        other_directory. You cannot update utimes or get a
    
    110
    +        FileListResult.
    
    111
    +
    
    112
    +        This only provides a benefit if both this and other_directory
    
    113
    +        are CAS-based directories. In other cases, it will fall back
    
    114
    +        to import_directory.
    
    115
    +
    
    116
    +        Args:
    
    117
    +          dirname: The name to call the subdirectory in this
    
    118
    +          directory. This must not already exist in this directory.
    
    119
    +
    
    120
    +          other_directory: The directory to import.
    
    121
    +
    
    122
    +        """
    
    123
    +
    
    124
    +        raise NotImplementedError()
    
    125
    +
    
    102 126
         def export_files(self, to_directory, *, can_link=False, can_destroy=False):
    
    103 127
             """Copies everything from this into to_directory.
    
    104 128
     
    
    ... ... @@ -176,3 +200,9 @@ class Directory():
    176 200
     
    
    177 201
             """
    
    178 202
             raise NotImplementedError()
    
    203
    +
    
    204
    +    def get_size(self):
    
    205
    +        """ Get an approximation of the storage space in bytes used by this directory
    
    206
    +        and all files and subdirectories in it. Storage space varies by implementation
    
    207
    +        and effective space used may be lower than this number due to deduplication. """
    
    208
    +        raise NotImplementedError()

  • tests/cachekey/project/elements/build1.expected
    1
    -a0d000abc1dea8714cd27f348d0b798b35e7246c44e330c4b3f7912fabacc6db
    \ No newline at end of file
    1
    +dadb8f86874f714b4f6d4c9025332934efb7e85c38f6a68b1267746ae8f43f24

  • tests/cachekey/project/elements/build2.expected
    1
    -79f546a78748d943a6958c99ab4ad03305f96fefd0b424b6b246b0c9816e00c6
    \ No newline at end of file
    1
    +f81cefce283dd3581ba2fc865ff9c2763119274b114b12edb4e87196cfff8b2a

  • tests/cachekey/project/target.expected
    1
    -d6d283ed1fb0467fcfa5bf69f8596d0f0ac6638281bc9d8e52e1212e2ec0bcab
    \ No newline at end of file
    1
    +92dae6a712b4f91f4fdbdf8dad732cf07ff4da092a319fa4f4b261a9287640de

  • tests/examples/autotools.py
    ... ... @@ -29,9 +29,7 @@ def test_autotools_build(cli, tmpdir, datafiles):
    29 29
         result.assert_success()
    
    30 30
     
    
    31 31
         assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
    
    32
    -                               '/usr/share', '/usr/lib/debug',
    
    33
    -                               '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    
    34
    -                               '/usr/lib/debug/usr/bin/hello',
    
    32
    +                               '/usr/share',
    
    35 33
                                    '/usr/bin/hello',
    
    36 34
                                    '/usr/share/doc', '/usr/share/doc/amhello',
    
    37 35
                                    '/usr/share/doc/amhello/README'])
    

  • tests/examples/developing.py
    ... ... @@ -30,9 +30,7 @@ def test_autotools_build(cli, tmpdir, datafiles):
    30 30
         result.assert_success()
    
    31 31
     
    
    32 32
         assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
    
    33
    -                               '/usr/share', '/usr/lib/debug',
    
    34
    -                               '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    
    35
    -                               '/usr/lib/debug/usr/bin/hello',
    
    33
    +                               '/usr/share',
    
    36 34
                                    '/usr/bin/hello'])
    
    37 35
     
    
    38 36
     
    

  • tests/examples/flatpak-autotools.py
    ... ... @@ -48,9 +48,7 @@ def test_autotools_build(cli, tmpdir, datafiles):
    48 48
         assert result.exit_code == 0
    
    49 49
     
    
    50 50
         assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
    
    51
    -                               '/usr/share', '/usr/lib/debug',
    
    52
    -                               '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    
    53
    -                               '/usr/lib/debug/usr/bin/hello',
    
    51
    +                               '/usr/share',
    
    54 52
                                    '/usr/bin/hello', '/usr/share/doc',
    
    55 53
                                    '/usr/share/doc/amhello',
    
    56 54
                                    '/usr/share/doc/amhello/README'])
    

  • tests/format/project.py
    ... ... @@ -200,3 +200,10 @@ def test_element_path_project_path_contains_symlinks(cli, datafiles, tmpdir):
    200 200
             f.write("kind: manual\n")
    
    201 201
         result = cli.run(project=linked_project, args=['show', 'element.bst'])
    
    202 202
         result.assert_success()
    
    203
    +
    
    204
    +
    
    205
    +@pytest.mark.datafiles(os.path.join(DATA_DIR))
    
    206
    +def test_empty_depends(cli, datafiles):
    
    207
    +    project = os.path.join(datafiles.dirname, datafiles.basename, "empty-depends")
    
    208
    +    result = cli.run(project=project, args=['show', 'manual.bst'])
    
    209
    +    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)

  • tests/format/project/empty-depends/manual.bst
    1
    +kind: manual
    
    2
    +
    
    3
    +depends:

  • tests/format/project/empty-depends/project.conf
    1
    +name: test

  • tests/frontend/source_checkout.py
    ... ... @@ -28,10 +28,17 @@ def generate_remote_import_element(input_path, output_path):
    28 28
     
    
    29 29
     
    
    30 30
     @pytest.mark.datafiles(DATA_DIR)
    
    31
    -def test_source_checkout(datafiles, cli):
    
    31
    +@pytest.mark.parametrize('with_workspace', [('workspace'), ('no-workspace')])
    
    32
    +def test_source_checkout(datafiles, tmpdir_factory, cli, with_workspace):
    
    33
    +    tmpdir = tmpdir_factory.mktemp("")
    
    32 34
         project = os.path.join(datafiles.dirname, datafiles.basename)
    
    33 35
         checkout = os.path.join(cli.directory, 'source-checkout')
    
    34 36
         target = 'checkout-deps.bst'
    
    37
    +    workspace = os.path.join(str(tmpdir), 'workspace')
    
    38
    +
    
    39
    +    if with_workspace == "workspace":
    
    40
    +        result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, target])
    
    41
    +        result.assert_success()
    
    35 42
     
    
    36 43
         result = cli.run(project=project, args=['source-checkout', target, '--deps', 'none', checkout])
    
    37 44
         result.assert_success()
    

  • tests/integration/autotools.py
    ... ... @@ -32,9 +32,7 @@ def test_autotools_build(cli, tmpdir, datafiles):
    32 32
         assert result.exit_code == 0
    
    33 33
     
    
    34 34
         assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
    
    35
    -                               '/usr/share', '/usr/lib/debug',
    
    36
    -                               '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    
    37
    -                               '/usr/lib/debug/usr/bin/hello',
    
    35
    +                               '/usr/share',
    
    38 36
                                    '/usr/bin/hello', '/usr/share/doc',
    
    39 37
                                    '/usr/share/doc/amhello',
    
    40 38
                                    '/usr/share/doc/amhello/README'])
    
    ... ... @@ -57,9 +55,7 @@ def test_autotools_confroot_build(cli, tmpdir, datafiles):
    57 55
         assert result.exit_code == 0
    
    58 56
     
    
    59 57
         assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
    
    60
    -                               '/usr/share', '/usr/lib/debug',
    
    61
    -                               '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    
    62
    -                               '/usr/lib/debug/usr/bin/hello',
    
    58
    +                               '/usr/share',
    
    63 59
                                    '/usr/bin/hello', '/usr/share/doc',
    
    64 60
                                    '/usr/share/doc/amhello',
    
    65 61
                                    '/usr/share/doc/amhello/README'])
    

  • tests/integration/cmake.py
    ... ... @@ -28,10 +28,7 @@ def test_cmake_build(cli, tmpdir, datafiles):
    28 28
         result = cli.run(project=project, args=['checkout', element_name, checkout])
    
    29 29
         assert result.exit_code == 0
    
    30 30
     
    
    31
    -    assert_contains(checkout, ['/usr', '/usr/bin', '/usr/bin/hello',
    
    32
    -                               '/usr/lib/debug', '/usr/lib/debug/usr',
    
    33
    -                               '/usr/lib/debug/usr/bin',
    
    34
    -                               '/usr/lib/debug/usr/bin/hello'])
    
    31
    +    assert_contains(checkout, ['/usr', '/usr/bin', '/usr/bin/hello'])
    
    35 32
     
    
    36 33
     
    
    37 34
     @pytest.mark.datafiles(DATA_DIR)
    
    ... ... @@ -47,10 +44,7 @@ def test_cmake_confroot_build(cli, tmpdir, datafiles):
    47 44
         result = cli.run(project=project, args=['checkout', element_name, checkout])
    
    48 45
         assert result.exit_code == 0
    
    49 46
     
    
    50
    -    assert_contains(checkout, ['/usr', '/usr/bin', '/usr/bin/hello',
    
    51
    -                               '/usr/lib/debug', '/usr/lib/debug/usr',
    
    52
    -                               '/usr/lib/debug/usr/bin',
    
    53
    -                               '/usr/lib/debug/usr/bin/hello'])
    
    47
    +    assert_contains(checkout, ['/usr', '/usr/bin', '/usr/bin/hello'])
    
    54 48
     
    
    55 49
     
    
    56 50
     @pytest.mark.datafiles(DATA_DIR)
    

  • tests/integration/compose.py
    ... ... @@ -38,48 +38,40 @@ def create_compose_element(name, path, config={}):
    38 38
     @pytest.mark.datafiles(DATA_DIR)
    
    39 39
     @pytest.mark.parametrize("include_domains,exclude_domains,expected", [
    
    40 40
         # Test flat inclusion
    
    41
    -    ([], [], ['/usr', '/usr/lib', '/usr/bin',
    
    42
    -              '/usr/share', '/usr/lib/debug',
    
    43
    -              '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    
    44
    -              '/usr/lib/debug/usr/bin/hello', '/usr/bin/hello',
    
    41
    +    ([], [], ['/usr', '/usr/bin',
    
    42
    +              '/usr/share',
    
    43
    +              '/usr/bin/hello',
    
    45 44
                   '/usr/share/doc', '/usr/share/doc/amhello',
    
    46 45
                   '/usr/share/doc/amhello/README',
    
    47 46
                   '/tests', '/tests/test']),
    
    48 47
         # Test only runtime
    
    49
    -    (['runtime'], [], ['/usr', '/usr/lib', '/usr/share',
    
    48
    +    (['runtime'], [], ['/usr', '/usr/share',
    
    50 49
                            '/usr/bin', '/usr/bin/hello']),
    
    51 50
         # Test with runtime and doc
    
    52
    -    (['runtime', 'doc'], [], ['/usr', '/usr/lib', '/usr/share',
    
    51
    +    (['runtime', 'doc'], [], ['/usr', '/usr/share',
    
    53 52
                                   '/usr/bin', '/usr/bin/hello',
    
    54 53
                                   '/usr/share/doc', '/usr/share/doc/amhello',
    
    55 54
                                   '/usr/share/doc/amhello/README']),
    
    56 55
         # Test with only runtime excluded
    
    57
    -    ([], ['runtime'], ['/usr', '/usr/lib', '/usr/share',
    
    58
    -                       '/usr/lib/debug', '/usr/lib/debug/usr',
    
    59
    -                       '/usr/lib/debug/usr/bin',
    
    60
    -                       '/usr/lib/debug/usr/bin/hello',
    
    56
    +    ([], ['runtime'], ['/usr', '/usr/share',
    
    61 57
                            '/usr/share/doc', '/usr/share/doc/amhello',
    
    62 58
                            '/usr/share/doc/amhello/README',
    
    63 59
                            '/tests', '/tests/test']),
    
    64 60
         # Test with runtime and doc excluded
    
    65
    -    ([], ['runtime', 'doc'], ['/usr', '/usr/lib', '/usr/share',
    
    66
    -                              '/usr/lib/debug', '/usr/lib/debug/usr',
    
    67
    -                              '/usr/lib/debug/usr/bin',
    
    68
    -                              '/usr/lib/debug/usr/bin/hello',
    
    61
    +    ([], ['runtime', 'doc'], ['/usr', '/usr/share',
    
    69 62
                                   '/tests', '/tests/test']),
    
    70 63
         # Test with runtime simultaneously in- and excluded
    
    71
    -    (['runtime'], ['runtime'], ['/usr', '/usr/lib', '/usr/share']),
    
    64
    +    (['runtime'], ['runtime'], ['/usr', '/usr/share']),
    
    72 65
         # Test with runtime included and doc excluded
    
    73
    -    (['runtime'], ['doc'], ['/usr', '/usr/lib', '/usr/share',
    
    66
    +    (['runtime'], ['doc'], ['/usr', '/usr/share',
    
    74 67
                                 '/usr/bin', '/usr/bin/hello']),
    
    75 68
         # Test including a custom 'test' domain
    
    76
    -    (['test'], [], ['/usr', '/usr/lib', '/usr/share',
    
    69
    +    (['test'], [], ['/usr', '/usr/share',
    
    77 70
                         '/tests', '/tests/test']),
    
    78 71
         # Test excluding a custom 'test' domain
    
    79
    -    ([], ['test'], ['/usr', '/usr/lib', '/usr/bin',
    
    80
    -                    '/usr/share', '/usr/lib/debug',
    
    81
    -                    '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    
    82
    -                    '/usr/lib/debug/usr/bin/hello', '/usr/bin/hello',
    
    72
    +    ([], ['test'], ['/usr', '/usr/bin',
    
    73
    +                    '/usr/share',
    
    74
    +                    '/usr/bin/hello',
    
    83 75
                         '/usr/share/doc', '/usr/share/doc/amhello',
    
    84 76
                         '/usr/share/doc/amhello/README'])
    
    85 77
     ])
    

  • tests/sources/git.py
    ... ... @@ -455,6 +455,274 @@ def test_ref_not_in_track(cli, tmpdir, datafiles, fail):
    455 455
             assert "ref-not-in-track" in result.stderr
    
    456 456
     
    
    457 457
     
    
    458
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    
    459
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    
    460
    +@pytest.mark.parametrize("fail", ['warn', 'error'])
    
    461
    +def test_unlisted_submodule(cli, tmpdir, datafiles, fail):
    
    462
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    463
    +
    
    464
    +    # Make the warning an error if we're testing errors
    
    465
    +    if fail == 'error':
    
    466
    +        project_template = {
    
    467
    +            "name": "foo",
    
    468
    +            "fatal-warnings": ['git:unlisted-submodule']
    
    469
    +        }
    
    470
    +        _yaml.dump(project_template, os.path.join(project, 'project.conf'))
    
    471
    +
    
    472
    +    # Create the submodule first from the 'subrepofiles' subdir
    
    473
    +    subrepo = create_repo('git', str(tmpdir), 'subrepo')
    
    474
    +    subrepo.create(os.path.join(project, 'subrepofiles'))
    
    475
    +
    
    476
    +    # Create the repo from 'repofiles' subdir
    
    477
    +    repo = create_repo('git', str(tmpdir))
    
    478
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    
    479
    +
    
    480
    +    # Add a submodule pointing to the one we created
    
    481
    +    ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
    
    482
    +
    
    483
    +    # Create the source, and delete the explicit configuration
    
    484
    +    # of the submodules.
    
    485
    +    #
    
    486
    +    # We expect this to cause an unlisted submodule warning
    
    487
    +    # after the source has been fetched.
    
    488
    +    #
    
    489
    +    gitsource = repo.source_config(ref=ref)
    
    490
    +    del gitsource['submodules']
    
    491
    +
    
    492
    +    # Write out our test target
    
    493
    +    element = {
    
    494
    +        'kind': 'import',
    
    495
    +        'sources': [
    
    496
    +            gitsource
    
    497
    +        ]
    
    498
    +    }
    
    499
    +    _yaml.dump(element, os.path.join(project, 'target.bst'))
    
    500
    +
    
    501
    +    # We will not see the warning or error before the first fetch, because
    
    502
    +    # we don't have the repository yet and so we have no knowledge of
    
    503
    +    # the unlisted submodule.
    
    504
    +    result = cli.run(project=project, args=['show', 'target.bst'])
    
    505
    +    result.assert_success()
    
    506
    +    assert "git:unlisted-submodule" not in result.stderr
    
    507
    +
    
    508
    +    # We will notice this directly in fetch, as it will try to fetch
    
    509
    +    # the submodules it discovers as a result of fetching the primary repo.
    
    510
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    
    511
    +
    
    512
    +    # Assert a warning or an error depending on what we're checking
    
    513
    +    if fail == 'error':
    
    514
    +        result.assert_main_error(ErrorDomain.STREAM, None)
    
    515
    +        result.assert_task_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
    
    516
    +    else:
    
    517
    +        result.assert_success()
    
    518
    +        assert "git:unlisted-submodule" in result.stderr
    
    519
    +
    
    520
    +    # Now that we've fetched it, `bst show` will discover the unlisted submodule too
    
    521
    +    result = cli.run(project=project, args=['show', 'target.bst'])
    
    522
    +
    
    523
    +    # Assert a warning or an error depending on what we're checking
    
    524
    +    if fail == 'error':
    
    525
    +        result.assert_main_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
    
    526
    +    else:
    
    527
    +        result.assert_success()
    
    528
    +        assert "git:unlisted-submodule" in result.stderr
    
    529
    +
    
    530
    +
    
    531
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    
    532
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    
    533
    +@pytest.mark.parametrize("fail", ['warn', 'error'])
    
    534
    +def test_track_unlisted_submodule(cli, tmpdir, datafiles, fail):
    
    535
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    536
    +
    
    537
    +    # Make the warning an error if we're testing errors
    
    538
    +    if fail == 'error':
    
    539
    +        project_template = {
    
    540
    +            "name": "foo",
    
    541
    +            "fatal-warnings": ['git:unlisted-submodule']
    
    542
    +        }
    
    543
    +        _yaml.dump(project_template, os.path.join(project, 'project.conf'))
    
    544
    +
    
    545
    +    # Create the submodule first from the 'subrepofiles' subdir
    
    546
    +    subrepo = create_repo('git', str(tmpdir), 'subrepo')
    
    547
    +    subrepo.create(os.path.join(project, 'subrepofiles'))
    
    548
    +
    
    549
    +    # Create the repo from 'repofiles' subdir
    
    550
    +    repo = create_repo('git', str(tmpdir))
    
    551
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    
    552
    +
    
    553
    +    # Add a submodule pointing to the one we created, but use
    
    554
    +    # the original ref, let the submodules appear after tracking
    
    555
    +    repo.add_submodule('subdir', 'file://' + subrepo.repo)
    
    556
    +
    
    557
    +    # Create the source, and delete the explicit configuration
    
    558
    +    # of the submodules.
    
    559
    +    gitsource = repo.source_config(ref=ref)
    
    560
    +    del gitsource['submodules']
    
    561
    +
    
    562
    +    # Write out our test target
    
    563
    +    element = {
    
    564
    +        'kind': 'import',
    
    565
    +        'sources': [
    
    566
    +            gitsource
    
    567
    +        ]
    
    568
    +    }
    
    569
    +    _yaml.dump(element, os.path.join(project, 'target.bst'))
    
    570
    +
    
    571
    +    # Fetch the repo, we will not see the warning because we
    
    572
    +    # are still pointing to a ref which predates the submodules
    
    573
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    
    574
    +    result.assert_success()
    
    575
    +    assert "git:unlisted-submodule" not in result.stderr
    
    576
    +
    
    577
    +    # We won't get a warning/error when tracking either, the source
    
    578
    +    # has not become Consistency.CACHED so the opportunity to check
    
    579
    +    # for the warning has not yet arisen.
    
    580
    +    result = cli.run(project=project, args=['track', 'target.bst'])
    
    581
    +    result.assert_success()
    
    582
    +    assert "git:unlisted-submodule" not in result.stderr
    
    583
    +
    
    584
    +    # Fetching the repo at the new ref will finally reveal the warning
    
    585
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    
    586
    +    if fail == 'error':
    
    587
    +        result.assert_main_error(ErrorDomain.STREAM, None)
    
    588
    +        result.assert_task_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
    
    589
    +    else:
    
    590
    +        result.assert_success()
    
    591
    +        assert "git:unlisted-submodule" in result.stderr
    
    592
    +
    
    593
    +
    
    594
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    
    595
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    
    596
    +@pytest.mark.parametrize("fail", ['warn', 'error'])
    
    597
    +def test_invalid_submodule(cli, tmpdir, datafiles, fail):
    
    598
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    599
    +
    
    600
    +    # Make the warning an error if we're testing errors
    
    601
    +    if fail == 'error':
    
    602
    +        project_template = {
    
    603
    +            "name": "foo",
    
    604
    +            "fatal-warnings": ['git:invalid-submodule']
    
    605
    +        }
    
    606
    +        _yaml.dump(project_template, os.path.join(project, 'project.conf'))
    
    607
    +
    
    608
    +    # Create the repo from 'repofiles' subdir
    
    609
    +    repo = create_repo('git', str(tmpdir))
    
    610
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    
    611
    +
    
    612
    +    # Create the source without any submodules, and add
    
    613
    +    # an invalid submodule configuration to it.
    
    614
    +    #
    
    615
    +    # We expect this to cause an invalid submodule warning
    
    616
    +    # after the source has been fetched and we know what
    
    617
    +    # the real submodules actually are.
    
    618
    +    #
    
    619
    +    gitsource = repo.source_config(ref=ref)
    
    620
    +    gitsource['submodules'] = {
    
    621
    +        'subdir': {
    
    622
    +            'url': 'https://pony.org/repo.git'
    
    623
    +        }
    
    624
    +    }
    
    625
    +
    
    626
    +    # Write out our test target
    
    627
    +    element = {
    
    628
    +        'kind': 'import',
    
    629
    +        'sources': [
    
    630
    +            gitsource
    
    631
    +        ]
    
    632
    +    }
    
    633
    +    _yaml.dump(element, os.path.join(project, 'target.bst'))
    
    634
    +
    
    635
    +    # We will not see the warning or error before the first fetch, because
    
    636
    +    # we don't have the repository yet and so we have no knowledge of
    
    637
    +    # the invalid submodule.
    
    638
    +    result = cli.run(project=project, args=['show', 'target.bst'])
    
    639
    +    result.assert_success()
    
    640
    +    assert "git:invalid-submodule" not in result.stderr
    
    641
    +
    
    642
    +    # We will notice this directly in fetch, as it will try to fetch
    
    643
    +    # the submodules it discovers as a result of fetching the primary repo.
    
    644
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    
    645
    +
    
    646
    +    # Assert a warning or an error depending on what we're checking
    
    647
    +    if fail == 'error':
    
    648
    +        result.assert_main_error(ErrorDomain.STREAM, None)
    
    649
    +        result.assert_task_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
    
    650
    +    else:
    
    651
    +        result.assert_success()
    
    652
    +        assert "git:invalid-submodule" in result.stderr
    
    653
    +
    
    654
    +    # Now that we've fetched it, `bst show` will discover the invalid submodule too
    
    655
    +    result = cli.run(project=project, args=['show', 'target.bst'])
    
    656
    +
    
    657
    +    # Assert a warning or an error depending on what we're checking
    
    658
    +    if fail == 'error':
    
    659
    +        result.assert_main_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
    
    660
    +    else:
    
    661
    +        result.assert_success()
    
    662
    +        assert "git:invalid-submodule" in result.stderr
    
    663
    +
    
    664
    +
    
    665
    +@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    
    666
    +@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    
    667
    +@pytest.mark.parametrize("fail", ['warn', 'error'])
    
    668
    +def test_track_invalid_submodule(cli, tmpdir, datafiles, fail):
    
    669
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    
    670
    +
    
    671
    +    # Make the warning an error if we're testing errors
    
    672
    +    if fail == 'error':
    
    673
    +        project_template = {
    
    674
    +            "name": "foo",
    
    675
    +            "fatal-warnings": ['git:invalid-submodule']
    
    676
    +        }
    
    677
    +        _yaml.dump(project_template, os.path.join(project, 'project.conf'))
    
    678
    +
    
    679
    +    # Create the submodule first from the 'subrepofiles' subdir
    
    680
    +    subrepo = create_repo('git', str(tmpdir), 'subrepo')
    
    681
    +    subrepo.create(os.path.join(project, 'subrepofiles'))
    
    682
    +
    
    683
    +    # Create the repo from 'repofiles' subdir
    
    684
    +    repo = create_repo('git', str(tmpdir))
    
    685
    +    ref = repo.create(os.path.join(project, 'repofiles'))
    
    686
    +
    
    687
    +    # Add a submodule pointing to the one we created
    
    688
    +    ref = repo.add_submodule('subdir', 'file://' + subrepo.repo)
    
    689
    +
    
    690
    +    # Add a commit beyond the ref which *removes* the submodule we've added
    
    691
    +    repo.remove_path('subdir')
    
    692
    +
    
    693
    +    # Create the source, this will keep the submodules so initially
    
    694
    +    # the configuration is valid for the ref we're using
    
    695
    +    gitsource = repo.source_config(ref=ref)
    
    696
    +
    
    697
    +    # Write out our test target
    
    698
    +    element = {
    
    699
    +        'kind': 'import',
    
    700
    +        'sources': [
    
    701
    +            gitsource
    
    702
    +        ]
    
    703
    +    }
    
    704
    +    _yaml.dump(element, os.path.join(project, 'target.bst'))
    
    705
    +
    
    706
    +    # Fetch the repo, we will not see the warning because we
    
    707
    +    # are still pointing to a ref whose submodule configuration is valid
    
    708
    +    result = cli.run(project=project, args=['fetch', 'target.bst'])
    
    709
    +    result.assert_success()
    
    710
    +    assert "git:invalid-submodule" not in result.stderr
    
    711
    +
    
    712
    +    # In this case, we will get the error directly after tracking,
    
    713
    +    # since the new HEAD does not require any submodules which are
    
    714
    +    # not locally cached, the Source will be CACHED directly after
    
    715
    +    # tracking and the validations will occur as a result.
    
    716
    +    #
    
    717
    +    result = cli.run(project=project, args=['track', 'target.bst'])
    
    718
    +    if fail == 'error':
    
    719
    +        result.assert_main_error(ErrorDomain.STREAM, None)
    
    720
    +        result.assert_task_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
    
    721
    +    else:
    
    722
    +        result.assert_success()
    
    723
    +        assert "git:invalid-submodule" in result.stderr
    
    724
    +
    
    725
    +
    
    458 726
     @pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
    
    459 727
     @pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
    
    460 728
     @pytest.mark.parametrize("ref_format", ['sha1', 'git-describe'])
    

  • tests/storage/virtual_directory_import.py
    ... ... @@ -149,10 +149,10 @@ def resolve_symlinks(path, root):
    149 149
                 if target.startswith(os.path.sep):
    
    150 150
                     # Absolute link - relative to root
    
    151 151
                     location = os.path.join(root, target, tail)
    
    152
    +                return resolve_symlinks(location, root)
    
    152 153
                 else:
    
    153
    -                # Relative link - relative to symlink location
    
    154
    -                location = os.path.join(location, target)
    
    155
    -            return resolve_symlinks(location, root)
    
    154
    +                return resolve_symlinks(os.path.join(os.path.join(*components[:i]), target, tail), root)
    
    155
    +
    
    156 156
         # If we got here, no symlinks were found. Add on the final component and return.
    
    157 157
         location = os.path.join(location, components[-1])
    
    158 158
         return location
    
    ... ... @@ -199,7 +199,13 @@ def _import_test(tmpdir, original, overlay, generator_function, verify_contents=
    199 199
                         pass
    
    200 200
                     else:
    
    201 201
                         assert os.path.islink(realpath)
    
    202
    -                    assert os.readlink(realpath) == content
    
    202
    +                    # We expect all storage to normalise absolute symlinks.
    
    203
    +                    depth = len(path.split(os.path.sep)) - 1
    
    204
    +                    if content.startswith(os.path.sep):
    
    205
    +                        assert os.readlink(realpath) == os.path.sep.join([".."] * depth + [content[1:]])
    
    206
    +                    else:
    
    207
    +                        assert os.readlink(realpath) == content
    
    208
    +
    
    203 209
                 elif typename == 'D':
    
    204 210
                     # We can't do any more tests than this because it
    
    205 211
                     # depends on things present in the original. Blank
    

  • tests/testutils/repo/git.py
    ... ... @@ -76,6 +76,12 @@ class Git(Repo):
    76 76
             self._run_git('commit', '-m', 'Added the submodule')
    
    77 77
             return self.latest_commit()
    
    78 78
     
    
    79
    +    # This can also be used to remove a file or a submodule
    
    80
    +    def remove_path(self, path):
    
    81
    +        self._run_git('rm', path)
    
    82
    +        self._run_git('commit', '-m', 'Removing {}'.format(path))
    
    83
    +        return self.latest_commit()
    
    84
    +
    
    79 85
         def source_config(self, ref=None, checkout_submodules=None):
    
    80 86
             config = {
    
    81 87
                 'kind': 'git',
    



  • [Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]