Chandan Singh pushed to branch chandan/source-checkout at BuildStream / buildstream
Commits:
- f1aa0974 by Javier Jardón at 2018-10-18T14:13:35Z
- f1550c8e by Valentin David at 2018-10-18T16:02:45Z
- cad1f9c7 by Chandan Singh at 2018-10-19T17:59:28Z
19 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_includes.py
- buildstream/_loader/loadelement.py
- buildstream/_loader/loader.py
- buildstream/_options/optionpool.py
- buildstream/_pipeline.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/element.py
- buildstream/plugins/elements/junction.py
- buildstream/plugins/sources/git.py
- buildstream/source.py
- tests/completions/completions.py
- + tests/frontend/project/elements/checkout-deps.bst
- + tests/frontend/project/files/etc-files/etc/buildstream/config
- + tests/frontend/source_checkout.py
- tests/yaml/yaml.py
Changes:
buildstream/_artifactcache/artifactcache.py
| ... | ... | @@ -19,7 +19,8 @@ | 
| 19 | 19 |  | 
| 20 | 20 |  import os
 | 
| 21 | 21 |  import string
 | 
| 22 | -from collections import Mapping, namedtuple
 | |
| 22 | +from collections import namedtuple
 | |
| 23 | +from collections.abc import Mapping
 | |
| 23 | 24 |  | 
| 24 | 25 |  from ..types import _KeyStrength
 | 
| 25 | 26 |  from .._exceptions import ArtifactError, ImplError, LoadError, LoadErrorReason
 | 
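Note: the same import fix recurs in most files in this push. The container ABCs (Mapping and friends) have lived in collections.abc since Python 3.3, and importing them from collections is deprecated and removed in later Python 3 releases, so only the concrete types stay behind. A minimal sketch of the pattern (the helper function is illustrative, not BuildStream code):

    # Concrete containers stay in collections; ABCs come from collections.abc.
    from collections import OrderedDict, namedtuple
    from collections.abc import Mapping

    def is_mapping(value):
        # isinstance() against the ABC matches dict and any other mapping type
        return isinstance(value, Mapping)

    assert is_mapping({'key': 'value'})
    assert not is_mapping(['not', 'a', 'mapping'])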
buildstream/_context.py
| ... | ... | @@ -19,7 +19,8 @@ | 
| 19 | 19 |  | 
| 20 | 20 |  import os
 | 
| 21 | 21 |  import datetime
 | 
| 22 | -from collections import deque, Mapping
 | |
| 22 | +from collections import deque
 | |
| 23 | +from collections.abc import Mapping
 | |
| 23 | 24 |  from contextlib import contextmanager
 | 
| 24 | 25 |  from . import utils
 | 
| 25 | 26 |  from . import _cachekey
 | 
buildstream/_frontend/cli.py
| ... | ... | @@ -662,6 +662,33 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar): | 
| 662 | 662 |                              tar=tar)
 | 
| 663 | 663 |  | 
| 664 | 664 |  | 
| 665 | +##################################################################
 | |
| 666 | +#                  Source Checkout Command                      #
 | |
| 667 | +##################################################################
 | |
| 668 | +@cli.command(name='source-checkout', short_help='Checkout sources for an element')
 | |
| 669 | +@click.option('--except', 'except_', multiple=True,
 | |
| 670 | +              type=click.Path(readable=False),
 | |
| 671 | +              help="Except certain dependencies")
 | |
| 672 | +@click.option('--deps', '-d', default='none',
 | |
| 673 | +              type=click.Choice(['build', 'none', 'run', 'all']),
 | |
| 674 | +              help='The dependencies whose sources to checkout (default: none)')
 | |
| 675 | +@click.option('--fetch', default=False, is_flag=True,
 | |
| 676 | +              help='Fetch elements if they are not already fetched')
 | |
| 677 | +@click.argument('element',
 | |
| 678 | +                type=click.Path(readable=False))
 | |
| 679 | +@click.argument('location', type=click.Path())
 | |
| 680 | +@click.pass_obj
 | |
| 681 | +def source_checkout(app, element, location, deps, fetch, except_):
 | |
| 682 | +    """Checkout sources of an element to the specified location
 | |
| 683 | +    """
 | |
| 684 | +    with app.initialized():
 | |
| 685 | +        app.stream.source_checkout(element,
 | |
| 686 | +                                   location=location,
 | |
| 687 | +                                   deps=deps,
 | |
| 688 | +                                   fetch=fetch,
 | |
| 689 | +                                   except_targets=except_)
 | |
| 690 | + | |
| 691 | + | |
| 665 | 692 |  ##################################################################
 | 
| 666 | 693 |  #                      Workspace Command                         #
 | 
| 667 | 694 |  ##################################################################
 | 
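Note: the new source-checkout command uses the same Click plumbing as the existing checkout command above it. A rough sketch of driving it in-process with Click's test runner, assuming the cli group from buildstream/_frontend/cli.py is importable and a project is available in the working directory; the element name and location are illustrative placeholders:

    from click.testing import CliRunner
    from buildstream._frontend.cli import cli

    runner = CliRunner()
    result = runner.invoke(cli, [
        'source-checkout',
        '--deps', 'none',        # only the target's own sources
        '--fetch',               # fetch anything not already cached
        'checkout-deps.bst',     # target element (placeholder)
        '/tmp/bst-sources',      # checkout location (placeholder)
    ])
    print(result.exit_code)
    print(result.output)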
buildstream/_includes.py
| 1 | 1 |  import os
 | 
| 2 | -from collections import Mapping
 | |
| 2 | +from collections.abc import Mapping
 | |
| 3 | 3 |  from . import _yaml
 | 
| 4 | 4 |  from ._exceptions import LoadError, LoadErrorReason
 | 
| 5 | 5 |  | 
buildstream/_loader/loadelement.py
| ... | ... | @@ -18,7 +18,7 @@ | 
| 18 | 18 |  #        Tristan Van Berkom <tristan vanberkom codethink co uk>
 | 
| 19 | 19 |  | 
| 20 | 20 |  # System imports
 | 
| 21 | -from collections import Mapping
 | |
| 21 | +from collections.abc import Mapping
 | |
| 22 | 22 |  | 
| 23 | 23 |  # BuildStream toplevel imports
 | 
| 24 | 24 |  from .._exceptions import LoadError, LoadErrorReason
 | 
buildstream/_loader/loader.py
| ... | ... | @@ -19,7 +19,8 @@ | 
| 19 | 19 |  | 
| 20 | 20 |  import os
 | 
| 21 | 21 |  from functools import cmp_to_key
 | 
| 22 | -from collections import Mapping, namedtuple
 | |
| 22 | +from collections import namedtuple
 | |
| 23 | +from collections.abc import Mapping
 | |
| 23 | 24 |  import tempfile
 | 
| 24 | 25 |  import shutil
 | 
| 25 | 26 |  | 
buildstream/_options/optionpool.py
| ... | ... | @@ -18,7 +18,7 @@ | 
| 18 | 18 |  #        Tristan Van Berkom <tristan vanberkom codethink co uk>
 | 
| 19 | 19 |  #
 | 
| 20 | 20 |  | 
| 21 | -from collections import Mapping
 | |
| 21 | +from collections.abc import Mapping
 | |
| 22 | 22 |  import jinja2
 | 
| 23 | 23 |  | 
| 24 | 24 |  from .. import _yaml
 | 
buildstream/_pipeline.py
| ... | ... | @@ -383,6 +383,33 @@ class Pipeline(): | 
| 383 | 383 |                  detail += "  " + element._get_full_name() + "\n"
 | 
| 384 | 384 |              raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
 | 
| 385 | 385 |  | 
| 386 | +    # assert_sources_cached()
 | |
| 387 | +    #
 | |
| 388 | +    # Asserts that sources for the given list of elements are cached.
 | |
| 389 | +    #
 | |
| 390 | +    # Args:
 | |
| 391 | +    #    elements (list): The list of elements
 | |
| 392 | +    #
 | |
| 393 | +    def assert_sources_cached(self, elements):
 | |
| 394 | +        uncached = []
 | |
| 395 | +        with self._context.timed_activity("Checking sources"):
 | |
| 396 | +            for element in elements:
 | |
| 397 | +                if element._get_consistency() != Consistency.CACHED:
 | |
| 398 | +                    uncached.append(element)
 | |
| 399 | + | |
| 400 | +        if uncached:
 | |
| 401 | +            detail = "Sources are not cached for the following elements:\n\n"
 | |
| 402 | +            for element in uncached:
 | |
| 403 | +                detail += "  Sources for element: {} are not cached\n".format(element._get_full_name())
 | |
| 404 | +                for source in element.sources():
 | |
| 405 | +                    if source._get_consistency() != Consistency.CACHED:
 | |
| 406 | +                        detail += "    Source {} is not cached\n".format(source)
 | |
| 407 | +                detail += '\n'
 | |
| 408 | +            detail += "Try fetching these elements first with `bst fetch`,\n" + \
 | |
| 409 | +                      "or run this command with `--fetch` option\n"
 | |
| 410 | + | |
| 411 | +            raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
 | |
| 412 | + | |
| 386 | 413 |      #############################################################
 | 
| 387 | 414 |      #                     Private Methods                       #
 | 
| 388 | 415 |      #############################################################
 | 
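Note: assert_sources_cached() raises a PipelineError with reason 'uncached-sources' instead of returning a status, so callers either fetch first or let the error surface. A hedged sketch of that calling pattern (the wrapper function is illustrative; it mirrors what Stream.source_checkout does in the _stream.py hunk below):

    # PipelineError import path is the same one junction.py uses in this push.
    from buildstream._pipeline import PipelineError

    def ensure_sources_cached(stream, elements, *, fetch=False):
        if fetch:
            # Queue fetch jobs for any sources not already cached
            stream._fetch(elements)
        try:
            stream._pipeline.assert_sources_cached(elements)
        except PipelineError:
            # reason == 'uncached-sources'; detail lists each uncached source
            raise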
buildstream/_project.py
| ... | ... | @@ -19,7 +19,8 @@ | 
| 19 | 19 |  #        Tiago Gomes <tiago gomes codethink co uk>
 | 
| 20 | 20 |  | 
| 21 | 21 |  import os
 | 
| 22 | -from collections import Mapping, OrderedDict
 | |
| 22 | +from collections import OrderedDict
 | |
| 23 | +from collections.abc import Mapping
 | |
| 23 | 24 |  from pluginbase import PluginBase
 | 
| 24 | 25 |  from . import utils
 | 
| 25 | 26 |  from . import _cachekey
 | 
buildstream/_stream.py
| ... | ... | @@ -379,27 +379,7 @@ class Stream(): | 
| 379 | 379 |          elements, _ = self._load((target,), (), fetch_subprojects=True)
 | 
| 380 | 380 |          target = elements[0]
 | 
| 381 | 381 |  | 
| 382 | -        if not tar:
 | |
| 383 | -            try:
 | |
| 384 | -                os.makedirs(location, exist_ok=True)
 | |
| 385 | -            except OSError as e:
 | |
| 386 | -                raise StreamError("Failed to create checkout directory: '{}'"
 | |
| 387 | -                                  .format(e)) from e
 | |
| 388 | - | |
| 389 | -        if not tar:
 | |
| 390 | -            if not os.access(location, os.W_OK):
 | |
| 391 | -                raise StreamError("Checkout directory '{}' not writable"
 | |
| 392 | -                                  .format(location))
 | |
| 393 | -            if not force and os.listdir(location):
 | |
| 394 | -                raise StreamError("Checkout directory '{}' not empty"
 | |
| 395 | -                                  .format(location))
 | |
| 396 | -        elif os.path.exists(location) and location != '-':
 | |
| 397 | -            if not os.access(location, os.W_OK):
 | |
| 398 | -                raise StreamError("Output file '{}' not writable"
 | |
| 399 | -                                  .format(location))
 | |
| 400 | -            if not force and os.path.exists(location):
 | |
| 401 | -                raise StreamError("Output file '{}' already exists"
 | |
| 402 | -                                  .format(location))
 | |
| 382 | +        self.__check_location_writable(location, force=force, tar=tar)
 | |
| 403 | 383 |  | 
| 404 | 384 |          # Stage deps into a temporary sandbox first
 | 
| 405 | 385 |          try:
 | 
| ... | ... | @@ -436,6 +416,42 @@ class Stream(): | 
| 436 | 416 |              raise StreamError("Error while staging dependencies into a sandbox"
 | 
| 437 | 417 |                                ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
 | 
| 438 | 418 |  | 
| 419 | +    # source_checkout()
 | |
| 420 | +    #
 | |
| 421 | +    # Checkout sources of the target element to the specified location
 | |
| 422 | +    #
 | |
| 423 | +    # Args:
 | |
| 424 | +    #    target (str): The target element whose sources to checkout
 | |
| 425 | +    #    location (str): Location to checkout the sources to
 | |
| 426 | +    #    deps (str): The dependencies to checkout
 | |
| 427 | +    #    fetch (bool): Whether to fetch missing sources
 | |
| 428 | +    #    except_targets (list): List of targets to except from staging
 | |
| 429 | +    #
 | |
| 430 | +    def source_checkout(self, target, *,
 | |
| 431 | +                        location=None,
 | |
| 432 | +                        deps='none',
 | |
| 433 | +                        fetch=False,
 | |
| 434 | +                        except_targets=()):
 | |
| 435 | + | |
| 436 | +        self.__check_location_writable(location)
 | |
| 437 | + | |
| 438 | +        elements, _ = self._load((target,), (),
 | |
| 439 | +                                 selection=deps,
 | |
| 440 | +                                 except_targets=except_targets,
 | |
| 441 | +                                 fetch_subprojects=True)
 | |
| 442 | + | |
| 443 | +        # Assert all sources are cached
 | |
| 444 | +        if fetch:
 | |
| 445 | +            self._fetch(elements)
 | |
| 446 | +        self._pipeline.assert_sources_cached(elements)
 | |
| 447 | + | |
| 448 | +        # Stage all sources determined by scope
 | |
| 449 | +        try:
 | |
| 450 | +            self._write_element_sources(location, elements)
 | |
| 451 | +        except BstError as e:
 | |
| 452 | +            raise StreamError("Error while writing sources"
 | |
| 453 | +                              ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
 | |
| 454 | + | |
| 439 | 455 |      # workspace_open
 | 
| 440 | 456 |      #
 | 
| 441 | 457 |      # Open a project workspace
 | 
| ... | ... | @@ -719,7 +735,7 @@ class Stream(): | 
| 719 | 735 |                  if self._write_element_script(source_directory, element)
 | 
| 720 | 736 |              ]
 | 
| 721 | 737 |  | 
| 722 | -            self._write_element_sources(tempdir, elements)
 | |
| 738 | +            self._write_element_sources(os.path.join(tempdir, "source"), elements)
 | |
| 723 | 739 |              self._write_build_script(tempdir, elements)
 | 
| 724 | 740 |              self._collect_sources(tempdir, tar_location,
 | 
| 725 | 741 |                                    target.normal_name, compression)
 | 
| ... | ... | @@ -1082,11 +1098,10 @@ class Stream(): | 
| 1082 | 1098 |      # Write all source elements to the given directory
 | 
| 1083 | 1099 |      def _write_element_sources(self, directory, elements):
 | 
| 1084 | 1100 |          for element in elements:
 | 
| 1085 | -            source_dir = os.path.join(directory, "source")
 | |
| 1086 | -            element_source_dir = os.path.join(source_dir, element.normal_name)
 | |
| 1087 | -            os.makedirs(element_source_dir)
 | |
| 1088 | - | |
| 1089 | -            element._stage_sources_at(element_source_dir)
 | |
| 1101 | +            element_source_dir = os.path.join(directory, element.normal_name)
 | |
| 1102 | +            if list(element.sources()):
 | |
| 1103 | +                os.makedirs(element_source_dir)
 | |
| 1104 | +                element._stage_sources_at(element_source_dir)
 | |
| 1090 | 1105 |  | 
| 1091 | 1106 |      # Write a master build script to the sandbox
 | 
| 1092 | 1107 |      def _write_build_script(self, directory, elements):
 | 
| ... | ... | @@ -1115,3 +1130,29 @@ class Stream(): | 
| 1115 | 1130 |  | 
| 1116 | 1131 |              with tarfile.open(tar_name, permissions) as tar:
 | 
| 1117 | 1132 |                  tar.add(directory, arcname=element_name)
 | 
| 1133 | + | |
| 1134 | +    #############################################################
 | |
| 1135 | +    #                    Private Methods                        #
 | |
| 1136 | +    #############################################################
 | |
| 1137 | + | |
| 1138 | +    # Check if the given location is writable
 | |
| 1139 | +    def __check_location_writable(self, location, force=False, tar=False):
 | |
| 1140 | +        if not tar:
 | |
| 1141 | +            try:
 | |
| 1142 | +                os.makedirs(location, exist_ok=True)
 | |
| 1143 | +            except OSError as e:
 | |
| 1144 | +                raise StreamError("Failed to create checkout directory: '{}'"
 | |
| 1145 | +                                  .format(e)) from e
 | |
| 1146 | +            if not os.access(location, os.W_OK):
 | |
| 1147 | +                raise StreamError("Checkout directory '{}' not writable"
 | |
| 1148 | +                                  .format(location))
 | |
| 1149 | +            if not force and os.listdir(location):
 | |
| 1150 | +                raise StreamError("Checkout directory '{}' not empty"
 | |
| 1151 | +                                  .format(location))
 | |
| 1152 | +        elif os.path.exists(location) and location != '-':
 | |
| 1153 | +            if not os.access(location, os.W_OK):
 | |
| 1154 | +                raise StreamError("Output file '{}' not writable"
 | |
| 1155 | +                                  .format(location))
 | |
| 1156 | +            if not force and os.path.exists(location):
 | |
| 1157 | +                raise StreamError("Output file '{}' already exists"
 | |
| 1158 | +                                  .format(location)) | 
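Note: __check_location_writable() factors the location checks out of checkout() so source_checkout() can reuse them: a directory checkout creates the location and requires it to be writable and, without force, empty; a tar checkout only validates the output path, with '-' meaning stdout. A standalone sketch of the same decision logic, with StreamError swapped for RuntimeError so it runs outside BuildStream:

    import os

    def check_location_writable(location, *, force=False, tar=False):
        if not tar:
            os.makedirs(location, exist_ok=True)
            if not os.access(location, os.W_OK):
                raise RuntimeError("Checkout directory '{}' not writable".format(location))
            if not force and os.listdir(location):
                raise RuntimeError("Checkout directory '{}' not empty".format(location))
        elif os.path.exists(location) and location != '-':
            if not os.access(location, os.W_OK):
                raise RuntimeError("Output file '{}' not writable".format(location))
            if not force:
                raise RuntimeError("Output file '{}' already exists".format(location))

    check_location_writable('/tmp/bst-sources', force=True)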
buildstream/element.py
| ... | ... | @@ -76,7 +76,8 @@ import os | 
| 76 | 76 |  import re
 | 
| 77 | 77 |  import stat
 | 
| 78 | 78 |  import copy
 | 
| 79 | -from collections import Mapping, OrderedDict
 | |
| 79 | +from collections import OrderedDict
 | |
| 80 | +from collections.abc import Mapping
 | |
| 80 | 81 |  from contextlib import contextmanager
 | 
| 81 | 82 |  import tempfile
 | 
| 82 | 83 |  import shutil
 | 
buildstream/plugins/elements/junction.py
| ... | ... | @@ -124,7 +124,7 @@ the user to resolve possibly conflicting nested junctions by creating a junction | 
| 124 | 124 |  with the same name in the top-level project, which then takes precedence.
 | 
| 125 | 125 |  """
 | 
| 126 | 126 |  | 
| 127 | -from collections import Mapping
 | |
| 127 | +from collections.abc import Mapping
 | |
| 128 | 128 |  from buildstream import Element
 | 
| 129 | 129 |  from buildstream._pipeline import PipelineError
 | 
| 130 | 130 |  | 
buildstream/plugins/sources/git.py
| ... | ... | @@ -89,7 +89,7 @@ import os | 
| 89 | 89 |  import errno
 | 
| 90 | 90 |  import re
 | 
| 91 | 91 |  import shutil
 | 
| 92 | -from collections import Mapping
 | |
| 92 | +from collections.abc import Mapping
 | |
| 93 | 93 |  from io import StringIO
 | 
| 94 | 94 |  | 
| 95 | 95 |  from configparser import RawConfigParser
 | 
buildstream/source.py
| ... | ... | @@ -155,7 +155,7 @@ Class Reference | 
| 155 | 155 |  """
 | 
| 156 | 156 |  | 
| 157 | 157 |  import os
 | 
| 158 | -from collections import Mapping
 | |
| 158 | +from collections.abc import Mapping
 | |
| 159 | 159 |  from contextlib import contextmanager
 | 
| 160 | 160 |  | 
| 161 | 161 |  from . import Plugin, Consistency
 | 
tests/completions/completions.py
| ... | ... | @@ -15,6 +15,7 @@ MAIN_COMMANDS = [ | 
| 15 | 15 |      'push ',
 | 
| 16 | 16 |      'shell ',
 | 
| 17 | 17 |      'show ',
 | 
| 18 | +    'source-checkout ',
 | |
| 18 | 19 |      'source-bundle ',
 | 
| 19 | 20 |      'track ',
 | 
| 20 | 21 |      'workspace '
 | 
tests/frontend/project/elements/checkout-deps.bst (new file)
| 1 | +kind: import
 | |
| 2 | +description: It is important for this element to have both build and runtime dependencies
 | |
| 3 | +sources:
 | |
| 4 | +- kind: local
 | |
| 5 | +  path: files/etc-files
 | |
| 6 | +depends:
 | |
| 7 | +- filename: import-dev.bst
 | |
| 8 | +  type: build
 | |
| 9 | +- filename: import-bin.bst
 | |
| 10 | +  type: runtime | 
tests/frontend/project/files/etc-files/etc/buildstream/config (new file)
| 1 | +config | 
tests/frontend/source_checkout.py (new file)
| 1 | +import os
 | |
| 2 | +import pytest
 | |
| 3 | + | |
| 4 | +from tests.testutils import cli
 | |
| 5 | + | |
| 6 | +from buildstream import utils, _yaml
 | |
| 7 | +from buildstream._exceptions import ErrorDomain, LoadErrorReason
 | |
| 8 | + | |
| 9 | +# Project directory
 | |
| 10 | +DATA_DIR = os.path.join(
 | |
| 11 | +    os.path.dirname(os.path.realpath(__file__)),
 | |
| 12 | +    'project',
 | |
| 13 | +)
 | |
| 14 | + | |
| 15 | + | |
| 16 | +def generate_remote_import_element(input_path, output_path):
 | |
| 17 | +    return {
 | |
| 18 | +        'kind': 'import',
 | |
| 19 | +        'sources': [
 | |
| 20 | +            {
 | |
| 21 | +                'kind': 'remote',
 | |
| 22 | +                'url': 'file://{}'.format(input_path),
 | |
| 23 | +                'filename': output_path,
 | |
| 24 | +                'ref': utils.sha256sum(input_path),
 | |
| 25 | +            }
 | |
| 26 | +        ]
 | |
| 27 | +    }
 | |
| 28 | + | |
| 29 | + | |
| 30 | +@pytest.mark.datafiles(DATA_DIR)
 | |
| 31 | +def test_source_checkout(datafiles, cli):
 | |
| 32 | +    project = os.path.join(datafiles.dirname, datafiles.basename)
 | |
| 33 | +    checkout = os.path.join(cli.directory, 'source-checkout')
 | |
| 34 | +    target = 'checkout-deps.bst'
 | |
| 35 | + | |
| 36 | +    result = cli.run(project=project, args=['source-checkout', target, '--deps', 'none', checkout])
 | |
| 37 | +    result.assert_success()
 | |
| 38 | + | |
| 39 | +    assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
 | |
| 40 | + | |
| 41 | + | |
| 42 | +@pytest.mark.datafiles(DATA_DIR)
 | |
| 43 | +@pytest.mark.parametrize('deps', [('build'), ('none'), ('run'), ('all')])
 | |
| 44 | +def test_source_checkout_deps(datafiles, cli, deps):
 | |
| 45 | +    project = os.path.join(datafiles.dirname, datafiles.basename)
 | |
| 46 | +    checkout = os.path.join(cli.directory, 'source-checkout')
 | |
| 47 | +    target = 'checkout-deps.bst'
 | |
| 48 | + | |
| 49 | +    result = cli.run(project=project, args=['source-checkout', target, '--deps', deps, checkout])
 | |
| 50 | +    result.assert_success()
 | |
| 51 | + | |
| 52 | +    # Sources of the target
 | |
| 53 | +    if deps == 'build':
 | |
| 54 | +        assert not os.path.exists(os.path.join(checkout, 'checkout-deps'))
 | |
| 55 | +    else:
 | |
| 56 | +        assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
 | |
| 57 | + | |
| 58 | +    # Sources of the target's build dependencies
 | |
| 59 | +    if deps in ('build', 'all'):
 | |
| 60 | +        assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
 | |
| 61 | +    else:
 | |
| 62 | +        assert not os.path.exists(os.path.join(checkout, 'import-dev'))
 | |
| 63 | + | |
| 64 | +    # Sources of the target's runtime dependencies
 | |
| 65 | +    if deps in ('run', 'all'):
 | |
| 66 | +        assert os.path.exists(os.path.join(checkout, 'import-bin', 'usr', 'bin', 'hello'))
 | |
| 67 | +    else:
 | |
| 68 | +        assert not os.path.exists(os.path.join(checkout, 'import-bin'))
 | |
| 69 | + | |
| 70 | + | |
| 71 | +@pytest.mark.datafiles(DATA_DIR)
 | |
| 72 | +def test_source_checkout_except(datafiles, cli):
 | |
| 73 | +    project = os.path.join(datafiles.dirname, datafiles.basename)
 | |
| 74 | +    checkout = os.path.join(cli.directory, 'source-checkout')
 | |
| 75 | +    target = 'checkout-deps.bst'
 | |
| 76 | + | |
| 77 | +    result = cli.run(project=project, args=['source-checkout', target,
 | |
| 78 | +                                            '--deps', 'all',
 | |
| 79 | +                                            '--except', 'import-bin.bst',
 | |
| 80 | +                                            checkout])
 | |
| 81 | +    result.assert_success()
 | |
| 82 | + | |
| 83 | +    # Sources for the target should be present
 | |
| 84 | +    assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
 | |
| 85 | + | |
| 86 | +    # Sources for import-bin.bst should not be present
 | |
| 87 | +    assert not os.path.exists(os.path.join(checkout, 'import-bin'))
 | |
| 88 | + | |
| 89 | +    # Sources for other dependencies should be present
 | |
| 90 | +    assert os.path.exists(os.path.join(checkout, 'import-dev', 'usr', 'include', 'pony.h'))
 | |
| 91 | + | |
| 92 | + | |
| 93 | +@pytest.mark.datafiles(DATA_DIR)
 | |
| 94 | +@pytest.mark.parametrize('fetch', [(False), (True)])
 | |
| 95 | +def test_source_checkout_fetch(datafiles, cli, fetch):
 | |
| 96 | +    project = os.path.join(datafiles.dirname, datafiles.basename)
 | |
| 97 | +    checkout = os.path.join(cli.directory, 'source-checkout')
 | |
| 98 | +    target = 'remote-import-dev.bst'
 | |
| 99 | +    target_path = os.path.join(project, 'elements', target)
 | |
| 100 | + | |
| 101 | +    # Create an element with remote source
 | |
| 102 | +    element = generate_remote_import_element(
 | |
| 103 | +        os.path.join(project, 'files', 'dev-files', 'usr', 'include', 'pony.h'),
 | |
| 104 | +        'pony.h')
 | |
| 105 | +    _yaml.dump(element, target_path)
 | |
| 106 | + | |
| 107 | +    # Testing --fetch option requires that we do not have the sources
 | |
| 108 | +    # cached already
 | |
| 109 | +    assert cli.get_element_state(project, target) == 'fetch needed'
 | |
| 110 | + | |
| 111 | +    args = ['source-checkout']
 | |
| 112 | +    if fetch:
 | |
| 113 | +        args += ['--fetch']
 | |
| 114 | +    args += [target, checkout]
 | |
| 115 | +    result = cli.run(project=project, args=args)
 | |
| 116 | + | |
| 117 | +    if fetch:
 | |
| 118 | +        result.assert_success()
 | |
| 119 | +        assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
 | |
| 120 | +    else:
 | |
| 121 | +        result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources') | 
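Note: test_source_checkout_fetch() generates its element at test time so the remote source starts out unfetched, and it pins the source with utils.sha256sum() because a 'remote' source's ref is the sha256 checksum of the downloaded file. An equivalent checksum using only the standard library, for illustration (the path is a placeholder):

    import hashlib

    def sha256sum(path):
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                h.update(chunk)
        return h.hexdigest()

    # expected_ref = sha256sum('files/dev-files/usr/include/pony.h')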
tests/yaml/yaml.py
| 1 | 1 |  import os
 | 
| 2 | 2 |  import pytest
 | 
| 3 | 3 |  import tempfile
 | 
| 4 | -from collections import Mapping
 | |
| 4 | +from collections.abc import Mapping
 | |
| 5 | 5 |  | 
| 6 | 6 |  from buildstream import _yaml
 | 
| 7 | 7 |  from buildstream._exceptions import LoadError, LoadErrorReason
 | 
