[Notes] [Git][BuildStream/buildstream][bschubert/fix-unbound-variable-exception] 15 commits: Add --tar option to source-checkout command




Benjamin Schubert pushed to branch bschubert/fix-unbound-variable-exception at BuildStream / buildstream

Commits:

13 changed files:

Changes:

  • NEWS
    @@ -2,6 +2,14 @@
     buildstream 1.3.1
     =================
    
    +  o Added `bst artifact log` subcommand for viewing build logs.
    +
    +  o BREAKING CHANGE: The bst source-bundle command has been removed. The
    +    functionality it provided has been replaced by the `--include-build-scripts`
    +    option of the `bst source-checkout` command. To produce a tarball containing
    +    an element's sources and generated build scripts you can do the command
    +    `bst source-checkout --include-build-scripts --tar foo.bst some-file.tar`
    +
       o BREAKING CHANGE: Default strip-commands have been removed as they are too
         specific. Recommendation if you are building in Linux is to use the
         ones being used in freedesktop-sdk project, for example
    

  • buildstream/_artifactcache/artifactcache.py
    @@ -874,9 +874,7 @@ class ArtifactCache():
                                 "\nValid values are, for example: 800M 10G 1T 50%\n"
                                 .format(str(e))) from e
    
    -        stat = os.statvfs(artifactdir_volume)
    -        available_space = (stat.f_bsize * stat.f_bavail)
    -
    +        available_space, total_size = self._get_volume_space_info_for(artifactdir_volume)
             cache_size = self.get_cache_size()
    
             # Ensure system has enough storage for the cache_quota
    @@ -893,7 +891,7 @@ class ArtifactCache():
                                 "BuildStream requires a minimum cache quota of 2G.")
             elif cache_quota > cache_size + available_space:  # Check maximum
                 if '%' in self.context.config_cache_quota:
    -                available = (available_space / (stat.f_blocks * stat.f_bsize)) * 100
    +                available = (available_space / total_size) * 100
                     available = '{}% of total disk space'.format(round(available, 1))
                 else:
                     available = utils._pretty_size(available_space)
    @@ -919,6 +917,20 @@ class ArtifactCache():
             self._cache_quota = cache_quota - headroom
             self._cache_lower_threshold = self._cache_quota / 2
    
    +    # _get_volume_space_info_for
    +    #
    +    # Get the available space and total space for the given volume
    +    #
    +    # Args:
    +    #     volume: volume for which to get the size
    +    #
    +    # Returns:
    +    #     A tuple containing first the availabe number of bytes on the requested
    +    #     volume, then the total number of bytes of the volume.
    +    def _get_volume_space_info_for(self, volume):
    +        stat = os.statvfs(volume)
    +        return stat.f_bsize * stat.f_bavail, stat.f_bsize * stat.f_blocks
    +
    
     # _configured_remote_artifact_cache_specs():
     #
    

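The new `_get_volume_space_info_for()` helper above wraps a single os.statvfs() call so that tests can patch it in one place. A minimal standalone sketch of the same computation (illustrative names, not BuildStream API):

    import os

    def volume_space_info(volume):
        # statvfs reports sizes in filesystem blocks; multiplying by the block
        # size gives bytes. f_bavail is the number of blocks available to
        # unprivileged users, f_blocks the total number of blocks on the volume.
        stat = os.statvfs(volume)
        available_bytes = stat.f_bsize * stat.f_bavail
        total_bytes = stat.f_bsize * stat.f_blocks
        return available_bytes, total_bytes

    if __name__ == '__main__':
        available, total = volume_space_info('/')
        print('{:.1f}% of total disk space available'.format(100 * available / total))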
  • buildstream/_frontend/cli.py
     import os
     import sys
    +from contextlib import ExitStack
    +from fnmatch import fnmatch
    +from tempfile import TemporaryDirectory
    
     import click
     from .. import _yaml
    @@ -107,6 +110,23 @@ def complete_target(args, incomplete):
         return complete_list
    
    
    +def complete_artifact(args, incomplete):
    +    from .._context import Context
    +    ctx = Context()
    +
    +    config = None
    +    for i, arg in enumerate(args):
    +        if arg in ('-c', '--config'):
    +            config = args[i + 1]
    +    ctx.load(config)
    +
    +    # element targets are valid artifact names
    +    complete_list = complete_target(args, incomplete)
    +    complete_list.extend(ref for ref in ctx.artifactcache.cas.list_refs() if ref.startswith(incomplete))
    +
    +    return complete_list
    +
    +
     def override_completions(cmd, cmd_param, args, incomplete):
         """
         :param cmd_param: command definition
    @@ -121,13 +141,15 @@ def override_completions(cmd, cmd_param, args, incomplete):
         # We can't easily extend click's data structures without
         # modifying click itself, so just do some weak special casing
         # right here and select which parameters we want to handle specially.
    -    if isinstance(cmd_param.type, click.Path) and \
    -       (cmd_param.name == 'elements' or
    -        cmd_param.name == 'element' or
    -        cmd_param.name == 'except_' or
    -        cmd_param.opts == ['--track'] or
    -        cmd_param.opts == ['--track-except']):
    -        return complete_target(args, incomplete)
    +    if isinstance(cmd_param.type, click.Path):
    +        if (cmd_param.name == 'elements' or
    +                cmd_param.name == 'element' or
    +                cmd_param.name == 'except_' or
    +                cmd_param.opts == ['--track'] or
    +                cmd_param.opts == ['--track-except']):
    +            return complete_target(args, incomplete)
    +        if cmd_param.name == 'artifacts':
    +            return complete_artifact(args, incomplete)
    
         raise CompleteUnhandled()
    
    @@ -725,6 +747,8 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
     #                  Source Checkout Command                      #
     ##################################################################
     @cli.command(name='source-checkout', short_help='Checkout sources for an element')
    +@click.option('--force', '-f', default=False, is_flag=True,
    +              help="Allow files to be overwritten")
     @click.option('--except', 'except_', multiple=True,
                   type=click.Path(readable=False),
                   help="Except certain dependencies")
    @@ -733,11 +757,15 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
                   help='The dependencies whose sources to checkout (default: none)')
     @click.option('--fetch', 'fetch_', default=False, is_flag=True,
                   help='Fetch elements if they are not fetched')
    -@click.argument('element', required=False,
    -                type=click.Path(readable=False))
    +@click.option('--tar', 'tar', default=False, is_flag=True,
    +              help='Create a tarball from the element\'s sources instead of a '
    +                   'file tree.')
    +@click.option('--include-build-scripts', 'build_scripts', is_flag=True)
    +@click.argument('element', required=False, type=click.Path(readable=False))
     @click.argument('location', type=click.Path(), required=False)
     @click.pass_obj
    -def source_checkout(app, element, location, deps, fetch_, except_):
    +def source_checkout(app, element, location, force, deps, fetch_, except_,
    +                    tar, build_scripts):
         """Checkout sources of an element to the specified location
         """
         if not element and not location:
    @@ -757,9 +785,12 @@ def source_checkout(app, element, location, deps, fetch_, except_):
    
             app.stream.source_checkout(element,
                                        location=location,
    +                                   force=force,
                                        deps=deps,
                                        fetch=fetch_,
    -                                   except_targets=except_)
    +                                   except_targets=except_,
    +                                   tar=tar,
    +                                   include_build_scripts=build_scripts)
    
    
     ##################################################################
    @@ -908,32 +939,99 @@ def workspace_list(app):
             app.stream.workspace_list()
    
    
    -##################################################################
    -#                     Source Bundle Command                      #
    -##################################################################
    -@cli.command(name="source-bundle", short_help="Produce a build bundle to be manually executed")
    -@click.option('--except', 'except_', multiple=True,
    -              type=click.Path(readable=False),
    -              help="Elements to except from the tarball")
    -@click.option('--compression', default='gz',
    -              type=click.Choice(['none', 'gz', 'bz2', 'xz']),
    -              help="Compress the tar file using the given algorithm.")
    -@click.option('--track', 'track_', default=False, is_flag=True,
    -              help="Track new source references before bundling")
    -@click.option('--force', '-f', default=False, is_flag=True,
    -              help="Overwrite an existing tarball")
    -@click.option('--directory', default=os.getcwd(),
    -              help="The directory to write the tarball to")
    -@click.argument('element',
    -                type=click.Path(readable=False))
    +#############################################################
    +#                     Artifact Commands                     #
    +#############################################################
    +def _classify_artifacts(names, cas, project_directory):
    +    element_targets = []
    +    artifact_refs = []
    +    element_globs = []
    +    artifact_globs = []
    +
    +    for name in names:
    +        if name.endswith('.bst'):
    +            if any(c in "*?[" for c in name):
    +                element_globs.append(name)
    +            else:
    +                element_targets.append(name)
    +        else:
    +            if any(c in "*?[" for c in name):
    +                artifact_globs.append(name)
    +            else:
    +                artifact_refs.append(name)
    +
    +    if element_globs:
    +        for dirpath, _, filenames in os.walk(project_directory):
    +            for filename in filenames:
    +                element_path = os.path.join(dirpath, filename).lstrip(project_directory).lstrip('/')
    +                if any(fnmatch(element_path, glob) for glob in element_globs):
    +                    element_targets.append(element_path)
    +
    +    if artifact_globs:
    +        artifact_refs.extend(ref for ref in cas.list_refs()
    +                             if any(fnmatch(ref, glob) for glob in artifact_globs))
    +
    +    return element_targets, artifact_refs
    +
    +
    +@cli.group(short_help="Manipulate cached artifacts")
    +def artifact():
    +    """Manipulate cached artifacts"""
    +    pass
    +
    +
    +################################################################
    +#                     Artifact Log Command                     #
    +################################################################
    +@artifact.command(name='log', short_help="Show logs of an artifact")
    +@click.argument('artifacts', type=click.Path(), nargs=-1)
     @click.pass_obj
    -def source_bundle(app, element, force, directory,
    -                  track_, compression, except_):
    -    """Produce a source bundle to be manually executed
    -    """
    -    with app.initialized():
    -        app.stream.source_bundle(element, directory,
    -                                 track_first=track_,
    -                                 force=force,
    -                                 compression=compression,
    -                                 except_targets=except_)
    +def artifact_log(app, artifacts):
    +    """Show logs of all artifacts"""
    +    from .._exceptions import CASError
    +    from .._message import MessageType
    +    from .._pipeline import PipelineSelection
    +    from ..storage._casbaseddirectory import CasBasedDirectory
    +
    +    with ExitStack() as stack:
    +        stack.enter_context(app.initialized())
    +        cache = app.context.artifactcache
    +
    +        elements, artifacts = _classify_artifacts(artifacts, cache.cas,
    +                                                  app.project.directory)
    +
    +        vdirs = []
    +        extractdirs = []
    +        if artifacts:
    +            for ref in artifacts:
    +                try:
    +                    cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
    +                    vdir = CasBasedDirectory(cache.cas, cache_id)
    +                    vdirs.append(vdir)
    +                except CASError as e:
    +                    app._message(MessageType.WARN, "Artifact {} is not cached".format(ref), detail=str(e))
    +                    continue
    +        if elements:
    +            elements = app.stream.load_selection(elements, selection=PipelineSelection.NONE)
    +            for element in elements:
    +                if not element._cached():
    +                    app._message(MessageType.WARN, "Element {} is not cached".format(element))
    +                    continue
    +                ref = cache.get_artifact_fullname(element, element._get_cache_key())
    +                cache_id = cache.cas.resolve_ref(ref, update_mtime=True)
    +                vdir = CasBasedDirectory(cache.cas, cache_id)
    +                vdirs.append(vdir)
    +
    +        for vdir in vdirs:
    +            # NOTE: If reading the logs feels unresponsive, here would be a good place to provide progress information.
    +            logsdir = vdir.descend(["logs"])
    +            td = stack.enter_context(TemporaryDirectory())
    +            logsdir.export_files(td, can_link=True)
    +            extractdirs.append(td)
    +
    +        for extractdir in extractdirs:
    +            for log in (os.path.join(extractdir, log) for log in os.listdir(extractdir)):
    +                # NOTE: Should click gain the ability to pass files to the pager this can be optimised.
    +                with open(log) as f:
    +                    data = f.read()
    +                    click.echo_via_pager(data)

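`bst artifact log` accepts both element names and artifact refs, and the `_classify_artifacts()` helper above splits the two apart, expanding glob patterns with fnmatch. A self-contained sketch of the same classification idea (simplified: matching element globs against the project tree is omitted, and all names here are illustrative):

    from fnmatch import fnmatch

    def classify(names, known_refs):
        # Names ending in .bst are element targets; anything else is an
        # artifact ref. A name containing *, ? or [ is treated as a glob
        # and expanded against the known refs instead of being used literally.
        element_targets, artifact_refs = [], []
        for name in names:
            is_glob = any(c in "*?[" for c in name)
            if name.endswith('.bst'):
                element_targets.append(name)
            elif is_glob:
                artifact_refs.extend(ref for ref in known_refs if fnmatch(ref, name))
            else:
                artifact_refs.append(name)
        return element_targets, artifact_refs

    print(classify(['base.bst', 'test/base/*'],
                   ['test/base/abc123', 'test/other/def456']))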
  • buildstream/_stream.py
    @@ -25,8 +25,8 @@ import stat
     import shlex
     import shutil
     import tarfile
    -from contextlib import contextmanager
    -from tempfile import TemporaryDirectory
    +import tempfile
    +from contextlib import contextmanager, suppress
    
     from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
     from ._message import Message, MessageType
    @@ -449,11 +449,14 @@ class Stream():
         #
         def source_checkout(self, target, *,
                             location=None,
    +                        force=False,
                             deps='none',
                             fetch=False,
    -                        except_targets=()):
    +                        except_targets=(),
    +                        tar=False,
    +                        include_build_scripts=False):
    
    -        self._check_location_writable(location)
    +        self._check_location_writable(location, force=force, tar=tar)
    
             elements, _ = self._load((target,), (),
                                      selection=deps,
    @@ -467,7 +470,8 @@ class Stream():
    
             # Stage all sources determined by scope
             try:
    -            self._write_element_sources(location, elements)
    +            self._source_checkout(elements, location, force, deps,
    +                                  fetch, tar, include_build_scripts)
             except BstError as e:
                 raise StreamError("Error while writing sources"
                                   ": '{}'".format(e), detail=e.detail, reason=e.reason) from e
    @@ -728,87 +732,6 @@ class Stream():
                 'workspaces': workspaces
             })
    
    -    # source_bundle()
    -    #
    -    # Create a host buildable tarball bundle for the given target.
    -    #
    -    # Args:
    -    #    target (str): The target element to bundle
    -    #    directory (str): The directory to output the tarball
    -    #    track_first (bool): Track new source references before bundling
    -    #    compression (str): The compression type to use
    -    #    force (bool): Overwrite an existing tarball
    -    #
    -    def source_bundle(self, target, directory, *,
    -                      track_first=False,
    -                      force=False,
    -                      compression="gz",
    -                      except_targets=()):
    -
    -        if track_first:
    -            track_targets = (target,)
    -        else:
    -            track_targets = ()
    -
    -        elements, track_elements = self._load((target,), track_targets,
    -                                              selection=PipelineSelection.ALL,
    -                                              except_targets=except_targets,
    -                                              track_selection=PipelineSelection.ALL,
    -                                              fetch_subprojects=True)
    -
    -        # source-bundle only supports one target
    -        target = self.targets[0]
    -
    -        self._message(MessageType.INFO, "Bundling sources for target {}".format(target.name))
    -
    -        # Find the correct filename for the compression algorithm
    -        tar_location = os.path.join(directory, target.normal_name + ".tar")
    -        if compression != "none":
    -            tar_location += "." + compression
    -
    -        # Attempt writing a file to generate a good error message
    -        # early
    -        #
    -        # FIXME: A bit hackish
    -        try:
    -            open(tar_location, mode="x")
    -            os.remove(tar_location)
    -        except IOError as e:
    -            raise StreamError("Cannot write to {0}: {1}"
    -                              .format(tar_location, e)) from e
    -
    -        # Fetch and possibly track first
    -        #
    -        self._fetch(elements, track_elements=track_elements)
    -
    -        # We don't use the scheduler for this as it is almost entirely IO
    -        # bound.
    -
    -        # Create a temporary directory to build the source tree in
    -        builddir = self._context.builddir
    -        os.makedirs(builddir, exist_ok=True)
    -        prefix = "{}-".format(target.normal_name)
    -
    -        with TemporaryDirectory(prefix=prefix, dir=builddir) as tempdir:
    -            source_directory = os.path.join(tempdir, 'source')
    -            try:
    -                os.makedirs(source_directory)
    -            except OSError as e:
    -                raise StreamError("Failed to create directory: {}"
    -                                  .format(e)) from e
    -
    -            # Any elements that don't implement _write_script
    -            # should not be included in the later stages.
    -            elements = [
    -                element for element in elements
    -                if self._write_element_script(source_directory, element)
    -            ]
    -
    -            self._write_element_sources(os.path.join(tempdir, "source"), elements)
    -            self._write_build_script(tempdir, elements)
    -            self._collect_sources(tempdir, tar_location,
    -                                  target.normal_name, compression)
    -
         # redirect_element_names()
         #
         # Takes a list of element names and returns a list where elements have been
    @@ -1189,6 +1112,54 @@ class Stream():
    
             sandbox_vroot.export_files(directory, can_link=True, can_destroy=True)
    
    +    # Helper function for source_checkout()
    +    def _source_checkout(self, elements,
    +                         location=None,
    +                         force=False,
    +                         deps='none',
    +                         fetch=False,
    +                         tar=False,
    +                         include_build_scripts=False):
    +        location = os.path.abspath(location)
    +        location_parent = os.path.abspath(os.path.join(location, ".."))
    +
    +        # Stage all our sources in a temporary directory. The this
    +        # directory can be used to either construct a tarball or moved
    +        # to the final desired location.
    +        temp_source_dir = tempfile.TemporaryDirectory(dir=location_parent)
    +        try:
    +            self._write_element_sources(temp_source_dir.name, elements)
    +            if include_build_scripts:
    +                self._write_build_scripts(temp_source_dir.name, elements)
    +            if tar:
    +                self._create_tarball(temp_source_dir.name, location)
    +            else:
    +                self._move_directory(temp_source_dir.name, location, force)
    +        except OSError as e:
    +            raise StreamError("Failed to checkout sources to {}: {}"
    +                              .format(location, e)) from e
    +        finally:
    +            with suppress(FileNotFoundError):
    +                temp_source_dir.cleanup()
    +
    +    # Move a directory src to dest. This will work across devices and
    +    # may optionaly overwrite existing files.
    +    def _move_directory(self, src, dest, force=False):
    +        def is_empty_dir(path):
    +            return os.path.isdir(dest) and not os.listdir(dest)
    +
    +        try:
    +            os.rename(src, dest)
    +            return
    +        except OSError:
    +            pass
    +
    +        if force or is_empty_dir(dest):
    +            try:
    +                utils.link_files(src, dest)
    +            except utils.UtilError as e:
    +                raise StreamError("Failed to move directory: {}".format(e)) from e
    +
         # Write the element build script to the given directory
         def _write_element_script(self, directory, element):
             try:
    @@ -1205,8 +1176,28 @@ class Stream():
                     os.makedirs(element_source_dir)
                     element._stage_sources_at(element_source_dir, mount_workspaces=False)
    
    +    # Create a tarball from the content of directory
    +    def _create_tarball(self, directory, tar_name):
    +        try:
    +            with utils.save_file_atomic(tar_name, mode='wb') as f:
    +                # This TarFile does not need to be explicitly closed
    +                # as the underlying file object will be closed be the
    +                # save_file_atomic contect manager
    +                tarball = tarfile.open(fileobj=f, mode='w')
    +                for item in os.listdir(str(directory)):
    +                    file_to_add = os.path.join(directory, item)
    +                    tarball.add(file_to_add, arcname=item)
    +        except OSError as e:
    +            raise StreamError("Failed to create tar archive: {}".format(e)) from e
    +
    +    # Write all the build_scripts for elements in the directory location
    +    def _write_build_scripts(self, location, elements):
    +        for element in elements:
    +            self._write_element_script(location, element)
    +        self._write_master_build_script(location, elements)
    +
         # Write a master build script to the sandbox
    -    def _write_build_script(self, directory, elements):
    +    def _write_master_build_script(self, directory, elements):
    
             module_string = ""
             for element in elements:
    

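The --tar code path stages everything into a temporary directory next to the destination and then archives it. A minimal sketch of the archiving step using only the standard library (a plain open() stands in for BuildStream's utils.save_file_atomic() here):

    import os
    import tarfile

    def create_tarball(directory, tar_name):
        # Hand an already-open file object to tarfile, as the diff above does;
        # mode='w' produces an uncompressed tar. Using tarfile.open() as a
        # context manager ensures the end-of-archive blocks get written.
        with open(tar_name, 'wb') as f, tarfile.open(fileobj=f, mode='w') as tarball:
            for item in os.listdir(directory):
                tarball.add(os.path.join(directory, item), arcname=item)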
  • buildstream/element.py
    @@ -1612,9 +1612,9 @@ class Element(Plugin):
                             sandbox_vpath = sandbox_vroot.descend(path_components)
                             try:
                                 sandbox_vpath.import_files(workspace.get_absolute_path())
    -                        except UtilError as e:
    +                        except UtilError as e2:
                                 self.warn("Failed to preserve workspace state for failed build sysroot: {}"
    -                                      .format(e))
    +                                      .format(e2))
    
                         self.__set_build_result(success=False, description=str(e), detail=e.detail)
                         self._cache_artifact(rootdir, sandbox, e.collect)
    

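The rename of `e` to `e2` in element.py is the fix the branch name refers to: in Python 3 the name bound by `except ... as e` is deleted when the handler exits, so reusing `e` for the inner UtilError handler left the outer exception unbound by the time `e.detail` and `e.collect` were read. A minimal illustration of the pitfall (not BuildStream code):

    def demo():
        try:
            raise ValueError("outer")
        except ValueError as e:
            try:
                raise KeyError("inner")
            except KeyError as e:   # rebinding 'e' shadows the outer exception...
                pass                # ...and Python deletes 'e' when this handler exits
            return str(e)           # 'e' is no longer bound here

    demo()  # -> raises UnboundLocalError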
  • doc/source/using_commands.rst
    @@ -86,13 +86,6 @@ project's main directory.
    
     ----
    
    -.. _invoking_source_bundle:
    -
    -.. click:: buildstream._frontend.cli:source_bundle
    -   :prog: bst source bundle
    -
    -----
    -
     .. _invoking_workspace:
    
     .. click:: buildstream._frontend.cli:workspace
    

  • tests/artifactcache/expiry.py
    @@ -18,6 +18,7 @@
     #
    
     import os
    +from unittest import mock
    
     import pytest
    
    @@ -311,6 +312,8 @@ def test_never_delete_required_track(cli, datafiles, tmpdir):
         ("0", True),
         ("-1", False),
         ("pony", False),
    +    ("7K", False),
    +    ("70%", False),
         ("200%", False)
     ])
     @pytest.mark.datafiles(DATA_DIR)
    @@ -324,7 +327,35 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
             }
         })
    
    -    res = cli.run(project=project, args=['workspace', 'list'])
    +    # We patch how we get space information
    +    # Ideally we would instead create a FUSE device on which we control
    +    # everything.
    +    # If the value is a percentage, we fix the current values to take into
    +    # account the block size, since this is important in how we compute the size
    +
    +    if quota.endswith("%"):  # We set the used space at 60% of total space
    +        stats = os.statvfs(".")
    +        free_space = 0.6 * stats.f_bsize * stats.f_blocks
    +        total_space = stats.f_bsize * stats.f_blocks
    +    else:
    +        free_space = 6000
    +        total_space = 10000
    +
    +    volume_space_patch = mock.patch(
    +        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
    +        autospec=True,
    +        return_value=(free_space, total_space),
    +    )
    +
    +    cache_size_patch = mock.patch(
    +        "buildstream._artifactcache.artifactcache.ArtifactCache.get_cache_size",
    +        autospec=True,
    +        return_value=0,
    +    )
    +
    +    with volume_space_patch, cache_size_patch:
    +        res = cli.run(project=project, args=['workspace', 'list'])
    +
         if success:
             res.assert_success()
         else:
    

  • tests/completions/completions.py
    @@ -6,6 +6,7 @@ from tests.testutils import cli
     DATA_DIR = os.path.dirname(os.path.realpath(__file__))
    
     MAIN_COMMANDS = [
    +    'artifact ',
         'build ',
         'checkout ',
         'fetch ',
    @@ -16,7 +17,6 @@ MAIN_COMMANDS = [
         'shell ',
         'show ',
         'source-checkout ',
    -    'source-bundle ',
         'track ',
         'workspace '
     ]
    

  • tests/frontend/help.py
    @@ -25,7 +25,6 @@ def test_help_main(cli):
         ('push'),
         ('shell'),
         ('show'),
    -    ('source-bundle'),
         ('track'),
         ('workspace')
     ])
    

  • tests/frontend/source_checkout.py
     import os
     import pytest
    +import tarfile
    +from pathlib import Path
    
     from tests.testutils import cli
    
    @@ -55,6 +57,39 @@ def test_source_checkout(datafiles, cli, tmpdir_factory, with_workspace, guess_e
         assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
    
    
    +@pytest.mark.datafiles(DATA_DIR)
    +@pytest.mark.parametrize('force_flag', ['--force', '-f'])
    +def test_source_checkout_force(datafiles, cli, force_flag):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    checkout = os.path.join(cli.directory, 'source-checkout')
    +    target = 'checkout-deps.bst'
    +
    +    os.makedirs(os.path.join(checkout, 'some-thing'))
    +    # Path(os.path.join(checkout, 'some-file')).touch()
    +
    +    result = cli.run(project=project, args=['source-checkout', force_flag, target, '--deps', 'none', checkout])
    +    result.assert_success()
    +
    +    assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_source_checkout_tar(datafiles, cli):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    checkout = os.path.join(cli.directory, 'source-checkout.tar')
    +    target = 'checkout-deps.bst'
    +
    +    result = cli.run(project=project, args=['source-checkout', '--tar', target, '--deps', 'none', checkout])
    +    result.assert_success()
    +
    +    assert os.path.exists(checkout)
    +    with tarfile.open(checkout) as tf:
    +        expected_content = os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config')
    +        tar_members = [f.name for f in tf]
    +        for member in tar_members:
    +            assert member in expected_content
    +
    +
     @pytest.mark.datafiles(DATA_DIR)
     @pytest.mark.parametrize('deps', [('build'), ('none'), ('run'), ('all')])
     def test_source_checkout_deps(datafiles, cli, deps):
    @@ -135,3 +170,38 @@ def test_source_checkout_fetch(datafiles, cli, fetch):
             assert os.path.exists(os.path.join(checkout, 'remote-import-dev', 'pony.h'))
         else:
             result.assert_main_error(ErrorDomain.PIPELINE, 'uncached-sources')
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_source_checkout_build_scripts(cli, tmpdir, datafiles):
    +    project_path = os.path.join(datafiles.dirname, datafiles.basename)
    +    element_name = 'source-bundle/source-bundle-hello.bst'
    +    normal_name = 'source-bundle-source-bundle-hello'
    +    checkout = os.path.join(str(tmpdir), 'source-checkout')
    +
    +    args = ['source-checkout', '--include-build-scripts', element_name, checkout]
    +    result = cli.run(project=project_path, args=args)
    +    result.assert_success()
    +
    +    # There sould be a script for each element (just one in this case) and a top level build script
    +    expected_scripts = ['build.sh', 'build-' + normal_name]
    +    for script in expected_scripts:
    +        assert script in os.listdir(checkout)
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_source_checkout_tar_buildscripts(cli, tmpdir, datafiles):
    +    project_path = os.path.join(datafiles.dirname, datafiles.basename)
    +    element_name = 'source-bundle/source-bundle-hello.bst'
    +    normal_name = 'source-bundle-source-bundle-hello'
    +    tar_file = os.path.join(str(tmpdir), 'source-checkout.tar')
    +
    +    args = ['source-checkout', '--include-build-scripts', '--tar', element_name, tar_file]
    +    result = cli.run(project=project_path, args=args)
    +    result.assert_success()
    +
    +    expected_scripts = ['build.sh', 'build-' + normal_name]
    +
    +    with tarfile.open(tar_file, 'r') as tf:
    +        for script in expected_scripts:
    +            assert script in tf.getnames()

  • tests/frontend/source_bundle.py → tests/integration/artifact.py
     #
    +#  Copyright (C) 2018 Codethink Limited
     #  Copyright (C) 2018 Bloomberg Finance LP
     #
     #  This program is free software; you can redistribute it and/or
    @@ -14,15 +15,17 @@
     #  You should have received a copy of the GNU Lesser General Public
     #  License along with this library. If not, see <http://www.gnu.org/licenses/>.
     #
    -#  Authors: Chandan Singh <csingh43 bloomberg net>
    +#  Authors: Richard Maw <richard maw codethink co uk>
     #
    
     import os
    -import tarfile
    -
     import pytest
    
    -from tests.testutils import cli
    +from tests.testutils import cli_integration as cli
    +
    +
    +pytestmark = pytest.mark.integration
    +
    
     # Project directory
     DATA_DIR = os.path.join(
    @@ -31,18 +34,35 @@ DATA_DIR = os.path.join(
     )
    
    
    +@pytest.mark.integration
     @pytest.mark.datafiles(DATA_DIR)
    -def test_source_bundle(cli, tmpdir, datafiles):
    -    project_path = os.path.join(datafiles.dirname, datafiles.basename)
    -    element_name = 'source-bundle/source-bundle-hello.bst'
    -    normal_name = 'source-bundle-source-bundle-hello'
    -
    -    # Verify that we can correctly produce a source-bundle
    -    args = ['source-bundle', element_name, '--directory', str(tmpdir)]
    -    result = cli.run(project=project_path, args=args)
    -    result.assert_success()
    -
    -    # Verify that the source-bundle contains our sources and a build script
    -    with tarfile.open(os.path.join(str(tmpdir), '{}.tar.gz'.format(normal_name))) as bundle:
    -        assert os.path.join(normal_name, 'source', normal_name, 'llamas.txt') in bundle.getnames()
    -        assert os.path.join(normal_name, 'build.sh') in bundle.getnames()
    +def test_artifact_log(cli, tmpdir, datafiles):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +
    +    # Get the cache key of our test element
    +    result = cli.run(project=project, silent=True, args=[
    +        '--no-colors',
    +        'show', '--deps', 'none', '--format', '%{full-key}',
    +        'base.bst'
    +    ])
    +    key = result.output.strip()
    +
    +    # Ensure we have an artifact to read
    +    result = cli.run(project=project, args=['build', 'base.bst'])
    +    assert result.exit_code == 0
    +
    +    # Read the log via the element name
    +    result = cli.run(project=project, args=['artifact', 'log', 'base.bst'])
    +    assert result.exit_code == 0
    +    log = result.output
    +
    +    # Read the log via the key
    +    result = cli.run(project=project, args=['artifact', 'log', 'test/base/' + key])
    +    assert result.exit_code == 0
    +    assert log == result.output
    +
    +    # Read the log via glob
    +    result = cli.run(project=project, args=['artifact', 'log', 'test/base/*'])
    +    assert result.exit_code == 0
    +    # The artifact is cached under both a strong key and a weak key
    +    assert (log + log) == result.output

  • tests/testutils/mock_os.py deleted
    -from contextlib import contextmanager
    -import os
    -
    -
    -# MockAttributeResult
    -#
    -# A class to take a dictionary of kwargs and make them accessible via
    -# attributes of the object.
    -#
    -class MockAttributeResult(dict):
    -    __getattr__ = dict.get
    -
    -
    -# mock_statvfs():
    -#
    -# Gets a function which mocks statvfs and returns a statvfs result with the kwargs accessible.
    -#
    -# Returns:
    -#    func(path) -> object: object will have all the kwargs accessible via object.kwarg
    -#
    -# Example:
    -#    statvfs = mock_statvfs(f_blocks=10)
    -#    result = statvfs("regardless/of/path")
    -#    assert result.f_blocks == 10 # True
    -def mock_statvfs(**kwargs):
    -    def statvfs(path):
    -        return MockAttributeResult(kwargs)
    -    return statvfs
    -
    -
    -# monkey_patch()
    -#
    -# with monkey_patch("statvfs", custom_statvfs):
    -#    assert os.statvfs == custom_statvfs # True
    -# assert os.statvfs == custom_statvfs # False
    -#
    -@contextmanager
    -def monkey_patch(to_patch, patched_func):
    -    orig = getattr(os, to_patch)
    -    setattr(os, to_patch, patched_func)
    -    try:
    -        yield
    -    finally:
    -        setattr(os, to_patch, orig)

  • tests/utils/misc.py
    +import os
    +from unittest import mock
    +
     from buildstream import _yaml
    -from ..testutils import mock_os
    -from ..testutils.runcli import cli
    
    -import os
    -import pytest
    +from ..testutils.runcli import cli
    
    
     KiB = 1024
    @@ -13,7 +13,6 @@ TiB = (GiB * 1024)
    
    
     def test_parse_size_over_1024T(cli, tmpdir):
    -    BLOCK_SIZE = 4096
         cli.configure({
             'cache': {
                 'quota': 2048 * TiB
    @@ -23,9 +22,13 @@ def test_parse_size_over_1024T(cli, tmpdir):
         os.makedirs(str(project))
         _yaml.dump({'name': 'main'}, str(project.join("project.conf")))
    
    -    bavail = (1025 * TiB) / BLOCK_SIZE
    -    patched_statvfs = mock_os.mock_statvfs(f_bavail=bavail, f_bsize=BLOCK_SIZE)
    -    with mock_os.monkey_patch("statvfs", patched_statvfs):
    +    volume_space_patch = mock.patch(
    +        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
    +        autospec=True,
    +        return_value=(1025 * TiB, 1025 * TiB)
    +    )
    +
    +    with volume_space_patch:
             result = cli.run(project, args=["build", "file.bst"])
             failure_msg = 'Your system does not have enough available space to support the cache quota specified.'
             assert failure_msg in result.stderr


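With _get_volume_space_info_for() in place, the deleted mock_os helpers (which swapped os.statvfs globally) are no longer needed: the tests now patch BuildStream's own helper with unittest.mock, so only the cache's space query is affected. The pattern, roughly as it appears in the updated tests (the patch target string is taken from the diff above):

    from unittest import mock

    TiB = 1024 ** 4

    # Make the artifact cache see a fixed amount of free and total space,
    # independent of the filesystem the test suite actually runs on.
    volume_space_patch = mock.patch(
        "buildstream._artifactcache.artifactcache.ArtifactCache._get_volume_space_info_for",
        autospec=True,
        return_value=(1025 * TiB, 1025 * TiB),
    )

    with volume_space_patch:
        pass  # run the code under test here, e.g. cli.run(project, args=["build", "file.bst"])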
