[Notes] [Git][BuildStream/buildstream][Qinusty/597-non-alias-url-fix] 30 commits: .gitlab-ci.yml: Bump tags for testsuite images




Qinusty pushed to branch Qinusty/597-non-alias-url-fix at BuildStream / buildstream

Commits:

28 changed files:

Changes:

  • .gitlab-ci.yml
    @@ -1,4 +1,4 @@
    -image: buildstream/testsuite-debian:9-master-112-a9f63c5e
    +image: buildstream/testsuite-debian:9-master-114-4cab18e3
     
     cache:
       key: "$CI_JOB_NAME-"
    @@ -10,6 +10,16 @@ stages:
       - test
       - post
     
    +# Avoid running all the tests post merge on
    +# master or on any release branch.
    +#
    +.tests-condition-template: &tests-condition
    +  only:
    +  - branches
    +  except:
    +  - master
    +  - /bst-1\..*/
    +
     #####################################################
     #                  Prepare stage                    #
     #####################################################
    @@ -26,6 +36,11 @@ source_dist:
       - tar -ztf dist/*
       - tarball=$(cd dist && echo $(ls *))
     
    +  # Verify that the source distribution tarball can be installed correctly
    +  #
    +  - pip3 install dist/*.tar.gz
    +  - bst --version
    +
       # unpack tarball as `dist/buildstream` directory
       - |
         cat > dist/unpack.sh << EOF
    @@ -79,25 +94,31 @@ source_dist:
         - coverage-linux/
     
     tests-debian-9:
    -  image: buildstream/testsuite-debian:9-master-112-a9f63c5e
    +  image: buildstream/testsuite-debian:9-master-114-4cab18e3
       <<: *linux-tests
    +  <<: *tests-condition
     
     tests-fedora-27:
    -  image: buildstream/testsuite-fedora:27-master-112-a9f63c5e
    +  image: buildstream/testsuite-fedora:27-master-114-4cab18e3
       <<: *linux-tests
    +  <<: *tests-condition
     
     tests-fedora-28:
    -  image: buildstream/testsuite-fedora:28-master-112-a9f63c5e
    +  image: buildstream/testsuite-fedora:28-master-114-4cab18e3
       <<: *linux-tests
    +  <<: *tests-condition
     
     tests-ubuntu-18.04:
    -  image: buildstream/testsuite-ubuntu:18.04-master-112-a9f63c5e
    +  image: buildstream/testsuite-ubuntu:18.04-master-114-4cab18e3
       <<: *linux-tests
    +  <<: *tests-condition
     
     tests-unix:
    +  <<: *tests-condition
    +
       # Use fedora here, to a) run a test on fedora and b) ensure that we
       # can get rid of ostree - this is not possible with debian-8
    -  image: buildstream/testsuite-fedora:27-master-112-a9f63c5e
    +  image: buildstream/testsuite-fedora:27-master-114-4cab18e3
       stage: test
       variables:
         BST_FORCE_BACKEND: "unix"
    @@ -133,6 +154,15 @@ tests-unix:
     # Note: We still do not enforce a consistent installation of python3-sphinx,
     #       as it will significantly grow the backing image.
     docs:
    +
    +  # Here we build the docs for every pre-merge CI, but avoid
    +  # the job on post-merge to stable branches, because we only
    +  # ever publish them from master
    +  only:
    +  - branches
    +  except:
    +  - /bst-1\..*/
    +
       stage: test
       script:
       - export BST_SOURCE_CACHE="$(pwd)/cache/integration-cache/sources"
    @@ -157,6 +187,8 @@ docs:
     # as an output of radon, with some conversion
     #
     codequality:
    +  <<: *tests-condition
    +
       image: docker:stable
       stage: post
       variables:
    @@ -175,6 +207,8 @@ codequality:
         paths: [codeclimate.json]
     
     analysis:
    +  <<: *tests-condition
    +
       stage: post
       script:
       - |
    @@ -203,6 +237,8 @@ analysis:
     # Collate coverage reports
     #
     coverage:
    +  <<: *tests-condition
    +
       stage: post
       coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
       script:
    

  • MANIFEST.in
    @@ -23,4 +23,4 @@ recursive-include tests *.expected
     recursive-include buildstream/_protos *.proto
     
     # Requirements files
    -dev-requirements.txt
    +include dev-requirements.txt

  • README.rst
    @@ -1,10 +1,5 @@
     About
     -----
    -.. image:: https://gitlab.com/BuildStream/buildstream/badges/master/pipeline.svg
    -   :target: https://gitlab.com/BuildStream/buildstream/commits/master
    -
    -.. image:: https://gitlab.com/BuildStream/buildstream/badges/master/coverage.svg?job=coverage
    -   :target: https://gitlab.com/BuildStream/buildstream/commits/master
     
     
     What is BuildStream?
    

  • buildstream/_artifactcache/cascache.py
    @@ -30,6 +30,8 @@ from urllib.parse import urlparse
     
     import grpc
     
    +from .. import _yaml
    +
     from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
     from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
     from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc
    @@ -526,6 +528,25 @@ class CASCache(ArtifactCache):
         #
         def remove(self, ref, *, defer_prune=False):
     
    +        # Remove extract if not used by other ref
    +        tree = self.resolve_ref(ref)
    +        ref_name, ref_hash = os.path.split(ref)
    +        extract = os.path.join(self.extractdir, ref_name, tree.hash)
    +        keys_file = os.path.join(extract, 'meta', 'keys.yaml')
    +        if os.path.exists(keys_file):
    +            keys_meta = _yaml.load(keys_file)
    +            keys = [keys_meta['strong'], keys_meta['weak']]
    +            remove_extract = True
    +            for other_hash in keys:
    +                if other_hash == ref_hash:
    +                    continue
    +                remove_extract = False
    +                break
    +
    +            if remove_extract:
    +                utils._force_rmtree(extract)
    +
    +        # Remove cache ref
             refpath = self._refpath(ref)
             if not os.path.exists(refpath):
                 raise ArtifactError("Could not find artifact for ref '{}'".format(ref))
    

  • buildstream/_frontend/linuxapp.py
    @@ -22,12 +22,43 @@ import click
     from .app import App
     
     
    +# This trick is currently only supported on some terminals,
    +# avoid using it where it can cause garbage to be printed
    +# to the terminal.
    +#
    +def _osc_777_supported():
    +
    +    term = os.environ.get('TERM')
    +
    +    if term and (term.startswith('xterm') or term.startswith('vte')):
    +
    +        # Since vte version 4600, upstream silently ignores
    +        # the OSC 777 without printing garbage to the terminal.
    +        #
    +        # For distros like Fedora who have patched vte, this
    +        # will trigger a desktop notification and bring attention
    +        # to the terminal.
    +        #
    +        vte_version = os.environ.get('VTE_VERSION')
    +        try:
    +            vte_version_int = int(vte_version)
    +        except (ValueError, TypeError):
    +            return False
    +
    +        if vte_version_int >= 4600:
    +            return True
    +
    +    return False
    +
    +
     # A linux specific App implementation
     #
     class LinuxApp(App):
     
         def notify(self, title, text):
     
    -        term = os.environ['TERM']
    -        if term in ('xterm', 'vte'):
    -            click.echo("\033]777;notify;{};{}\007".format(title, text))
    +        # Currently we only try this notification method
    +        # of sending an escape sequence to the terminal
    +        #
    +        if _osc_777_supported():
    +            click.echo("\033]777;notify;{};{}\007".format(title, text), err=True)
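    For illustration, a hedged, self-contained restatement of the new gating logic (a sketch, not the module itself), showing when the OSC 777 escape would be emitted:

    def osc_777_supported(environ):
        # Only xterm-compatible or vte-based terminals are considered, and
        # only when VTE_VERSION reports >= 4600, where unpatched vte silently
        # ignores OSC 777 instead of printing garbage to the terminal.
        term = environ.get('TERM')
        if term and (term.startswith('xterm') or term.startswith('vte')):
            try:
                return int(environ.get('VTE_VERSION')) >= 4600
            except (ValueError, TypeError):
                return False
        return False

    assert osc_777_supported({'TERM': 'xterm-256color', 'VTE_VERSION': '5202'})
    assert not osc_777_supported({'TERM': 'xterm'})                   # no VTE_VERSION set
    assert not osc_777_supported({'TERM': 'dumb', 'VTE_VERSION': '5202'})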

  • buildstream/_pipeline.py
    @@ -28,6 +28,7 @@ from ._message import Message, MessageType
     from ._profile import Topics, profile_start, profile_end
     from . import Scope, Consistency
     from ._project import ProjectRefStorage
    +from .plugin import CoreWarnings
     
     
     # PipelineSelection()
    @@ -110,6 +111,8 @@ class Pipeline():
                                                rewritable=rewritable,
                                                fetch_subprojects=fetch_subprojects)
     
    +        self._check_unaliased_urls(elements)
    +
             # Now create element groups to match the input target groups
             elt_iter = iter(elements)
             element_groups = [
    @@ -355,10 +358,14 @@
         #
         def assert_consistent(self, elements):
             inconsistent = []
    +        inconsistent_workspaced = []
             with self._context.timed_activity("Checking sources"):
                 for element in elements:
                     if element._get_consistency() == Consistency.INCONSISTENT:
    -                    inconsistent.append(element)
    +                    if element._get_workspace():
    +                        inconsistent_workspaced.append(element)
    +                    else:
    +                        inconsistent.append(element)
     
             if inconsistent:
                 detail = "Exact versions are missing for the following elements:\n\n"
    @@ -372,6 +379,13 @@
     
                 raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
     
    +        if inconsistent_workspaced:
    +            detail = "Some workspaces do not exist but are not closed\n" + \
    +                     "Try closing them with `bst workspace close`\n\n"
    +            for element in inconsistent_workspaced:
    +                detail += "  " + element._get_full_name() + "\n"
    +            raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
    +
         #############################################################
         #                     Private Methods                       #
         #############################################################
    @@ -422,6 +436,30 @@
     
                     raise PipelineError("Untrackable sources", detail=detail, reason="untrackable-sources")
     
    +    # _check_unaliased_urls():
    +    #
    +    # Checks all dependencies sources for urls not using an alias. Raises an appropriate warning.
    +    #
    +    # Args:
    +    #   elements (list of Element): The list of elements loaded to check.
    +    #
    +    def _check_unaliased_urls(self, elements):
    +        unaliased_urls = []
    +        for element in self.dependencies(elements, Scope.ALL):
    +            sources = element.sources()
    +            unaliased_urls.extend(itertools.chain(*[
    +                source._get_unaliased_urls() for source in sources
    +            ]))
    +
    +        if unaliased_urls:
    +            message = "The use of an alias in urls is strongly advised."
    +            unaliased_urls_strs = [
    +                "{}: {}".format(prov, url)
    +                for url, prov in unaliased_urls
    +            ]
    +            detail = "Unaliased URLs:\n\t" + "\n\t".join(unaliased_urls_strs)
    +            elements[0].warn(message, detail=detail, warning_token=CoreWarnings.URL_WITHOUT_ALIAS)
    +
         # _message()
         #
         # Local message propagator
    

  • buildstream/_version.py
    @@ -43,6 +43,7 @@ def get_config():
         cfg.VCS = "git"
         cfg.style = "pep440"
         cfg.tag_prefix = ""
    +    cfg.tag_regex = "*.*.*"
         cfg.parentdir_prefix = "BuildStream-"
         cfg.versionfile_source = "buildstream/_version.py"
         cfg.verbose = False
    @@ -215,7 +216,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
     
     
     @register_vcs_handler("git", "pieces_from_vcs")
    -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    +def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_command):
         """Get version from 'git describe' in the root of the source tree.
     
         This only gets called if the git-archive 'subst' keywords were *not*
    @@ -237,7 +238,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
         # if there isn't one, this yields HEX[-dirty] (no NUM)
         describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                               "--always", "--long",
    -                                          "--match", "%s*" % tag_prefix],
    +                                          "--match", "%s%s" % (tag_prefix, tag_regex)],
                                        cwd=root)
         # --long was added in git-1.5.5
         if describe_out is None:
    @@ -505,7 +506,7 @@ def get_versions():
                     "date": None}
     
         try:
    -        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
    +        pieces = git_pieces_from_vcs(cfg.tag_prefix, cfg.tag_regex, root, verbose)
             return render(pieces, cfg.style)
         except NotThisMethod:
             pass
    

  • buildstream/data/projectconfig.yaml
    @@ -68,7 +68,7 @@ variables:
       # Generic implementation for stripping debugging symbols
       strip-binaries: |
     
    -    find "%{install-root}" -type f \
    +    cd "%{install-root}" && find -type f \
           '(' -perm -111 -o -name '*.so*' \
               -o -name '*.cmxs' -o -name '*.node' ')' \
           -exec sh -ec \
    @@ -76,7 +76,7 @@ variables:
          if [ "$hdr" != "$(printf \\x7fELF)" ]; then
             exit 0
          fi
    -       debugfile="%{install-root}%{debugdir}/$(basename "$1")"
    +       debugfile="%{install-root}%{debugdir}/$1"
           mkdir -p "$(dirname "$debugfile")"
           objcopy %{objcopy-extract-args} "$1" "$debugfile"
           chmod 644 "$debugfile"
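    A hedged sketch of why this changes the debug file layout (the paths below are illustrative stand-ins for %{install-root} and %{debugdir}): with the old command, find produced absolute paths and only the basename survived, flattening every stripped binary into one directory; running find relative to the install root makes "$1" a relative path, so the original tree is preserved under %{debugdir}, which is what the updated integration tests now expect (e.g. /usr/lib/debug/usr/bin/hello).

    import os

    install_root = "/buildstream-install"   # stand-in for %{install-root}
    debugdir = "/usr/lib/debug"             # stand-in for %{debugdir}

    # Old behaviour: "$1" was an absolute path and basename() flattened it.
    old_arg = os.path.join(install_root, "usr/bin/hello")
    old_debugfile = install_root + debugdir + "/" + os.path.basename(old_arg)
    assert old_debugfile == "/buildstream-install/usr/lib/debug/hello"

    # New behaviour: find runs from the install root, "$1" is relative,
    # so the debug file mirrors the binary's real location.
    new_arg = "./usr/bin/hello"
    new_debugfile = os.path.normpath(install_root + debugdir + "/" + new_arg)
    assert new_debugfile == "/buildstream-install/usr/lib/debug/usr/bin/hello"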
    

  • buildstream/plugin.py
    @@ -508,6 +508,7 @@ class Plugin():
                 project = self._get_project()
     
                 if project._warning_is_fatal(warning_token):
    +                detail = detail if detail else ""
                     raise PluginError(message="{}\n{}".format(brief, detail), reason=warning_token)
     
             self.__message(MessageType.WARN, brief=brief, detail=detail)
    @@ -757,6 +758,16 @@ class CoreWarnings():
         which is found to be invalid based on the configured track
         """
     
    +    URL_WITHOUT_ALIAS = "url-without-alias"
    +    """
    +    This warning will be produced when a source is configured with a url which does not contain an alias.
    +    """
    +
    +    UNKNOWN_ALIAS = "unknown-alias"
    +    """
    +    This warning will be produced when a source is configured with a url which does not contain an alias.
    +    """
    +
     
     __CORE_WARNINGS = [
         value

  • buildstream/source.py
    @@ -137,6 +137,7 @@ from . import Plugin
     from . import _yaml, utils
     from ._exceptions import BstError, ImplError, ErrorDomain
     from ._projectrefs import ProjectRefStorage
    +from .plugin import CoreWarnings
     
     
     class Consistency():
    @@ -219,10 +220,7 @@ class SourceFetcher():
             Args:
                url (str): The url used to download.
             """
    -        # Not guaranteed to be a valid alias yet.
    -        # Ensuring it's a valid alias currently happens in Project.get_alias_uris
    -        alias, _ = url.split(utils._ALIAS_SEPARATOR, 1)
    -        self.__alias = alias
    +        self.__alias = _extract_alias(url)
     
         #############################################################
         #            Private Methods used in BuildStream            #
    @@ -282,7 +280,7 @@ class Source(Plugin):
             self.__consistency = Consistency.INCONSISTENT   # Cached consistency state
             self.__alias_override = alias_override          # Tuple of alias and its override to use instead
             self.__expected_alias = None                    # A hacky way to store the first alias used
    -
    +        self.__configuring = False
             # FIXME: Reconstruct a MetaSource from a Source instead of storing it.
             self.__meta = meta                              # MetaSource stored so we can copy this source later.
     
    @@ -291,8 +289,9 @@ class Source(Plugin):
             self.__init_defaults(meta)
             self.__config = self.__extract_config(meta)
             self.__first_pass = meta.first_pass
    +        self.__unaliased_urls = []
     
    -        self.configure(self.__config)
    +        self.__do_configure()
     
         COMMON_CONFIG_KEYS = ['kind', 'directory']
         """Common source config keys
    @@ -459,8 +458,7 @@ class Source(Plugin):
     
             *Since: 1.2*
             """
    -        alias, _ = url.split(utils._ALIAS_SEPARATOR, 1)
    -        self.__expected_alias = alias
    +        self.__expected_alias = self.__alias_from_url(url)
     
         def get_source_fetchers(self):
             """Get the objects that are used for fetching
    @@ -525,8 +523,7 @@ class Source(Plugin):
             else:
                 # Sneakily store the alias if it hasn't already been stored
                 if not self.__expected_alias and url and utils._ALIAS_SEPARATOR in url:
    -                url_alias, _ = url.split(utils._ALIAS_SEPARATOR, 1)
    -                self.__expected_alias = url_alias
    +                self.mark_download_url(url)
     
                 project = self._get_project()
                 return project.translate_url(url, first_pass=self.__first_pass)
    @@ -861,10 +858,24 @@ class Source(Plugin):
             else:
                 return None
     
    +    # Gets the URLs which don't use an alias.
    +    #
    +    def _get_unaliased_urls(self):
    +        return self.__unaliased_urls
    +
         #############################################################
         #                   Local Private Methods                   #
         #############################################################
     
    +    # __do_configure()
    +    #
    +    # Performs self.configure() and sets self.__configuring to True whilst the source configures.
    +    #
    +    def __do_configure(self):
    +        self.__configuring = True
    +        self.configure(self.__config)
    +        self.__configuring = False
    +
         # Tries to call fetch for every mirror, stopping once it succeeds
         def __do_fetch(self, **kwargs):
             project = self._get_project()
    @@ -914,12 +925,12 @@
         # Tries to call track for every mirror, stopping once it succeeds
         def __do_track(self, **kwargs):
             project = self._get_project()
    -        # If there are no mirrors, or no aliases to replace, there's nothing to do here.
             alias = self._get_alias()
             if self.__first_pass:
                 mirrors = project.first_pass_config.mirrors
             else:
                 mirrors = project.config.mirrors
    +        # If there are no mirrors, or no aliases to replace, there's nothing to do here.
             if not mirrors or not alias:
                 return self.track(**kwargs)
     
    @@ -988,3 +999,18 @@
     
                 if src.get_consistency() == Consistency.RESOLVED:
                     src._fetch(previous_sources[0:index])
    +
    +
    +    def __alias_from_url(self, url):
    +        alias = _extract_alias(url)
    +        if not alias:
    +            self.__unaliased_urls.append((url, self._get_provenance()))
    +        return alias
    +
    +
    +def _extract_alias(url):
    +    parts = url.split(utils._ALIAS_SEPARATOR, 1)
    +    if len(parts) > 1 and not parts[0].lower() in utils._URI_SCHEMES:
    +        return parts[0]
    +    else:
    +        return ""
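    For illustration, a standalone restatement of the new _extract_alias() helper, with the separator and scheme list from buildstream/utils.py inlined (the example URLs are hypothetical):

    _ALIAS_SEPARATOR = ':'
    _URI_SCHEMES = ["http", "https", "ftp", "file", "git", "sftp", "ssh"]

    def extract_alias(url):
        # An alias is the text before the first ':' unless that text is a
        # plain URI scheme, in which case the url is considered unaliased.
        parts = url.split(_ALIAS_SEPARATOR, 1)
        if len(parts) > 1 and not parts[0].lower() in _URI_SCHEMES:
            return parts[0]
        return ""

    assert extract_alias("upstream:hello.tar.gz") == "upstream"      # aliased
    assert extract_alias("https://example.com/hello.tar.gz") == ""   # raw scheme, no alias
    assert extract_alias("hello.tar.gz") == ""                       # no separator at all

    Sources whose urls come back empty here are collected into __unaliased_urls and later surfaced by Pipeline._check_unaliased_urls() as the new url-without-alias warning.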

  • buildstream/utils.py
    @@ -47,6 +47,7 @@ _magic_timestamp = calendar.timegm([2011, 11, 11, 11, 11, 11])
     
     # The separator we use for user specified aliases
     _ALIAS_SEPARATOR = ':'
    +_URI_SCHEMES = ["http", "https", "ftp", "file", "git", "sftp", "ssh"]
     
     
     class UtilError(BstError):
    @@ -484,7 +485,16 @@ def get_bst_version():
             raise UtilError("Your git repository has no tags - BuildStream can't "
                             "determine its version. Please run `git fetch --tags`.")
     
    -    return (int(versions[0]), int(versions[1]))
    +    try:
    +        return (int(versions[0]), int(versions[1]))
    +    except IndexError:
    +        raise UtilError("Cannot detect Major and Minor parts of the version\n"
    +                        "Version: {} not in XX.YY.whatever format"
    +                        .format(__version__))
    +    except ValueError:
    +        raise UtilError("Cannot convert version to integer numbers\n"
    +                        "Version: {} not in Integer.Integer.whatever format"
    +                        .format(__version__))
     
     
     @contextmanager
    

  • dev-requirements.txt
    @@ -1,6 +1,7 @@
     coverage == 4.4.0
     pep8
    -pytest >= 3.1.0
    +pylint == 2.1.1
    +pytest >= 3.7
     pytest-cov >= 2.5.0
     pytest-datafiles
     pytest-env
    

  • setup.cfg
    @@ -4,6 +4,7 @@ style = pep440
     versionfile_source = buildstream/_version.py
     versionfile_build = buildstream/_version.py
     tag_prefix =
    +tag_regex = *.*.*
     parentdir_prefix = BuildStream-
     
     [aliases]
    

  • setup.py
    @@ -224,6 +224,13 @@ def get_cmdclass():
     with open('dev-requirements.txt') as dev_reqs:
         dev_requires = dev_reqs.read().splitlines()
     
    +#####################################################
    +#     Prepare package description from README       #
    +#####################################################
    +with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
    +                       'README.rst')) as readme:
    +    long_description = readme.read()
    +
     
     #####################################################
     #             Main setup() Invocation               #
    @@ -233,8 +240,13 @@ setup(name='BuildStream',
           version=versioneer.get_version(),
           cmdclass=get_cmdclass(),
     
    +      author='BuildStream Developers',
    +      author_email='buildstream-list gnome org',
           description='A framework for modelling build pipelines in YAML',
           license='LGPL',
    +      long_description=long_description,
    +      long_description_content_type='text/x-rst; charset=UTF-8',
    +      url='https://gitlab.com/BuildStream/buildstream',
           packages=find_packages(exclude=('tests', 'tests.*')),
           package_data={'buildstream': ['plugins/*/*.py', 'plugins/*/*.yaml',
                                         'data/*.yaml', 'data/*.sh.in']},
    @@ -261,7 +273,7 @@ setup(name='BuildStream',
           'ruamel.yaml < 0.15.52',
           'pluginbase',
           'Click',
    -      'blessings',
    +      'blessings >= 1.6',
           'jinja2 >= 2.10',
           'protobuf >= 3.5',
           'grpcio >= 1.10',
    

  • tests/artifactcache/expiry.py
    @@ -268,3 +268,38 @@ def test_invalid_cache_quota(cli, datafiles, tmpdir, quota, success):
             res.assert_success()
         else:
             res.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +def test_extract_expiry(cli, datafiles, tmpdir):
    +    project = os.path.join(datafiles.dirname, datafiles.basename)
    +    element_path = 'elements'
    +
    +    cli.configure({
    +        'cache': {
    +            'quota': 10000000,
    +        }
    +    })
    +
    +    create_element_size('target.bst', project, element_path, [], 6000000)
    +    res = cli.run(project=project, args=['build', 'target.bst'])
    +    res.assert_success()
    +    assert cli.get_element_state(project, 'target.bst') == 'cached'
    +
    +    # Force creating extract
    +    res = cli.run(project=project, args=['checkout', 'target.bst', os.path.join(str(tmpdir), 'checkout')])
    +    res.assert_success()
    +
    +    extractdir = os.path.join(project, 'cache', 'artifacts', 'extract', 'test', 'target')
    +    extracts = os.listdir(extractdir)
    +    assert(len(extracts) == 1)
    +    extract = os.path.join(extractdir, extracts[0])
    +
    +    # Remove target.bst from artifact cache
    +    create_element_size('target2.bst', project, element_path, [], 6000000)
    +    res = cli.run(project=project, args=['build', 'target2.bst'])
    +    res.assert_success()
    +    assert cli.get_element_state(project, 'target.bst') != 'cached'
    +
    +    # Now the extract should be removed.
    +    assert not os.path.exists(extract)

  • tests/cachekey/project/elements/build1.expected
    @@ -1 +1 @@
    -90fa9c1b5334aac3bfd8956d92af0a91e2f2bbcbcac73861957165e577bc8768
    \ No newline at end of file
    +05429485dff08bdb968f7d10c2cdda63be49c8a783d54863a0d4abce44bbebe9
    \ No newline at end of file

  • tests/cachekey/project/elements/build2.expected
    @@ -1 +1 @@
    -a62c29fe4e05820412e391430f61aa88a1a82e138de8cac726a9dc4fcd7ed8b9
    \ No newline at end of file
    +4155c7bc836cdb092de3241fa92883bd8c7dd94c55affa406e559aeb6252c669
    \ No newline at end of file

  • tests/cachekey/project/target.expected
    @@ -1 +1 @@
    -09620aa58875d96611d22632b7585a0f22f88f5ecca6f5d1915d3e529d036bd8
    \ No newline at end of file
    +f5affaacd3ac724f5415a7a8349c6dca6122841dd7f9769de4f9d6cb7185f9b8
    \ No newline at end of file

  • tests/examples/autotools.py
    @@ -28,7 +28,9 @@ def test_autotools_build(cli, tmpdir, datafiles):
     
         assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
                                    '/usr/share', '/usr/lib/debug',
    -                               '/usr/lib/debug/hello', '/usr/bin/hello',
    +                               '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    +                               '/usr/lib/debug/usr/bin/hello',
    +                               '/usr/bin/hello',
                                    '/usr/share/doc', '/usr/share/doc/amhello',
                                    '/usr/share/doc/amhello/README'])
     
    

  • tests/examples/developing.py
    @@ -29,7 +29,9 @@ def test_autotools_build(cli, tmpdir, datafiles):
     
         assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
                                    '/usr/share', '/usr/lib/debug',
    -                               '/usr/lib/debug/hello', '/usr/bin/hello'])
    +                               '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    +                               '/usr/lib/debug/usr/bin/hello',
    +                               '/usr/bin/hello'])
     
     
     # Test the unmodified hello command works as expected.
    

  • tests/examples/flatpak-autotools.py
    @@ -47,8 +47,10 @@ def test_autotools_build(cli, tmpdir, datafiles):
     
         assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
                                    '/usr/share', '/usr/lib/debug',
    -                               '/usr/lib/debug/hello', '/usr/bin/hello',
    -                               '/usr/share/doc', '/usr/share/doc/amhello',
    +                               '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    +                               '/usr/lib/debug/usr/bin/hello',
    +                               '/usr/bin/hello', '/usr/share/doc',
    +                               '/usr/share/doc/amhello',
                                    '/usr/share/doc/amhello/README'])
     
     
     
    

  • tests/frontend/workspace.py
    @@ -767,3 +767,16 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
         # Check that workspace config is converted correctly if necessary
         loaded_config = _yaml.node_sanitize(_yaml.load(workspace_config_path))
         assert loaded_config == parse_dict_as_yaml(expected)
    +
    +
    +@pytest.mark.datafiles(DATA_DIR)
    +@pytest.mark.parametrize("kind", repo_kinds)
    +def test_inconsitent_pipeline_message(cli, tmpdir, datafiles, kind):
    +    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
    +
    +    shutil.rmtree(workspace)
    +
    +    result = cli.run(project=project, args=[
    +        'build', element_name
    +    ])
    +    result.assert_main_error(ErrorDomain.PIPELINE, "inconsistent-pipeline-workspaced")

  • tests/integration/autotools.py
    @@ -31,8 +31,10 @@ def test_autotools_build(cli, tmpdir, datafiles):
     
         assert_contains(checkout, ['/usr', '/usr/lib', '/usr/bin',
                                    '/usr/share', '/usr/lib/debug',
    -                               '/usr/lib/debug/hello', '/usr/bin/hello',
    -                               '/usr/share/doc', '/usr/share/doc/amhello',
    +                               '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    +                               '/usr/lib/debug/usr/bin/hello',
    +                               '/usr/bin/hello', '/usr/share/doc',
    +                               '/usr/share/doc/amhello',
                                    '/usr/share/doc/amhello/README'])
     
     
    

  • tests/integration/cmake.py
    @@ -27,7 +27,9 @@ def test_cmake_build(cli, tmpdir, datafiles):
         assert result.exit_code == 0
     
         assert_contains(checkout, ['/usr', '/usr/bin', '/usr/bin/hello',
    -                               '/usr/lib/debug', '/usr/lib/debug/hello'])
    +                               '/usr/lib/debug', '/usr/lib/debug/usr',
    +                               '/usr/lib/debug/usr/bin',
    +                               '/usr/lib/debug/usr/bin/hello'])
     
     
     @pytest.mark.datafiles(DATA_DIR)
    

  • tests/integration/compose.py
    @@ -39,7 +39,8 @@ def create_compose_element(name, path, config={}):
         # Test flat inclusion
         ([], [], ['/usr', '/usr/lib', '/usr/bin',
                   '/usr/share', '/usr/lib/debug',
    -              '/usr/lib/debug/hello', '/usr/bin/hello',
    +              '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    +              '/usr/lib/debug/usr/bin/hello', '/usr/bin/hello',
                   '/usr/share/doc', '/usr/share/doc/amhello',
                   '/usr/share/doc/amhello/README',
                   '/tests', '/tests/test']),
    @@ -53,13 +54,17 @@ def create_compose_element(name, path, config={}):
                                  '/usr/share/doc/amhello/README']),
         # Test with only runtime excluded
         ([], ['runtime'], ['/usr', '/usr/lib', '/usr/share',
    -                       '/usr/lib/debug', '/usr/lib/debug/hello',
    +                       '/usr/lib/debug', '/usr/lib/debug/usr',
    +                       '/usr/lib/debug/usr/bin',
    +                       '/usr/lib/debug/usr/bin/hello',
                            '/usr/share/doc', '/usr/share/doc/amhello',
                            '/usr/share/doc/amhello/README',
                            '/tests', '/tests/test']),
         # Test with runtime and doc excluded
         ([], ['runtime', 'doc'], ['/usr', '/usr/lib', '/usr/share',
    -                              '/usr/lib/debug', '/usr/lib/debug/hello',
    +                              '/usr/lib/debug', '/usr/lib/debug/usr',
    +                              '/usr/lib/debug/usr/bin',
    +                              '/usr/lib/debug/usr/bin/hello',
                                   '/tests', '/tests/test']),
         # Test with runtime simultaneously in- and excluded
         (['runtime'], ['runtime'], ['/usr', '/usr/lib', '/usr/share']),
    @@ -72,7 +77,8 @@ def create_compose_element(name, path, config={}):
         # Test excluding a custom 'test' domain
         ([], ['test'], ['/usr', '/usr/lib', '/usr/bin',
                         '/usr/share', '/usr/lib/debug',
    -                    '/usr/lib/debug/hello', '/usr/bin/hello',
    +                    '/usr/lib/debug/usr', '/usr/lib/debug/usr/bin',
    +                    '/usr/lib/debug/usr/bin/hello', '/usr/bin/hello',
                         '/usr/share/doc', '/usr/share/doc/amhello',
                         '/usr/share/doc/amhello/README'])
     ])
    

  • tests/integration/source-determinism.py
    @@ -2,7 +2,8 @@ import os
     import pytest
     
     from buildstream import _yaml, utils
    -from tests.testutils import cli, create_repo, ALL_REPO_KINDS
    +from tests.testutils import create_repo, ALL_REPO_KINDS
    +from tests.testutils import cli_integration as cli
     
     
     DATA_DIR = os.path.join(
    @@ -28,7 +29,7 @@ def create_test_directory(*path, mode=0o644):
     @pytest.mark.integration
     @pytest.mark.datafiles(DATA_DIR)
     @pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS] + ['local'])
    -def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
    +def test_deterministic_source_umask(cli, tmpdir, datafiles, kind, integration_cache):
         project = str(datafiles)
         element_name = 'list'
         element_path = os.path.join(project, 'elements', element_name)
    @@ -91,14 +92,16 @@ def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
                     return f.read()
             finally:
                 os.umask(old_umask)
    -            cli.remove_artifact_from_cache(project, element_name)
    +            cache_dir = os.path.join(integration_cache, 'artifacts')
    +            cli.remove_artifact_from_cache(project, element_name,
    +                                           cache_dir=cache_dir)
     
         assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
     
     
     @pytest.mark.integration
     @pytest.mark.datafiles(DATA_DIR)
    -def test_deterministic_source_local(cli, tmpdir, datafiles):
    +def test_deterministic_source_local(cli, tmpdir, datafiles, integration_cache):
         """Only user rights should be considered for local source.
         """
         project = str(datafiles)
    @@ -150,6 +153,8 @@ def test_deterministic_source_local(cli, tmpdir, datafiles):
                 with open(os.path.join(checkoutdir, 'ls-l'), 'r') as f:
                     return f.read()
             finally:
    -            cli.remove_artifact_from_cache(project, element_name)
    +            cache_dir = os.path.join(integration_cache, 'artifacts')
    +            cli.remove_artifact_from_cache(project, element_name,
    +                                           cache_dir=cache_dir)
     
         assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)

  • tests/testutils/runcli.py
    @@ -198,8 +198,10 @@ class Cli():
             for key, val in config.items():
                 self.config[key] = val
     
    -    def remove_artifact_from_cache(self, project, element_name):
    -        cache_dir = os.path.join(project, 'cache', 'artifacts')
    +    def remove_artifact_from_cache(self, project, element_name,
    +                                   *, cache_dir=None):
    +        if not cache_dir:
    +            cache_dir = os.path.join(project, 'cache', 'artifacts')
     
             cache_dir = os.path.join(cache_dir, 'cas', 'refs', 'heads')
     
    

  • versioneer.py
    @@ -355,6 +355,7 @@ def get_config_from_root(root):
         cfg.versionfile_source = get(parser, "versionfile_source")
         cfg.versionfile_build = get(parser, "versionfile_build")
         cfg.tag_prefix = get(parser, "tag_prefix")
    +    cfg.tag_regex = get(parser, "tag_regex") or "*"
         if cfg.tag_prefix in ("''", '""'):
             cfg.tag_prefix = ""
         cfg.parentdir_prefix = get(parser, "parentdir_prefix")
    @@ -463,6 +464,7 @@ def get_config():
         cfg.VCS = "git"
         cfg.style = "%(STYLE)s"
         cfg.tag_prefix = "%(TAG_PREFIX)s"
    +    cfg.tag_regex = "%(TAG_REGEX)s"
         cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
         cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
         cfg.verbose = False
    @@ -635,7 +637,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
     
     
     @register_vcs_handler("git", "pieces_from_vcs")
    -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    +def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_command):
         """Get version from 'git describe' in the root of the source tree.
     
         This only gets called if the git-archive 'subst' keywords were *not*
    @@ -657,7 +659,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
         # if there isn't one, this yields HEX[-dirty] (no NUM)
         describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                               "--always", "--long",
    -                                          "--match", "%%s*" %% tag_prefix],
    +                                          "--match", "%%s%%s" %% (tag_prefix, tag_regex)],
                                        cwd=root)
         # --long was added in git-1.5.5
         if describe_out is None:
    @@ -925,7 +927,7 @@ def get_versions():
                     "date": None}
     
         try:
    -        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
    +        pieces = git_pieces_from_vcs(cfg.tag_prefix, cfg.tag_regex, root, verbose)
             return render(pieces, cfg.style)
         except NotThisMethod:
             pass
    @@ -1027,7 +1029,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
     
     
     @register_vcs_handler("git", "pieces_from_vcs")
    -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    +def git_pieces_from_vcs(tag_prefix, tag_regex, root, verbose, run_command=run_command):
         """Get version from 'git describe' in the root of the source tree.
     
         This only gets called if the git-archive 'subst' keywords were *not*
    @@ -1049,7 +1051,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
         # if there isn't one, this yields HEX[-dirty] (no NUM)
         describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                               "--always", "--long",
    -                                          "--match", "%s*" % tag_prefix],
    +                                          "--match", "%s%s" % (tag_prefix, tag_regex)],
                                        cwd=root)
         # --long was added in git-1.5.5
         if describe_out is None:
    @@ -1451,7 +1453,7 @@ def get_versions(verbose=False):
         from_vcs_f = handlers.get("pieces_from_vcs")
         if from_vcs_f:
             try:
    -            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
    +            pieces = from_vcs_f(cfg.tag_prefix, cfg.tag_regex, root, verbose)
                 ver = render(pieces, cfg.style)
                 if verbose:
                     print("got version from VCS %s" % ver)
    @@ -1586,6 +1588,7 @@ def get_cmdclass():
                                 {"DOLLAR": "$",
                                  "STYLE": cfg.style,
                                  "TAG_PREFIX": cfg.tag_prefix,
    +                             "TAG_REGEX": cfg.tag_regex,
                                  "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                                  "VERSIONFILE_SOURCE": cfg.versionfile_source,
                                  })
    @@ -1615,6 +1618,7 @@ def get_cmdclass():
                                 {"DOLLAR": "$",
                                  "STYLE": cfg.style,
                                  "TAG_PREFIX": cfg.tag_prefix,
    +                             "TAG_REGEX": cfg.tag_regex,
                                  "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                                  "VERSIONFILE_SOURCE": cfg.versionfile_source,
                                  })
    @@ -1716,6 +1720,7 @@ def do_setup():
             f.write(LONG % {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
    +                        "TAG_REGEX": cfg.tag_regex,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
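    Taken together with setup.cfg (tag_prefix empty, tag_regex = *.*.*), the patched argument assembly narrows git describe to release-shaped tags; a hedged sketch of the resulting invocation:

    tag_prefix = ""
    tag_regex = "*.*.*"
    args = ["describe", "--tags", "--dirty", "--always", "--long",
            "--match", "%s%s" % (tag_prefix, tag_regex)]
    # Equivalent to: git describe --tags --dirty --always --long --match '*.*.*'
    # i.e. only tags shaped like X.Y.Z are considered when deriving the version,
    # instead of whichever tag happens to be nearest.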
    


