Gökçen Nurlu pushed to branch gokcennurlu/remote_url_override_push_error at BuildStream / buildstream
Commits:
- e29aea36 by William Salmon at 2018-12-19T13:23:19Z
- 898a23a5 by Will Salmon at 2018-12-19T14:35:41Z
- a2f1d879 by Javier Jardón at 2018-12-19T15:36:11Z
- aae5e4b3 by Javier Jardón at 2018-12-19T16:09:48Z
- 2b767fe8 by Jürg Billeter at 2018-12-20T10:06:11Z
- 7a102144 by Jürg Billeter at 2018-12-20T10:06:11Z
- b325989e by Jürg Billeter at 2018-12-20T10:07:20Z
- 77d8ad45 by Jürg Billeter at 2018-12-20T10:42:39Z
- 9835b7f1 by Chandan Singh at 2018-12-20T12:50:02Z
- 14da6955 by Chandan Singh at 2018-12-20T13:34:10Z
- 7368f569 by Angelos Evripiotis at 2018-12-20T13:59:23Z
- 46efc91d by Angelos Evripiotis at 2018-12-20T13:59:23Z
- e0c575c4 by Angelos Evripiotis at 2018-12-20T14:37:38Z
- c05d8b4f by Chandan Singh at 2018-12-20T15:53:12Z
- e8055a56 by Chandan Singh at 2018-12-20T15:53:12Z
- cd4889af by Chandan Singh at 2018-12-20T16:34:08Z
- ac995236 by Tom Pollard at 2018-12-20T17:18:02Z
- c3153dea by Tom Pollard at 2018-12-21T10:10:58Z
- d5f3c723 by Gökçen Nurlu at 2018-12-21T12:31:37Z
- df73c09f by Gökçen Nurlu at 2018-12-21T12:31:37Z
- fc47c148 by Gökçen Nurlu at 2018-12-21T12:31:37Z
25 changed files:
- NEWS
- README.rst
- buildstream/__init__.py
- buildstream/_artifactcache/artifactcache.py
- buildstream/_context.py
- buildstream/_frontend/app.py
- buildstream/_frontend/cli.py
- + buildstream/_gitsourcebase.py
- buildstream/_project.py
- buildstream/_scheduler/queues/fetchqueue.py
- buildstream/_stream.py
- buildstream/data/userconfig.yaml
- buildstream/element.py
- buildstream/plugins/sources/git.py
- buildstream/sandbox/sandbox.py
- buildstream/utils.py
- tests/artifactcache/pull.py
- tests/artifactcache/push.py
- tests/frontend/logging.py
- tests/integration/build-tree.py
- + tests/sources/no-fetch-cached/files/file
- + tests/sources/no-fetch-cached/plugins/sources/always_cached.py
- + tests/sources/no-fetch-cached/project.conf
- + tests/sources/no_fetch_cached.py
- tests/testutils/runcli.py
Changes:
| ... | ... | @@ -30,6 +30,12 @@ buildstream 1.3.1 |
| 30 | 30 |
make changes to their .bst files if they are expecting these environment
|
| 31 | 31 |
variables to be set.
|
| 32 | 32 |
|
| 33 |
+ o BREAKING CHANGE: The 'auto-init' functionality has been removed. This would
|
|
| 34 |
+ offer to create a project in the event that bst was run against a directory
|
|
| 35 |
+ without a project, to be friendly to new users. It has been replaced with
|
|
| 36 |
+ an error message and a hint instead, to avoid bothering folks that just
|
|
| 37 |
+ made a mistake.
|
|
| 38 |
+ |
|
| 33 | 39 |
o Failed builds are included in the cache as well.
|
| 34 | 40 |
`bst checkout` will provide anything in `%{install-root}`.
|
| 35 | 41 |
A build including cached fails will cause any dependent elements
|
| ... | ... | @@ -67,8 +73,8 @@ buildstream 1.3.1 |
| 67 | 73 |
instead of just a specially-formatted build-root with a `root` and `scratch`
|
| 68 | 74 |
subdirectory.
|
| 69 | 75 |
|
| 70 |
- o The buildstream.conf file learned new 'prompt.auto-init',
|
|
| 71 |
- 'prompt.really-workspace-close-remove-dir', and
|
|
| 76 |
+ o The buildstream.conf file learned new
|
|
| 77 |
+ 'prompt.really-workspace-close-remove-dir' and
|
|
| 72 | 78 |
'prompt.really-workspace-reset-hard' options. These allow users to suppress
|
| 73 | 79 |
certain confirmation prompts, e.g. double-checking that the user meant to
|
| 74 | 80 |
run the command as typed.
|
| ... | ... | @@ -16,6 +16,9 @@ About |
| 16 | 16 |
.. image:: https://img.shields.io/pypi/v/BuildStream.svg
|
| 17 | 17 |
:target: https://pypi.org/project/BuildStream
|
| 18 | 18 |
|
| 19 |
+.. image:: https://app.fossa.io/api/projects/git%2Bgitlab.com%2FBuildStream%2Fbuildstream.svg?type=shield
|
|
| 20 |
+ :target: https://app.fossa.io/projects/git%2Bgitlab.com%2FBuildStream%2Fbuildstream?ref=badge_shield
|
|
| 21 |
+ |
|
| 19 | 22 |
|
| 20 | 23 |
What is BuildStream?
|
| 21 | 24 |
====================
|
| ... | ... | @@ -34,3 +34,8 @@ if "_BST_COMPLETION" not in os.environ: |
| 34 | 34 |
from .element import Element, ElementError
|
| 35 | 35 |
from .buildelement import BuildElement
|
| 36 | 36 |
from .scriptelement import ScriptElement
|
| 37 |
+ |
|
| 38 |
+ # XXX We are exposing a private member here as we expect it to move to a
|
|
| 39 |
+ # separate package soon. See the following discussion for more details:
|
|
| 40 |
+ # https://gitlab.com/BuildStream/buildstream/issues/739#note_124819869
|
|
| 41 |
+ from ._gitsourcebase import _GitSourceBase
|
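With this export in place, a source plugin can reuse the base class along these lines (a minimal sketch; the rewritten plugins/sources/git.py at the end of this diff performs exactly this import):

    from buildstream import _GitSourceBase

    class GitSource(_GitSourceBase):
        pass

    # Plugin entry point expected by BuildStream
    def setup():
        return GitSource
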
| ... | ... | @@ -110,36 +110,42 @@ class ArtifactCache(): |
| 110 | 110 |
# assume project and element names are not allowed to contain slashes
|
| 111 | 111 |
return '{0}/{1}/{2}'.format(project.name, element_name, key)
|
| 112 | 112 |
|
| 113 |
+ # get_remotes_from_projects()
|
|
| 114 |
+ #
|
|
| 115 |
+ # Generates a list of artifact cache specs based on project configuration
|
|
| 116 |
+ #
|
|
| 117 |
+ # Returns:
|
|
| 118 |
+ # (list of (list of ArtifactCacheSpec, Project)): Configurations, each
|
|
| 119 |
+ # ready to be consumed by `self._set_remotes()`
|
|
| 120 |
+ #
|
|
| 121 |
+ # This requires that all of the projects which are to be processed in the session
|
|
| 122 |
+ # have already been loaded and are observable in the Context.
|
|
| 123 |
+ #
|
|
| 124 |
+ def get_remotes_from_projects(self):
|
|
| 125 |
+ return [
|
|
| 126 |
+ (_configured_remote_artifact_cache_specs(self.context, prj), prj)
|
|
| 127 |
+ for prj in self.context.get_projects()
|
|
| 128 |
+ ]
|
|
| 129 |
+ |
|
| 113 | 130 |
# setup_remotes():
|
| 114 | 131 |
#
|
| 115 | 132 |
# Sets up which remotes to use
|
| 116 | 133 |
#
|
| 117 | 134 |
# Args:
|
| 118 |
- # use_config (bool): Whether to use project configuration
|
|
| 119 |
- # remote_url (str): Remote artifact cache URL
|
|
| 135 |
+ # remotes (list of (list of ArtifactCacheSpec, Project)): Configurations which are
|
|
| 136 |
+ # ready to be consumed by `self._set_remotes()`
|
|
| 120 | 137 |
#
|
| 121 | 138 |
# This requires that all of the projects which are to be processed in the session
|
| 122 | 139 |
# have already been loaded and are observable in the Context.
|
| 123 | 140 |
#
|
| 124 |
- def setup_remotes(self, *, use_config=False, remote_url=None):
|
|
| 125 |
- |
|
| 141 |
+ def setup_remotes(self, *, remotes=None):
|
|
| 126 | 142 |
# Ensure we do not double-initialise since this can be expensive
|
| 127 | 143 |
assert not self._remotes_setup
|
| 128 | 144 |
self._remotes_setup = True
|
| 129 | 145 |
|
| 130 |
- # Initialize remote artifact caches. We allow the commandline to override
|
|
| 131 |
- # the user config in some cases (for example `bst push --remote=...`).
|
|
| 132 |
- has_remote_caches = False
|
|
| 133 |
- if remote_url:
|
|
| 134 |
- self._set_remotes([ArtifactCacheSpec(remote_url, push=True)])
|
|
| 135 |
- has_remote_caches = True
|
|
| 136 |
- if use_config:
|
|
| 137 |
- for project in self.context.get_projects():
|
|
| 138 |
- artifact_caches = _configured_remote_artifact_cache_specs(self.context, project)
|
|
| 139 |
- if artifact_caches: # artifact_caches is a list of ArtifactCacheSpec instances
|
|
| 140 |
- self._set_remotes(artifact_caches, project=project)
|
|
| 141 |
- has_remote_caches = True
|
|
| 142 |
- if has_remote_caches:
|
|
| 146 |
+ if remotes:
|
|
| 147 |
+ for caches, project in remotes:
|
|
| 148 |
+ self._set_remotes(caches, project=project)
|
|
| 143 | 149 |
self._initialize_remotes()
|
| 144 | 150 |
|
| 145 | 151 |
# specs_from_config_node()
|
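Taken together, these two methods split remote discovery from remote initialization. A minimal sketch of the new calling convention (hypothetical caller code; the real caller is Stream._load(), shown further below):

    # Remotes derived from project configuration:
    remotes = artifacts.get_remotes_from_projects()
    artifacts.setup_remotes(remotes=remotes)

    # Or hand-built for a single `--remote` URL:
    remotes = [([ArtifactCacheSpec(remote_url, push=True)], None)]
    artifacts.setup_remotes(remotes=remotes)
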
| ... | ... | @@ -117,10 +117,6 @@ class Context(): |
| 117 | 117 |
# Whether or not to attempt to pull build trees globally
|
| 118 | 118 |
self.pull_buildtrees = None
|
| 119 | 119 |
|
| 120 |
- # Boolean, whether to offer to create a project for the user, if we are
|
|
| 121 |
- # invoked outside of a directory where we can resolve the project.
|
|
| 122 |
- self.prompt_auto_init = None
|
|
| 123 |
- |
|
| 124 | 120 |
# Boolean, whether we double-check with the user that they meant to
|
| 125 | 121 |
# remove a workspace directory.
|
| 126 | 122 |
self.prompt_workspace_close_remove_dir = None
|
| ... | ... | @@ -258,12 +254,10 @@ class Context(): |
| 258 | 254 |
prompt = _yaml.node_get(
|
| 259 | 255 |
defaults, Mapping, 'prompt')
|
| 260 | 256 |
_yaml.node_validate(prompt, [
|
| 261 |
- 'auto-init', 'really-workspace-close-remove-dir',
|
|
| 257 |
+ 'really-workspace-close-remove-dir',
|
|
| 262 | 258 |
'really-workspace-close-project-inaccessible',
|
| 263 | 259 |
'really-workspace-reset-hard',
|
| 264 | 260 |
])
|
| 265 |
- self.prompt_auto_init = _node_get_option_str(
|
|
| 266 |
- prompt, 'auto-init', ['ask', 'no']) == 'ask'
|
|
| 267 | 261 |
self.prompt_workspace_close_remove_dir = _node_get_option_str(
|
| 268 | 262 |
prompt, 'really-workspace-close-remove-dir', ['ask', 'yes']) == 'ask'
|
| 269 | 263 |
self.prompt_workspace_close_project_inaccessible = _node_get_option_str(
|
| ... | ... | @@ -219,13 +219,13 @@ class App(): |
| 219 | 219 |
default_mirror=self._main_options.get('default_mirror'))
|
| 220 | 220 |
except LoadError as e:
|
| 221 | 221 |
|
| 222 |
- # Let's automatically start a `bst init` session in this case
|
|
| 223 |
- if e.reason == LoadErrorReason.MISSING_PROJECT_CONF and self.interactive:
|
|
| 224 |
- click.echo("A project was not detected in the directory: {}".format(directory), err=True)
|
|
| 225 |
- if self.context.prompt_auto_init:
|
|
| 226 |
- click.echo("", err=True)
|
|
| 227 |
- if click.confirm("Would you like to create a new project here?"):
|
|
| 228 |
- self.init_project(None)
|
|
| 222 |
+ # Help users that are new to BuildStream by suggesting 'init'.
|
|
| 223 |
+ # We don't want to slow down users that just made a mistake, so
|
|
| 224 |
+ # don't stop them with an offer to create a project for them.
|
|
| 225 |
+ if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
|
|
| 226 |
+ click.echo("No project found. You can create a new project like so:", err=True)
|
|
| 227 |
+ click.echo("", err=True)
|
|
| 228 |
+ click.echo(" bst init", err=True)
|
|
| 229 | 229 |
|
| 230 | 230 |
self._error_exit(e, "Error loading project")
|
| 231 | 231 |
|
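With this change, invoking bst outside of a project prints the hint below and then exits through the normal error path (output reconstructed from the click.echo calls above):

    No project found. You can create a new project like so:

        bst init
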
| ... | ... | @@ -527,11 +527,14 @@ def show(app, elements, deps, except_, order, format_): |
| 527 | 527 |
help="Mount a file or directory into the sandbox")
|
| 528 | 528 |
@click.option('--isolate', is_flag=True, default=False,
|
| 529 | 529 |
help='Create an isolated build sandbox')
|
| 530 |
+@click.option('--use-buildtree', '-t', 'cli_buildtree', type=click.Choice(['ask', 'try', 'always', 'never']),
|
|
| 531 |
+ default='ask',
|
|
| 532 |
+ help='Defaults to "ask"; if set to "always", the command will fail when a build tree is not available'
|
|
| 530 | 533 |
@click.argument('element', required=False,
|
| 531 | 534 |
type=click.Path(readable=False))
|
| 532 | 535 |
@click.argument('command', type=click.STRING, nargs=-1)
|
| 533 | 536 |
@click.pass_obj
|
| 534 |
-def shell(app, element, sysroot, mount, isolate, build_, command):
|
|
| 537 |
+def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, command):
|
|
| 535 | 538 |
"""Run a command in the target element's sandbox environment
|
| 536 | 539 |
|
| 537 | 540 |
This will stage a temporary sysroot for running the target
|
| ... | ... | @@ -557,6 +560,8 @@ def shell(app, element, sysroot, mount, isolate, build_, command): |
| 557 | 560 |
else:
|
| 558 | 561 |
scope = Scope.RUN
|
| 559 | 562 |
|
| 563 |
+ use_buildtree = False
|
|
| 564 |
+ |
|
| 560 | 565 |
with app.initialized():
|
| 561 | 566 |
if not element:
|
| 562 | 567 |
element = app.context.guess_element()
|
| ... | ... | @@ -570,12 +575,30 @@ def shell(app, element, sysroot, mount, isolate, build_, command): |
| 570 | 575 |
HostMount(path, host_path)
|
| 571 | 576 |
for host_path, path in mount
|
| 572 | 577 |
]
|
| 578 |
+ |
|
| 579 |
+ cached = element._cached_buildtree()
|
|
| 580 |
+ if cli_buildtree == "always":
|
|
| 581 |
+ if cached:
|
|
| 582 |
+ use_buildtree = True
|
|
| 583 |
+ else:
|
|
| 584 |
+ raise AppError("No buildtree is cached but the --use-buildtree option was specified")
|
|
| 585 |
+ elif cli_buildtree == "never":
|
|
| 586 |
+ pass
|
|
| 587 |
+ elif cli_buildtree == "try":
|
|
| 588 |
+ use_buildtree = cached
|
|
| 589 |
+ else:
|
|
| 590 |
+ if app.interactive and cached:
|
|
| 591 |
+ use_buildtree = bool(click.confirm('Do you want to use the cached buildtree?'))
|
|
| 592 |
+ if use_buildtree and not element._cached_success():
|
|
| 593 |
+ click.echo("Warning: using a buildtree from a failed build.")
|
|
| 594 |
+ |
|
| 573 | 595 |
try:
|
| 574 | 596 |
exitcode = app.stream.shell(element, scope, prompt,
|
| 575 | 597 |
directory=sysroot,
|
| 576 | 598 |
mounts=mounts,
|
| 577 | 599 |
isolate=isolate,
|
| 578 |
- command=command)
|
|
| 600 |
+ command=command,
|
|
| 601 |
+ usebuildtree=use_buildtree)
|
|
| 579 | 602 |
except BstError as e:
|
| 580 | 603 |
raise AppError("Error launching shell: {}".format(e), detail=e.detail) from e
|
| 581 | 604 |
|
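Illustrative invocations of the new flag (element name hypothetical):

    bst shell --use-buildtree always hello.bst   # error out if no buildtree is cached
    bst shell -t try hello.bst                   # use the buildtree if cached, else stage sources as usual

Under the default 'ask', an interactive session prompts before using a cached buildtree, and a warning is printed when the buildtree came from a failed build.
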
| 1 |
+#
|
|
| 2 |
+# Copyright (C) 2016 Codethink Limited
|
|
| 3 |
+# Copyright (C) 2018 Bloomberg Finance LP
|
|
| 4 |
+#
|
|
| 5 |
+# This program is free software; you can redistribute it and/or
|
|
| 6 |
+# modify it under the terms of the GNU Lesser General Public
|
|
| 7 |
+# License as published by the Free Software Foundation; either
|
|
| 8 |
+# version 2 of the License, or (at your option) any later version.
|
|
| 9 |
+#
|
|
| 10 |
+# This library is distributed in the hope that it will be useful,
|
|
| 11 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
| 12 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
| 13 |
+# Lesser General Public License for more details.
|
|
| 14 |
+#
|
|
| 15 |
+# You should have received a copy of the GNU Lesser General Public
|
|
| 16 |
+# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
|
| 17 |
+#
|
|
| 18 |
+# Authors:
|
|
| 19 |
+#        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>
|
|
| 20 |
+#        Chandan Singh <csingh43@bloomberg.net>
|
|
| 21 |
+ |
|
| 22 |
+"""Abstract base class for source implementations that work with a Git repository"""
|
|
| 23 |
+ |
|
| 24 |
+import os
|
|
| 25 |
+import re
|
|
| 26 |
+import shutil
|
|
| 27 |
+from collections.abc import Mapping
|
|
| 28 |
+from io import StringIO
|
|
| 29 |
+from tempfile import TemporaryFile
|
|
| 30 |
+ |
|
| 31 |
+from configparser import RawConfigParser
|
|
| 32 |
+ |
|
| 33 |
+from buildstream import Source, SourceError, Consistency, SourceFetcher, CoreWarnings
|
|
| 34 |
+from buildstream import utils
|
|
| 35 |
+from buildstream.utils import move_atomic, DirectoryExistsError
|
|
| 36 |
+ |
|
| 37 |
+GIT_MODULES = '.gitmodules'
|
|
| 38 |
+ |
|
| 39 |
+# Warnings
|
|
| 40 |
+WARN_INCONSISTENT_SUBMODULE = "inconsistent-submodule"
|
|
| 41 |
+WARN_UNLISTED_SUBMODULE = "unlisted-submodule"
|
|
| 42 |
+WARN_INVALID_SUBMODULE = "invalid-submodule"
|
|
| 43 |
+ |
|
| 44 |
+ |
|
| 45 |
+# Because of handling of submodules, we maintain a GitMirror
|
|
| 46 |
+# for the primary git source and also for each submodule it
|
|
| 47 |
+# might have at a given time
|
|
| 48 |
+#
|
|
| 49 |
+class GitMirror(SourceFetcher):
|
|
| 50 |
+ |
|
| 51 |
+ def __init__(self, source, path, url, ref, *, primary=False, tags=[]):
|
|
| 52 |
+ |
|
| 53 |
+ super().__init__()
|
|
| 54 |
+ self.source = source
|
|
| 55 |
+ self.path = path
|
|
| 56 |
+ self.url = url
|
|
| 57 |
+ self.ref = ref
|
|
| 58 |
+ self.tags = tags
|
|
| 59 |
+ self.primary = primary
|
|
| 60 |
+ self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
|
|
| 61 |
+ self.mark_download_url(url)
|
|
| 62 |
+ |
|
| 63 |
+ # Ensures that the mirror exists
|
|
| 64 |
+ def ensure(self, alias_override=None):
|
|
| 65 |
+ |
|
| 66 |
+ # Unfortunately, git does not know how to clone only a specific ref,
|
|
| 67 |
+ # so we have to download all of those gigs even if we only need a couple
|
|
| 68 |
+ # of bytes.
|
|
| 69 |
+ if not os.path.exists(self.mirror):
|
|
| 70 |
+ |
|
| 71 |
+ # Do the initial clone in a tmpdir just because we want an atomic move
|
|
| 72 |
+ # after a long-standing clone which could fail over time. For now, do
|
|
| 73 |
+ # this directly in our git directory, eliminating the chances that the
|
|
| 74 |
+ # system configured tmpdir is not on the same partition.
|
|
| 75 |
+ #
|
|
| 76 |
+ with self.source.tempdir() as tmpdir:
|
|
| 77 |
+ url = self.source.translate_url(self.url, alias_override=alias_override,
|
|
| 78 |
+ primary=self.primary)
|
|
| 79 |
+ self.source.call([self.source.host_git, 'clone', '--mirror', '-n', url, tmpdir],
|
|
| 80 |
+ fail="Failed to clone git repository {}".format(url),
|
|
| 81 |
+ fail_temporarily=True)
|
|
| 82 |
+ |
|
| 83 |
+ try:
|
|
| 84 |
+ move_atomic(tmpdir, self.mirror)
|
|
| 85 |
+ except DirectoryExistsError:
|
|
| 86 |
+ # Another process was quicker to download this repository.
|
|
| 87 |
+ # Let's discard our own
|
|
| 88 |
+ self.source.status("{}: Discarding duplicate clone of {}"
|
|
| 89 |
+ .format(self.source, url))
|
|
| 90 |
+ except OSError as e:
|
|
| 91 |
+ raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
|
|
| 92 |
+ .format(self.source, url, tmpdir, self.mirror, e)) from e
|
|
| 93 |
+ |
|
| 94 |
+ def _fetch(self, alias_override=None):
|
|
| 95 |
+ url = self.source.translate_url(self.url,
|
|
| 96 |
+ alias_override=alias_override,
|
|
| 97 |
+ primary=self.primary)
|
|
| 98 |
+ |
|
| 99 |
+ if alias_override:
|
|
| 100 |
+ remote_name = utils.url_directory_name(alias_override)
|
|
| 101 |
+ _, remotes = self.source.check_output(
|
|
| 102 |
+ [self.source.host_git, 'remote'],
|
|
| 103 |
+ fail="Failed to retrieve list of remotes in {}".format(self.mirror),
|
|
| 104 |
+ cwd=self.mirror
|
|
| 105 |
+ )
|
|
| 106 |
+ if remote_name not in remotes:
|
|
| 107 |
+ self.source.call(
|
|
| 108 |
+ [self.source.host_git, 'remote', 'add', remote_name, url],
|
|
| 109 |
+ fail="Failed to add remote {} with url {}".format(remote_name, url),
|
|
| 110 |
+ cwd=self.mirror
|
|
| 111 |
+ )
|
|
| 112 |
+ else:
|
|
| 113 |
+ remote_name = "origin"
|
|
| 114 |
+ |
|
| 115 |
+ self.source.call([self.source.host_git, 'fetch', remote_name, '--prune', '--force', '--tags'],
|
|
| 116 |
+ fail="Failed to fetch from remote git repository: {}".format(url),
|
|
| 117 |
+ fail_temporarily=True,
|
|
| 118 |
+ cwd=self.mirror)
|
|
| 119 |
+ |
|
| 120 |
+ def fetch(self, alias_override=None):
|
|
| 121 |
+ # Resolve the URL for the message
|
|
| 122 |
+ resolved_url = self.source.translate_url(self.url,
|
|
| 123 |
+ alias_override=alias_override,
|
|
| 124 |
+ primary=self.primary)
|
|
| 125 |
+ |
|
| 126 |
+ with self.source.timed_activity("Fetching from {}"
|
|
| 127 |
+ .format(resolved_url),
|
|
| 128 |
+ silent_nested=True):
|
|
| 129 |
+ self.ensure(alias_override)
|
|
| 130 |
+ if not self.has_ref():
|
|
| 131 |
+ self._fetch(alias_override)
|
|
| 132 |
+ self.assert_ref()
|
|
| 133 |
+ |
|
| 134 |
+ def has_ref(self):
|
|
| 135 |
+ if not self.ref:
|
|
| 136 |
+ return False
|
|
| 137 |
+ |
|
| 138 |
+ # If the mirror doesn't exist, we also don't have the ref
|
|
| 139 |
+ if not os.path.exists(self.mirror):
|
|
| 140 |
+ return False
|
|
| 141 |
+ |
|
| 142 |
+ # Check if the ref is really there
|
|
| 143 |
+ rc = self.source.call([self.source.host_git, 'cat-file', '-t', self.ref], cwd=self.mirror)
|
|
| 144 |
+ return rc == 0
|
|
| 145 |
+ |
|
| 146 |
+ def assert_ref(self):
|
|
| 147 |
+ if not self.has_ref():
|
|
| 148 |
+ raise SourceError("{}: expected ref '{}' was not found in git repository: '{}'"
|
|
| 149 |
+ .format(self.source, self.ref, self.url))
|
|
| 150 |
+ |
|
| 151 |
+ def latest_commit_with_tags(self, tracking, track_tags=False):
|
|
| 152 |
+ _, output = self.source.check_output(
|
|
| 153 |
+ [self.source.host_git, 'rev-parse', tracking],
|
|
| 154 |
+ fail="Unable to find commit for specified branch name '{}'".format(tracking),
|
|
| 155 |
+ cwd=self.mirror)
|
|
| 156 |
+ ref = output.rstrip('\n')
|
|
| 157 |
+ |
|
| 158 |
+ if self.source.ref_format == 'git-describe':
|
|
| 159 |
+ # Prefix the ref with the closest tag, if available,
|
|
| 160 |
+ # to make the ref human readable
|
|
| 161 |
+ exit_code, output = self.source.check_output(
|
|
| 162 |
+ [self.source.host_git, 'describe', '--tags', '--abbrev=40', '--long', ref],
|
|
| 163 |
+ cwd=self.mirror)
|
|
| 164 |
+ if exit_code == 0:
|
|
| 165 |
+ ref = output.rstrip('\n')
|
|
| 166 |
+ |
|
| 167 |
+ if not track_tags:
|
|
| 168 |
+ return ref, []
|
|
| 169 |
+ |
|
| 170 |
+ tags = set()
|
|
| 171 |
+ for options in [[], ['--first-parent'], ['--tags'], ['--tags', '--first-parent']]:
|
|
| 172 |
+ exit_code, output = self.source.check_output(
|
|
| 173 |
+ [self.source.host_git, 'describe', '--abbrev=0', ref] + options,
|
|
| 174 |
+ cwd=self.mirror)
|
|
| 175 |
+ if exit_code == 0:
|
|
| 176 |
+ tag = output.strip()
|
|
| 177 |
+ _, commit_ref = self.source.check_output(
|
|
| 178 |
+ [self.source.host_git, 'rev-parse', tag + '^{commit}'],
|
|
| 179 |
+ fail="Unable to resolve tag '{}'".format(tag),
|
|
| 180 |
+ cwd=self.mirror)
|
|
| 181 |
+ exit_code = self.source.call(
|
|
| 182 |
+ [self.source.host_git, 'cat-file', 'tag', tag],
|
|
| 183 |
+ cwd=self.mirror)
|
|
| 184 |
+ annotated = (exit_code == 0)
|
|
| 185 |
+ |
|
| 186 |
+ tags.add((tag, commit_ref.strip(), annotated))
|
|
| 187 |
+ |
|
| 188 |
+ return ref, list(tags)
|
|
| 189 |
+ |
|
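Note: with `--tags --abbrev=40 --long`, `git describe` yields refs of the form `<tag>-<N>-g<40-char-sha>`, so a 'git-describe' formatted ref remains resolvable: the full commit object name can always be recovered from the suffix.
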
| 190 |
+ def stage(self, directory):
|
|
| 191 |
+ fullpath = os.path.join(directory, self.path)
|
|
| 192 |
+ |
|
| 193 |
+ # Using --shared here avoids copying the objects into the checkout, in any
|
|
| 194 |
+ # case we're just checking out a specific commit and then removing the .git/
|
|
| 195 |
+ # directory.
|
|
| 196 |
+ self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
|
|
| 197 |
+ fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
|
|
| 198 |
+ fail_temporarily=True)
|
|
| 199 |
+ |
|
| 200 |
+ self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
|
|
| 201 |
+ fail="Failed to checkout git ref {}".format(self.ref),
|
|
| 202 |
+ cwd=fullpath)
|
|
| 203 |
+ |
|
| 204 |
+ # Remove .git dir
|
|
| 205 |
+ shutil.rmtree(os.path.join(fullpath, ".git"))
|
|
| 206 |
+ |
|
| 207 |
+ self._rebuild_git(fullpath)
|
|
| 208 |
+ |
|
| 209 |
+ def init_workspace(self, directory):
|
|
| 210 |
+ fullpath = os.path.join(directory, self.path)
|
|
| 211 |
+ url = self.source.translate_url(self.url)
|
|
| 212 |
+ |
|
| 213 |
+ self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
|
|
| 214 |
+ fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
|
|
| 215 |
+ fail_temporarily=True)
|
|
| 216 |
+ |
|
| 217 |
+ self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', url],
|
|
| 218 |
+ fail='Failed to add remote origin "{}"'.format(url),
|
|
| 219 |
+ cwd=fullpath)
|
|
| 220 |
+ |
|
| 221 |
+ self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
|
|
| 222 |
+ fail="Failed to checkout git ref {}".format(self.ref),
|
|
| 223 |
+ cwd=fullpath)
|
|
| 224 |
+ |
|
| 225 |
+ # List the submodules (path/url tuples) present at the given ref of this repo
|
|
| 226 |
+ def submodule_list(self):
|
|
| 227 |
+ modules = "{}:{}".format(self.ref, GIT_MODULES)
|
|
| 228 |
+ exit_code, output = self.source.check_output(
|
|
| 229 |
+ [self.source.host_git, 'show', modules], cwd=self.mirror)
|
|
| 230 |
+ |
|
| 231 |
+ # If git show reports error code 128 here, we take it to mean there is
|
|
| 232 |
+ # no .gitmodules file to display for the given revision.
|
|
| 233 |
+ if exit_code == 128:
|
|
| 234 |
+ return
|
|
| 235 |
+ elif exit_code != 0:
|
|
| 236 |
+ raise SourceError(
|
|
| 237 |
+ "{plugin}: Failed to show gitmodules at ref {ref}".format(
|
|
| 238 |
+ plugin=self, ref=self.ref))
|
|
| 239 |
+ |
|
| 240 |
+ content = '\n'.join([l.strip() for l in output.splitlines()])
|
|
| 241 |
+ |
|
| 242 |
+ io = StringIO(content)
|
|
| 243 |
+ parser = RawConfigParser()
|
|
| 244 |
+ parser.read_file(io)
|
|
| 245 |
+ |
|
| 246 |
+ for section in parser.sections():
|
|
| 247 |
+ # validate section name against the 'submodule "foo"' pattern
|
|
| 248 |
+ if re.match(r'submodule "(.*)"', section):
|
|
| 249 |
+ path = parser.get(section, 'path')
|
|
| 250 |
+ url = parser.get(section, 'url')
|
|
| 251 |
+ |
|
| 252 |
+ yield (path, url)
|
|
| 253 |
+ |
|
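For reference, submodule_list() is parsing standard `.gitmodules` content of this shape (repository details illustrative):

    [submodule "extras/libfoo"]
        path = extras/libfoo
        url = https://example.com/libfoo.git

Each section matching the 'submodule "..."' pattern yields one (path, url) pair.
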
| 254 |
+ # Fetch the ref which this mirror requires its submodule to have,
|
|
| 255 |
+ # at the given ref of this mirror.
|
|
| 256 |
+ def submodule_ref(self, submodule, ref=None):
|
|
| 257 |
+ if not ref:
|
|
| 258 |
+ ref = self.ref
|
|
| 259 |
+ |
|
| 260 |
+ # list objects in the parent repo tree to find the commit
|
|
| 261 |
+ # object that corresponds to the submodule
|
|
| 262 |
+ _, output = self.source.check_output([self.source.host_git, 'ls-tree', ref, submodule],
|
|
| 263 |
+ fail="ls-tree failed for commit {} and submodule: {}".format(
|
|
| 264 |
+ ref, submodule),
|
|
| 265 |
+ cwd=self.mirror)
|
|
| 266 |
+ |
|
| 267 |
+ # read the commit hash from the output
|
|
| 268 |
+ fields = output.split()
|
|
| 269 |
+ if len(fields) >= 2 and fields[1] == 'commit':
|
|
| 270 |
+ submodule_commit = output.split()[2]
|
|
| 271 |
+ |
|
| 272 |
+ # fail if the commit hash is invalid
|
|
| 273 |
+ if len(submodule_commit) != 40:
|
|
| 274 |
+ raise SourceError("{}: Error reading commit information for submodule '{}'"
|
|
| 275 |
+ .format(self.source, submodule))
|
|
| 276 |
+ |
|
| 277 |
+ return submodule_commit
|
|
| 278 |
+ |
|
| 279 |
+ else:
|
|
| 280 |
+ detail = "The submodule '{}' is defined either in the BuildStream source\n".format(submodule) + \
|
|
| 281 |
+ "definition, or in a .gitmodules file. But the submodule was never added to the\n" + \
|
|
| 282 |
+ "underlying git repository with `git submodule add`."
|
|
| 283 |
+ |
|
| 284 |
+ self.source.warn("{}: Ignoring inconsistent submodule '{}'"
|
|
| 285 |
+ .format(self.source, submodule), detail=detail,
|
|
| 286 |
+ warning_token=WARN_INCONSISTENT_SUBMODULE)
|
|
| 287 |
+ |
|
| 288 |
+ return None
|
|
| 289 |
+ |
|
| 290 |
+ def _rebuild_git(self, fullpath):
|
|
| 291 |
+ if not self.tags:
|
|
| 292 |
+ return
|
|
| 293 |
+ |
|
| 294 |
+ with self.source.tempdir() as tmpdir:
|
|
| 295 |
+ included = set()
|
|
| 296 |
+ shallow = set()
|
|
| 297 |
+ for _, commit_ref, _ in self.tags:
|
|
| 298 |
+ |
|
| 299 |
+ _, out = self.source.check_output([self.source.host_git, 'rev-list',
|
|
| 300 |
+ '--boundary', '{}..{}'.format(commit_ref, self.ref)],
|
|
| 301 |
+ fail="Failed to get git history {}..{} in directory: {}"
|
|
| 302 |
+ .format(commit_ref, self.ref, fullpath),
|
|
| 303 |
+ fail_temporarily=True,
|
|
| 304 |
+ cwd=self.mirror)
|
|
| 305 |
+ for line in out.splitlines():
|
|
| 306 |
+ rev = line.lstrip('-')
|
|
| 307 |
+ if line[0] == '-':
|
|
| 308 |
+ shallow.add(rev)
|
|
| 309 |
+ else:
|
|
| 310 |
+ included.add(rev)
|
|
| 311 |
+ |
|
| 312 |
+ shallow -= included
|
|
| 313 |
+ included |= shallow
|
|
| 314 |
+ |
|
| 315 |
+ self.source.call([self.source.host_git, 'init'],
|
|
| 316 |
+ fail="Cannot initialize git repository: {}".format(fullpath),
|
|
| 317 |
+ cwd=fullpath)
|
|
| 318 |
+ |
|
| 319 |
+ for rev in included:
|
|
| 320 |
+ with TemporaryFile(dir=tmpdir) as commit_file:
|
|
| 321 |
+ self.source.call([self.source.host_git, 'cat-file', 'commit', rev],
|
|
| 322 |
+ stdout=commit_file,
|
|
| 323 |
+ fail="Failed to get commit {}".format(rev),
|
|
| 324 |
+ cwd=self.mirror)
|
|
| 325 |
+ commit_file.seek(0, 0)
|
|
| 326 |
+ self.source.call([self.source.host_git, 'hash-object', '-w', '-t', 'commit', '--stdin'],
|
|
| 327 |
+ stdin=commit_file,
|
|
| 328 |
+ fail="Failed to add commit object {}".format(rev),
|
|
| 329 |
+ cwd=fullpath)
|
|
| 330 |
+ |
|
| 331 |
+ with open(os.path.join(fullpath, '.git', 'shallow'), 'w') as shallow_file:
|
|
| 332 |
+ for rev in shallow:
|
|
| 333 |
+ shallow_file.write('{}\n'.format(rev))
|
|
| 334 |
+ |
|
| 335 |
+ for tag, commit_ref, annotated in self.tags:
|
|
| 336 |
+ if annotated:
|
|
| 337 |
+ with TemporaryFile(dir=tmpdir) as tag_file:
|
|
| 338 |
+ tag_data = 'object {}\ntype commit\ntag {}\n'.format(commit_ref, tag)
|
|
| 339 |
+ tag_file.write(tag_data.encode('ascii'))
|
|
| 340 |
+ tag_file.seek(0, 0)
|
|
| 341 |
+ _, tag_ref = self.source.check_output(
|
|
| 342 |
+ [self.source.host_git, 'hash-object', '-w', '-t',
|
|
| 343 |
+ 'tag', '--stdin'],
|
|
| 344 |
+ stdin=tag_file,
|
|
| 345 |
+ fail="Failed to add tag object {}".format(tag),
|
|
| 346 |
+ cwd=fullpath)
|
|
| 347 |
+ |
|
| 348 |
+ self.source.call([self.source.host_git, 'tag', tag, tag_ref.strip()],
|
|
| 349 |
+ fail="Failed to tag: {}".format(tag),
|
|
| 350 |
+ cwd=fullpath)
|
|
| 351 |
+ else:
|
|
| 352 |
+ self.source.call([self.source.host_git, 'tag', tag, commit_ref],
|
|
| 353 |
+ fail="Failed to tag: {}".format(tag),
|
|
| 354 |
+ cwd=fullpath)
|
|
| 355 |
+ |
|
| 356 |
+ with open(os.path.join(fullpath, '.git', 'HEAD'), 'w') as head:
|
|
| 357 |
+ self.source.call([self.source.host_git, 'rev-parse', self.ref],
|
|
| 358 |
+ stdout=head,
|
|
| 359 |
+ fail="Failed to parse commit {}".format(self.ref),
|
|
| 360 |
+ cwd=self.mirror)
|
|
| 361 |
+ |
|
| 362 |
+ |
|
| 363 |
+class _GitSourceBase(Source):
|
|
| 364 |
+ # pylint: disable=attribute-defined-outside-init
|
|
| 365 |
+ |
|
| 366 |
+ def configure(self, node):
|
|
| 367 |
+ ref = self.node_get_member(node, str, 'ref', None)
|
|
| 368 |
+ |
|
| 369 |
+ config_keys = ['url', 'track', 'ref', 'submodules',
|
|
| 370 |
+ 'checkout-submodules', 'ref-format',
|
|
| 371 |
+ 'track-tags', 'tags']
|
|
| 372 |
+ self.node_validate(node, config_keys + Source.COMMON_CONFIG_KEYS)
|
|
| 373 |
+ |
|
| 374 |
+ tags_node = self.node_get_member(node, list, 'tags', [])
|
|
| 375 |
+ for tag_node in tags_node:
|
|
| 376 |
+ self.node_validate(tag_node, ['tag', 'commit', 'annotated'])
|
|
| 377 |
+ |
|
| 378 |
+ tags = self._load_tags(node)
|
|
| 379 |
+ self.track_tags = self.node_get_member(node, bool, 'track-tags', False)
|
|
| 380 |
+ |
|
| 381 |
+ self.original_url = self.node_get_member(node, str, 'url')
|
|
| 382 |
+ self.mirror = GitMirror(self, '', self.original_url, ref, tags=tags, primary=True)
|
|
| 383 |
+ self.tracking = self.node_get_member(node, str, 'track', None)
|
|
| 384 |
+ |
|
| 385 |
+ self.ref_format = self.node_get_member(node, str, 'ref-format', 'sha1')
|
|
| 386 |
+ if self.ref_format not in ['sha1', 'git-describe']:
|
|
| 387 |
+ provenance = self.node_provenance(node, member_name='ref-format')
|
|
| 388 |
+ raise SourceError("{}: Unexpected value for ref-format: {}".format(provenance, self.ref_format))
|
|
| 389 |
+ |
|
| 390 |
+ # At this point we now know if the source has a ref and/or a track.
|
|
| 391 |
+ # If it is missing both then we will be unable to track or build.
|
|
| 392 |
+ if self.mirror.ref is None and self.tracking is None:
|
|
| 393 |
+ raise SourceError("{}: Git sources require a ref and/or track".format(self),
|
|
| 394 |
+ reason="missing-track-and-ref")
|
|
| 395 |
+ |
|
| 396 |
+ self.checkout_submodules = self.node_get_member(node, bool, 'checkout-submodules', True)
|
|
| 397 |
+ self.submodules = []
|
|
| 398 |
+ |
|
| 399 |
+ # Parse a dict of submodule overrides, stored in the submodule_overrides
|
|
| 400 |
+ # and submodule_checkout_overrides dictionaries.
|
|
| 401 |
+ self.submodule_overrides = {}
|
|
| 402 |
+ self.submodule_checkout_overrides = {}
|
|
| 403 |
+ modules = self.node_get_member(node, Mapping, 'submodules', {})
|
|
| 404 |
+ for path, _ in self.node_items(modules):
|
|
| 405 |
+ submodule = self.node_get_member(modules, Mapping, path)
|
|
| 406 |
+ url = self.node_get_member(submodule, str, 'url', None)
|
|
| 407 |
+ |
|
| 408 |
+ # Make sure to mark all URLs that are specified in the configuration
|
|
| 409 |
+ if url:
|
|
| 410 |
+ self.mark_download_url(url, primary=False)
|
|
| 411 |
+ |
|
| 412 |
+ self.submodule_overrides[path] = url
|
|
| 413 |
+ if 'checkout' in submodule:
|
|
| 414 |
+ checkout = self.node_get_member(submodule, bool, 'checkout')
|
|
| 415 |
+ self.submodule_checkout_overrides[path] = checkout
|
|
| 416 |
+ |
|
| 417 |
+ self.mark_download_url(self.original_url)
|
|
| 418 |
+ |
|
| 419 |
+ def preflight(self):
|
|
| 420 |
+ # Check if git is installed, get the binary at the same time
|
|
| 421 |
+ self.host_git = utils.get_host_tool('git')
|
|
| 422 |
+ |
|
| 423 |
+ def get_unique_key(self):
|
|
| 424 |
+ # Here we want to encode the local name of the repository and
|
|
| 425 |
+ # the ref; if the user changes the alias to fetch the same sources
|
|
| 426 |
+ # from another location, it should not affect the cache key.
|
|
| 427 |
+ key = [self.original_url, self.mirror.ref]
|
|
| 428 |
+ if self.mirror.tags:
|
|
| 429 |
+ tags = {tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags}
|
|
| 430 |
+ key.append({'tags': tags})
|
|
| 431 |
+ |
|
| 432 |
+ # Only modify the cache key with checkout_submodules if it's something
|
|
| 433 |
+ # other than the default behaviour.
|
|
| 434 |
+ if self.checkout_submodules is False:
|
|
| 435 |
+ key.append({"checkout_submodules": self.checkout_submodules})
|
|
| 436 |
+ |
|
| 437 |
+ # We want the cache key to change if the source was
|
|
| 438 |
+ # configured differently, and submodules count.
|
|
| 439 |
+ if self.submodule_overrides:
|
|
| 440 |
+ key.append(self.submodule_overrides)
|
|
| 441 |
+ |
|
| 442 |
+ if self.submodule_checkout_overrides:
|
|
| 443 |
+ key.append({"submodule_checkout_overrides": self.submodule_checkout_overrides})
|
|
| 444 |
+ |
|
| 445 |
+ return key
|
|
| 446 |
+ |
|
| 447 |
+ def get_consistency(self):
|
|
| 448 |
+ if self._have_all_refs():
|
|
| 449 |
+ return Consistency.CACHED
|
|
| 450 |
+ elif self.mirror.ref is not None:
|
|
| 451 |
+ return Consistency.RESOLVED
|
|
| 452 |
+ return Consistency.INCONSISTENT
|
|
| 453 |
+ |
|
| 454 |
+ def load_ref(self, node):
|
|
| 455 |
+ self.mirror.ref = self.node_get_member(node, str, 'ref', None)
|
|
| 456 |
+ self.mirror.tags = self._load_tags(node)
|
|
| 457 |
+ |
|
| 458 |
+ def get_ref(self):
|
|
| 459 |
+ return self.mirror.ref, self.mirror.tags
|
|
| 460 |
+ |
|
| 461 |
+ def set_ref(self, ref_data, node):
|
|
| 462 |
+ if not ref_data:
|
|
| 463 |
+ self.mirror.ref = None
|
|
| 464 |
+ if 'ref' in node:
|
|
| 465 |
+ del node['ref']
|
|
| 466 |
+ self.mirror.tags = []
|
|
| 467 |
+ if 'tags' in node:
|
|
| 468 |
+ del node['tags']
|
|
| 469 |
+ else:
|
|
| 470 |
+ ref, tags = ref_data
|
|
| 471 |
+ node['ref'] = self.mirror.ref = ref
|
|
| 472 |
+ self.mirror.tags = tags
|
|
| 473 |
+ if tags:
|
|
| 474 |
+ node['tags'] = []
|
|
| 475 |
+ for tag, commit_ref, annotated in tags:
|
|
| 476 |
+ data = {'tag': tag,
|
|
| 477 |
+ 'commit': commit_ref,
|
|
| 478 |
+ 'annotated': annotated}
|
|
| 479 |
+ node['tags'].append(data)
|
|
| 480 |
+ else:
|
|
| 481 |
+ if 'tags' in node:
|
|
| 482 |
+ del node['tags']
|
|
| 483 |
+ |
|
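For illustration, after tracking with track-tags enabled, set_ref() serializes a source roughly as follows (values hypothetical; only the keys are dictated by the code above):

    kind: git
    url: upstream:repo.git
    track: master
    ref: <full sha1>
    tags:
    - tag: 1.2.0
      commit: <full sha1>
      annotated: true
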
| 484 |
+ def track(self):
|
|
| 485 |
+ |
|
| 486 |
+ # If self.tracking is not specified, it's not an error; just silently return
|
|
| 487 |
+ if not self.tracking:
|
|
| 488 |
+ # Is there a better way to check if a ref is given?
|
|
| 489 |
+ if self.mirror.ref is None:
|
|
| 490 |
+ detail = 'Without a tracking branch, the ref cannot be updated. Please ' + \
|
|
| 491 |
+ 'provide a ref or a track.'
|
|
| 492 |
+ raise SourceError("{}: No track or ref".format(self),
|
|
| 493 |
+ detail=detail, reason="track-attempt-no-track")
|
|
| 494 |
+ return None
|
|
| 495 |
+ |
|
| 496 |
+ # Resolve the URL for the message
|
|
| 497 |
+ resolved_url = self.translate_url(self.mirror.url)
|
|
| 498 |
+ with self.timed_activity("Tracking {} from {}"
|
|
| 499 |
+ .format(self.tracking, resolved_url),
|
|
| 500 |
+ silent_nested=True):
|
|
| 501 |
+ self.mirror.ensure()
|
|
| 502 |
+ self.mirror._fetch()
|
|
| 503 |
+ |
|
| 504 |
+ # Update self.mirror.ref and node.ref from the self.tracking branch
|
|
| 505 |
+ ret = self.mirror.latest_commit_with_tags(self.tracking, self.track_tags)
|
|
| 506 |
+ |
|
| 507 |
+ return ret
|
|
| 508 |
+ |
|
| 509 |
+ def init_workspace(self, directory):
|
|
| 510 |
+ # XXX: may wish to refactor this; there is some code duplicated with stage()
|
|
| 511 |
+ self._refresh_submodules()
|
|
| 512 |
+ |
|
| 513 |
+ with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
|
|
| 514 |
+ self.mirror.init_workspace(directory)
|
|
| 515 |
+ for mirror in self.submodules:
|
|
| 516 |
+ mirror.init_workspace(directory)
|
|
| 517 |
+ |
|
| 518 |
+ def stage(self, directory):
|
|
| 519 |
+ |
|
| 520 |
+ # Need to refresh submodule list here again, because
|
|
| 521 |
+ # it's possible that we did not load in the main process
|
|
| 522 |
+ # with submodules present (source needed fetching) and
|
|
| 523 |
+ # we may not know about the submodule yet come time to build.
|
|
| 524 |
+ #
|
|
| 525 |
+ self._refresh_submodules()
|
|
| 526 |
+ |
|
| 527 |
+ # Stage the main repo in the specified directory
|
|
| 528 |
+ #
|
|
| 529 |
+ with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
|
|
| 530 |
+ self.mirror.stage(directory)
|
|
| 531 |
+ for mirror in self.submodules:
|
|
| 532 |
+ mirror.stage(directory)
|
|
| 533 |
+ |
|
| 534 |
+ def get_source_fetchers(self):
|
|
| 535 |
+ yield self.mirror
|
|
| 536 |
+ self._refresh_submodules()
|
|
| 537 |
+ for submodule in self.submodules:
|
|
| 538 |
+ yield submodule
|
|
| 539 |
+ |
|
| 540 |
+ def validate_cache(self):
|
|
| 541 |
+ discovered_submodules = {}
|
|
| 542 |
+ unlisted_submodules = []
|
|
| 543 |
+ invalid_submodules = []
|
|
| 544 |
+ |
|
| 545 |
+ for path, url in self.mirror.submodule_list():
|
|
| 546 |
+ discovered_submodules[path] = url
|
|
| 547 |
+ if self._ignore_submodule(path):
|
|
| 548 |
+ continue
|
|
| 549 |
+ |
|
| 550 |
+ override_url = self.submodule_overrides.get(path)
|
|
| 551 |
+ if not override_url:
|
|
| 552 |
+ unlisted_submodules.append((path, url))
|
|
| 553 |
+ |
|
| 554 |
+ # Warn about submodules which are explicitly configured but do not exist
|
|
| 555 |
+ for path, url in self.submodule_overrides.items():
|
|
| 556 |
+ if path not in discovered_submodules:
|
|
| 557 |
+ invalid_submodules.append((path, url))
|
|
| 558 |
+ |
|
| 559 |
+ if invalid_submodules:
|
|
| 560 |
+ detail = []
|
|
| 561 |
+ for path, url in invalid_submodules:
|
|
| 562 |
+ detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
|
|
| 563 |
+ |
|
| 564 |
+ self.warn("{}: Invalid submodules specified".format(self),
|
|
| 565 |
+ warning_token=WARN_INVALID_SUBMODULE,
|
|
| 566 |
+ detail="The following submodules are specified in the source "
|
|
| 567 |
+ "description but do not exist according to the repository\n\n" +
|
|
| 568 |
+ "\n".join(detail))
|
|
| 569 |
+ |
|
| 570 |
+ # Warn about submodules which exist but have not been explicitly configured
|
|
| 571 |
+ if unlisted_submodules:
|
|
| 572 |
+ detail = []
|
|
| 573 |
+ for path, url in unlisted_submodules:
|
|
| 574 |
+ detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
|
|
| 575 |
+ |
|
| 576 |
+ self.warn("{}: Unlisted submodules exist".format(self),
|
|
| 577 |
+ warning_token=WARN_UNLISTED_SUBMODULE,
|
|
| 578 |
+ detail="The following submodules exist but are not specified " +
|
|
| 579 |
+ "in the source description\n\n" +
|
|
| 580 |
+ "\n".join(detail))
|
|
| 581 |
+ |
|
| 582 |
+ # Assert that the ref exists in the track tag/branch, if track has been specified.
|
|
| 583 |
+ ref_in_track = False
|
|
| 584 |
+ if self.tracking:
|
|
| 585 |
+ _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
|
|
| 586 |
+ '--contains', self.mirror.ref],
|
|
| 587 |
+ cwd=self.mirror.mirror)
|
|
| 588 |
+ if branch:
|
|
| 589 |
+ ref_in_track = True
|
|
| 590 |
+ else:
|
|
| 591 |
+ _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
|
|
| 592 |
+ '--contains', self.mirror.ref],
|
|
| 593 |
+ cwd=self.mirror.mirror)
|
|
| 594 |
+ if tag:
|
|
| 595 |
+ ref_in_track = True
|
|
| 596 |
+ |
|
| 597 |
+ if not ref_in_track:
|
|
| 598 |
+ detail = "The ref provided for the element does not exist locally " + \
|
|
| 599 |
+ "in the provided track branch / tag '{}'.\n".format(self.tracking) + \
|
|
| 600 |
+ "You may wish to track the element to update the ref from '{}' ".format(self.tracking) + \
|
|
| 601 |
+ "with `bst track`,\n" + \
|
|
| 602 |
+ "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
|
|
| 603 |
+ |
|
| 604 |
+ self.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
|
|
| 605 |
+ .format(self, self.mirror.ref, self.tracking, self.mirror.url),
|
|
| 606 |
+ detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
|
|
| 607 |
+ |
|
| 608 |
+ ###########################################################
|
|
| 609 |
+ # Local Functions #
|
|
| 610 |
+ ###########################################################
|
|
| 611 |
+ |
|
| 612 |
+ def _have_all_refs(self):
|
|
| 613 |
+ if not self.mirror.has_ref():
|
|
| 614 |
+ return False
|
|
| 615 |
+ |
|
| 616 |
+ self._refresh_submodules()
|
|
| 617 |
+ for mirror in self.submodules:
|
|
| 618 |
+ if not os.path.exists(mirror.mirror):
|
|
| 619 |
+ return False
|
|
| 620 |
+ if not mirror.has_ref():
|
|
| 621 |
+ return False
|
|
| 622 |
+ |
|
| 623 |
+ return True
|
|
| 624 |
+ |
|
| 625 |
+ # Refreshes the GitMirror objects for submodules
|
|
| 626 |
+ #
|
|
| 627 |
+ # Assumes that we have our mirror and we have the ref which we point to
|
|
| 628 |
+ #
|
|
| 629 |
+ def _refresh_submodules(self):
|
|
| 630 |
+ self.mirror.ensure()
|
|
| 631 |
+ submodules = []
|
|
| 632 |
+ |
|
| 633 |
+ for path, url in self.mirror.submodule_list():
|
|
| 634 |
+ |
|
| 635 |
+ # Completely ignore submodules which are disabled for checkout
|
|
| 636 |
+ if self._ignore_submodule(path):
|
|
| 637 |
+ continue
|
|
| 638 |
+ |
|
| 639 |
+ # Allow configuration to override the upstream
|
|
| 640 |
+ # location of the submodules.
|
|
| 641 |
+ override_url = self.submodule_overrides.get(path)
|
|
| 642 |
+ if override_url:
|
|
| 643 |
+ url = override_url
|
|
| 644 |
+ |
|
| 645 |
+ ref = self.mirror.submodule_ref(path)
|
|
| 646 |
+ if ref is not None:
|
|
| 647 |
+ mirror = GitMirror(self, path, url, ref)
|
|
| 648 |
+ submodules.append(mirror)
|
|
| 649 |
+ |
|
| 650 |
+ self.submodules = submodules
|
|
| 651 |
+ |
|
| 652 |
+ def _load_tags(self, node):
|
|
| 653 |
+ tags = []
|
|
| 654 |
+ tags_node = self.node_get_member(node, list, 'tags', [])
|
|
| 655 |
+ for tag_node in tags_node:
|
|
| 656 |
+ tag = self.node_get_member(tag_node, str, 'tag')
|
|
| 657 |
+ commit_ref = self.node_get_member(tag_node, str, 'commit')
|
|
| 658 |
+ annotated = self.node_get_member(tag_node, bool, 'annotated')
|
|
| 659 |
+ tags.append((tag, commit_ref, annotated))
|
|
| 660 |
+ return tags
|
|
| 661 |
+ |
|
| 662 |
+ # Checks whether the plugin configuration has explicitly
|
|
| 663 |
+ # configured this submodule to be ignored
|
|
| 664 |
+ def _ignore_submodule(self, path):
|
|
| 665 |
+ try:
|
|
| 666 |
+ checkout = self.submodule_checkout_overrides[path]
|
|
| 667 |
+ except KeyError:
|
|
| 668 |
+ checkout = self.checkout_submodules
|
|
| 669 |
+ |
|
| 670 |
+ return not checkout
|
| ... | ... | @@ -677,8 +677,9 @@ class Project(): |
| 677 | 677 |
#
|
| 678 | 678 |
def _find_project_dir(self, directory):
|
| 679 | 679 |
workspace_element = None
|
| 680 |
+ config_filenames = [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
|
|
| 680 | 681 |
found_directory, filename = utils._search_upward_for_files(
|
| 681 |
- directory, [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
|
|
| 682 |
+ directory, config_filenames
|
|
| 682 | 683 |
)
|
| 683 | 684 |
if filename == _PROJECT_CONF_FILE:
|
| 684 | 685 |
project_directory = found_directory
|
| ... | ... | @@ -691,8 +692,8 @@ class Project(): |
| 691 | 692 |
else:
|
| 692 | 693 |
raise LoadError(
|
| 693 | 694 |
LoadErrorReason.MISSING_PROJECT_CONF,
|
| 694 |
- '{} not found in current directory or any of its parent directories'
|
|
| 695 |
- .format(_PROJECT_CONF_FILE))
|
|
| 695 |
+ "None of {names} found in '{path}' or any of its parent directories"
|
|
| 696 |
+ .format(names=config_filenames, path=directory))
|
|
| 696 | 697 |
|
| 697 | 698 |
return project_directory, workspace_element
|
| 698 | 699 |
|
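Assuming the usual constant values for these filenames (project.conf and the .bstproject.yaml workspace file), the improved message reads, e.g.:

    None of ['project.conf', '.bstproject.yaml'] found in '/path/to/dir' or any of its parent directories
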
| ... | ... | @@ -40,10 +40,7 @@ class FetchQueue(Queue): |
| 40 | 40 |
self._skip_cached = skip_cached
|
| 41 | 41 |
|
| 42 | 42 |
def process(self, element):
|
| 43 |
- previous_sources = []
|
|
| 44 |
- for source in element.sources():
|
|
| 45 |
- source._fetch(previous_sources)
|
|
| 46 |
- previous_sources.append(source)
|
|
| 43 |
+ element._fetch()
|
|
| 47 | 44 |
|
| 48 | 45 |
def status(self, element):
|
| 49 | 46 |
# state of dependencies may have changed, recalculate element state
|
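The per-source loop has not disappeared: it moves into the new Element._fetch() (see the element.py hunk further below), which additionally skips sources that are already cached.
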
| ... | ... | @@ -28,6 +28,7 @@ import tarfile |
| 28 | 28 |
import tempfile
|
| 29 | 29 |
from contextlib import contextmanager, suppress
|
| 30 | 30 |
|
| 31 |
+from ._artifactcache import ArtifactCacheSpec
|
|
| 31 | 32 |
from ._exceptions import StreamError, ImplError, BstError, set_last_task_error
|
| 32 | 33 |
from ._message import Message, MessageType
|
| 33 | 34 |
from ._scheduler import Scheduler, SchedStatus, TrackQueue, FetchQueue, BuildQueue, PullQueue, PushQueue
|
| ... | ... | @@ -124,6 +125,7 @@ class Stream(): |
| 124 | 125 |
# mounts (list of HostMount): Additional directories to mount into the sandbox
|
| 125 | 126 |
# isolate (bool): Whether to isolate the environment like we do in builds
|
| 126 | 127 |
# command (list): An argv to launch in the sandbox, or None
|
| 128 |
+ # usebuildtree (bool): Whether to use a buildtree as the source.
|
|
| 127 | 129 |
#
|
| 128 | 130 |
# Returns:
|
| 129 | 131 |
# (int): The exit code of the launched shell
|
| ... | ... | @@ -132,7 +134,8 @@ class Stream(): |
| 132 | 134 |
directory=None,
|
| 133 | 135 |
mounts=None,
|
| 134 | 136 |
isolate=False,
|
| 135 |
- command=None):
|
|
| 137 |
+ command=None,
|
|
| 138 |
+ usebuildtree=False):
|
|
| 136 | 139 |
|
| 137 | 140 |
# Assert we have everything we need built, unless the directory is specified
|
| 138 | 141 |
# in which case we just blindly trust the directory, using the element
|
| ... | ... | @@ -147,7 +150,8 @@ class Stream(): |
| 147 | 150 |
raise StreamError("Elements need to be built or downloaded before staging a shell environment",
|
| 148 | 151 |
detail="\n".join(missing_deps))
|
| 149 | 152 |
|
| 150 |
- return element._shell(scope, directory, mounts=mounts, isolate=isolate, prompt=prompt, command=command)
|
|
| 153 |
+ return element._shell(scope, directory, mounts=mounts, isolate=isolate, prompt=prompt, command=command,
|
|
| 154 |
+ usebuildtree=usebuildtree)
|
|
| 151 | 155 |
|
| 152 | 156 |
# build()
|
| 153 | 157 |
#
|
| ... | ... | @@ -305,6 +309,7 @@ class Stream(): |
| 305 | 309 |
selection=selection,
|
| 306 | 310 |
use_artifact_config=use_config,
|
| 307 | 311 |
artifact_remote_url=remote,
|
| 312 |
+ artifact_remote_can_push=False,
|
|
| 308 | 313 |
fetch_subprojects=True)
|
| 309 | 314 |
|
| 310 | 315 |
if not self._artifacts.has_fetch_remotes():
|
| ... | ... | @@ -343,6 +348,7 @@ class Stream(): |
| 343 | 348 |
selection=selection,
|
| 344 | 349 |
use_artifact_config=use_config,
|
| 345 | 350 |
artifact_remote_url=remote,
|
| 351 |
+ artifact_remote_can_push=True,
|
|
| 346 | 352 |
fetch_subprojects=True)
|
| 347 | 353 |
|
| 348 | 354 |
if not self._artifacts.has_push_remotes():
|
| ... | ... | @@ -849,7 +855,8 @@ class Stream(): |
| 849 | 855 |
# track_except_targets (list of str): Specified targets to except from fetching
|
| 850 | 856 |
# track_cross_junctions (bool): Whether tracking should cross junction boundaries
|
| 851 | 857 |
# use_artifact_config (bool): Whether to initialize artifacts with the config
|
| 852 |
- # artifact_remote_url (bool): A remote url for initializing the artifacts
|
|
| 858 |
+ # artifact_remote_url (str): A remote url for initializing the artifacts
|
|
| 859 |
+ # artifact_remote_can_push (bool): Whether `artifact_remote_url` can be used to push
|
|
| 853 | 860 |
# fetch_subprojects (bool): Whether to fetch subprojects while loading
|
| 854 | 861 |
#
|
| 855 | 862 |
# Returns:
|
| ... | ... | @@ -864,6 +871,7 @@ class Stream(): |
| 864 | 871 |
track_cross_junctions=False,
|
| 865 | 872 |
use_artifact_config=False,
|
| 866 | 873 |
artifact_remote_url=None,
|
| 874 |
+ artifact_remote_can_push=False,
|
|
| 867 | 875 |
fetch_subprojects=False,
|
| 868 | 876 |
dynamic_plan=False):
|
| 869 | 877 |
|
| ... | ... | @@ -927,12 +935,20 @@ class Stream(): |
| 927 | 935 |
self._pipeline.resolve_elements(track_selected)
|
| 928 | 936 |
return [], track_selected
|
| 929 | 937 |
|
| 930 |
- # ArtifactCache.setup_remotes expects all projects to be fully loaded
|
|
| 931 |
- for project in self._context.get_projects():
|
|
| 932 |
- project.ensure_fully_loaded()
|
|
| 933 |
- |
|
| 938 |
+ if use_artifact_config:
|
|
| 939 |
+ # ArtifactCache.get_remotes_from_projects expects all projects to be
|
|
| 940 |
+ # fully loaded
|
|
| 941 |
+ for project in self._context.get_projects():
|
|
| 942 |
+ project.ensure_fully_loaded()
|
|
| 943 |
+ remotes = self._artifacts.get_remotes_from_projects()
|
|
| 944 |
+ else:
|
|
| 945 |
+ # Build the ArtifactCacheSpec instance based on `--remote`
|
|
| 946 |
+ remotes = [(
|
|
| 947 |
+ [ArtifactCacheSpec(artifact_remote_url, push=artifact_remote_can_push)],
|
|
| 948 |
+ None
|
|
| 949 |
+ )]
|
|
| 934 | 950 |
# Connect to remote caches, this needs to be done before resolving element state
|
| 935 |
- self._artifacts.setup_remotes(use_config=use_artifact_config, remote_url=artifact_remote_url)
|
|
| 951 |
+ self._artifacts.setup_remotes(remotes=remotes)
|
|
| 936 | 952 |
|
| 937 | 953 |
# Now move on to loading primary selection.
|
| 938 | 954 |
#
|
| ... | ... | @@ -1260,7 +1276,7 @@ class Stream(): |
| 1260 | 1276 |
required_list = []
|
| 1261 | 1277 |
|
| 1262 | 1278 |
# If context is set to not pull buildtrees, or no fetch remotes, return empty list
|
| 1263 |
- if not (self._context.pull_buildtrees or self._artifacts.has_fetch_remotes()):
|
|
| 1279 |
+ if not self._context.pull_buildtrees or not self._artifacts.has_fetch_remotes():
|
|
| 1264 | 1280 |
return required_list
|
| 1265 | 1281 |
|
| 1266 | 1282 |
for element in elements:
|
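Note the logic fix here: the old condition `not (A or B)` returned early only when pull-buildtrees was disabled *and* no fetch remotes existed, whereas pulling a buildtree requires both; the corrected `not A or not B` returns early as soon as either prerequisite is missing.
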
| ... | ... | @@ -112,14 +112,6 @@ logging: |
| 112 | 112 |
#
|
| 113 | 113 |
prompt:
|
| 114 | 114 |
|
| 115 |
- # Whether to create a project with 'bst init' if we are invoked outside of a
|
|
| 116 |
- # directory where we can resolve the project.
|
|
| 117 |
- #
|
|
| 118 |
- # ask - Prompt the user to choose.
|
|
| 119 |
- # no - Never create the project.
|
|
| 120 |
- #
|
|
| 121 |
- auto-init: ask
|
|
| 122 |
- |
|
| 123 | 115 |
# Whether to really proceed with 'bst workspace close --remove-dir' removing
|
| 124 | 116 |
# a workspace directory, potentially losing changes.
|
| 125 | 117 |
#
|
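After this removal, the remaining prompt options in a user's buildstream.conf look like this (values illustrative; 'ask' is accepted for each per the Context validation above):

    prompt:
      really-workspace-close-remove-dir: ask
      really-workspace-close-project-inaccessible: ask
      really-workspace-reset-hard: ask
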
| ... | ... | @@ -1338,11 +1338,12 @@ class Element(Plugin): |
| 1338 | 1338 |
# is used to stage things by the `bst checkout` codepath
|
| 1339 | 1339 |
#
|
| 1340 | 1340 |
@contextmanager
|
| 1341 |
- def _prepare_sandbox(self, scope, directory, shell=False, integrate=True):
|
|
| 1341 |
+ def _prepare_sandbox(self, scope, directory, shell=False, integrate=True, usebuildtree=False):
|
|
| 1342 | 1342 |
# bst shell and bst checkout require a local sandbox.
|
| 1343 | 1343 |
bare_directory = True if directory else False
|
| 1344 | 1344 |
with self.__sandbox(directory, config=self.__sandbox_config, allow_remote=False,
|
| 1345 | 1345 |
bare_directory=bare_directory) as sandbox:
|
| 1346 |
+ sandbox._usebuildtree = usebuildtree
|
|
| 1346 | 1347 |
|
| 1347 | 1348 |
# Configure always comes first, and we need it.
|
| 1348 | 1349 |
self.__configure_sandbox(sandbox)
|
| ... | ... | @@ -1386,7 +1387,7 @@ class Element(Plugin): |
| 1386 | 1387 |
# Stage all sources that need to be copied
|
| 1387 | 1388 |
sandbox_vroot = sandbox.get_virtual_directory()
|
| 1388 | 1389 |
host_vdirectory = sandbox_vroot.descend(directory.lstrip(os.sep).split(os.sep), create=True)
|
| 1389 |
- self._stage_sources_at(host_vdirectory, mount_workspaces=mount_workspaces)
|
|
| 1390 |
+ self._stage_sources_at(host_vdirectory, mount_workspaces=mount_workspaces, usebuildtree=sandbox._usebuildtree)
|
|
| 1390 | 1391 |
|
| 1391 | 1392 |
# _stage_sources_at():
|
| 1392 | 1393 |
#
|
| ... | ... | @@ -1395,10 +1396,10 @@ class Element(Plugin): |
| 1395 | 1396 |
# Args:
|
| 1396 | 1397 |
# vdirectory (:class:`.storage.Directory`): A virtual directory object to stage sources into.
|
| 1397 | 1398 |
# mount_workspaces (bool): mount workspaces if True, copy otherwise
|
| 1399 |
+ # usebuildtree (bool): use the element's build tree as its source.
|
|
| 1398 | 1400 |
#
|
| 1399 |
- def _stage_sources_at(self, vdirectory, mount_workspaces=True):
|
|
| 1401 |
+ def _stage_sources_at(self, vdirectory, mount_workspaces=True, usebuildtree=False):
|
|
| 1400 | 1402 |
with self.timed_activity("Staging sources", silent_nested=True):
|
| 1401 |
- |
|
| 1402 | 1403 |
if not isinstance(vdirectory, Directory):
|
| 1403 | 1404 |
vdirectory = FileBasedDirectory(vdirectory)
|
| 1404 | 1405 |
if not vdirectory.is_empty():
|
| ... | ... | @@ -1420,7 +1421,7 @@ class Element(Plugin): |
| 1420 | 1421 |
.format(workspace.get_absolute_path())):
|
| 1421 | 1422 |
workspace.stage(temp_staging_directory)
|
| 1422 | 1423 |
# Check if we have a cached buildtree to use
|
| 1423 |
- elif self._cached_buildtree():
|
|
| 1424 |
+ elif usebuildtree:
|
|
| 1424 | 1425 |
artifact_base, _ = self.__extract()
|
| 1425 | 1426 |
import_dir = os.path.join(artifact_base, 'buildtree')
|
| 1426 | 1427 |
else:
|
| ... | ... | @@ -1850,13 +1851,15 @@ class Element(Plugin): |
| 1850 | 1851 |
# isolate (bool): Whether to isolate the environment like we do in builds
|
| 1851 | 1852 |
# prompt (str): A suitable prompt string for PS1
|
| 1852 | 1853 |
# command (list): An argv to launch in the sandbox
|
| 1854 |
+ # usebuildtree (bool): Use the buildtree as its source
|
|
| 1853 | 1855 |
#
|
| 1854 | 1856 |
# Returns: Exit code
|
| 1855 | 1857 |
#
|
| 1856 | 1858 |
# If directory is not specified, one will be staged using scope
|
| 1857 |
- def _shell(self, scope=None, directory=None, *, mounts=None, isolate=False, prompt=None, command=None):
|
|
| 1859 |
+ def _shell(self, scope=None, directory=None, *, mounts=None, isolate=False, prompt=None, command=None,
|
|
| 1860 |
+ usebuildtree=False):
|
|
| 1858 | 1861 |
|
| 1859 |
- with self._prepare_sandbox(scope, directory, shell=True) as sandbox:
|
|
| 1862 |
+ with self._prepare_sandbox(scope, directory, shell=True, usebuildtree=usebuildtree) as sandbox:
|
|
| 1860 | 1863 |
environment = self.get_environment()
|
| 1861 | 1864 |
environment = copy.copy(environment)
|
| 1862 | 1865 |
flags = SandboxFlags.INTERACTIVE | SandboxFlags.ROOT_READ_ONLY
|
| ... | ... | @@ -2019,6 +2022,20 @@ class Element(Plugin): |
| 2019 | 2022 |
|
| 2020 | 2023 |
return True
|
| 2021 | 2024 |
|
| 2025 |
+ # _fetch()
|
|
| 2026 |
+ #
|
|
| 2027 |
+ # Fetch the element's sources.
|
|
| 2028 |
+ #
|
|
| 2029 |
+ # Raises:
|
|
| 2030 |
+ # SourceError: If one of the element sources has an error
|
|
| 2031 |
+ #
|
|
| 2032 |
+ def _fetch(self):
|
|
| 2033 |
+ previous_sources = []
|
|
| 2034 |
+ for source in self.sources():
|
|
| 2035 |
+ if source._get_consistency() < Consistency.CACHED:
|
|
| 2036 |
+ source._fetch(previous_sources)
|
|
| 2037 |
+ previous_sources.append(source)
|
|
| 2038 |
+ |
|
| 2022 | 2039 |
#############################################################
|
| 2023 | 2040 |
# Private Local Methods #
|
| 2024 | 2041 |
#############################################################
|
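Since _fetch() consults each source's consistency first, sources that are already cached locally are never fetched; the new tests/sources/no_fetch_cached.py test (with its always_cached test plugin) exercises precisely this path.
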
| ... | ... | @@ -156,652 +156,11 @@ This plugin also utilises the following configurable :class:`core warnings <buil |
| 156 | 156 |
found in the provided track in the element's git repository.
|
| 157 | 157 |
"""
|
| 158 | 158 |
|
| 159 |
-import os
|
|
| 160 |
-import re
|
|
| 161 |
-import shutil
|
|
| 162 |
-from collections.abc import Mapping
|
|
| 163 |
-from io import StringIO
|
|
| 164 |
-from tempfile import TemporaryFile
|
|
| 159 |
+from buildstream import _GitSourceBase
|
|
| 165 | 160 |
|
| 166 |
-from configparser import RawConfigParser
|
|
| 167 | 161 |
|
| 168 |
-from buildstream import Source, SourceError, Consistency, SourceFetcher, CoreWarnings
|
|
| 169 |
-from buildstream import utils
|
|
| 170 |
-from buildstream.utils import move_atomic, DirectoryExistsError
|
|
| 171 |
- |
|
| 172 |
-GIT_MODULES = '.gitmodules'
|
|
| 173 |
- |
|
| 174 |
-# Warnings
|
|
| 175 |
-WARN_INCONSISTENT_SUBMODULE = "inconsistent-submodule"
|
|
| 176 |
-WARN_UNLISTED_SUBMODULE = "unlisted-submodule"
|
|
| 177 |
-WARN_INVALID_SUBMODULE = "invalid-submodule"
|
|
| 178 |
- |
|
| 179 |
- |
|
| 180 |
-# Because of handling of submodules, we maintain a GitMirror
|
|
| 181 |
-# for the primary git source and also for each submodule it
|
|
| 182 |
-# might have at a given time
|
|
| 183 |
-#
|
|
| 184 |
-class GitMirror(SourceFetcher):
|
|
| 185 |
- |
|
| 186 |
- def __init__(self, source, path, url, ref, *, primary=False, tags=[]):
|
|
| 187 |
- |
|
| 188 |
- super().__init__()
|
|
| 189 |
- self.source = source
|
|
| 190 |
- self.path = path
|
|
| 191 |
- self.url = url
|
|
| 192 |
- self.ref = ref
|
|
| 193 |
- self.tags = tags
|
|
| 194 |
- self.primary = primary
|
|
| 195 |
- self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
|
|
| 196 |
- self.mark_download_url(url)
|
|
| 197 |
- |
|
| 198 |
- # Ensures that the mirror exists
|
|
| 199 |
- def ensure(self, alias_override=None):
|
|
| 200 |
- |
|
| 201 |
- # Unfortunately, git does not know how to only clone just a specific ref,
|
|
| 202 |
- # so we have to download all of those gigs even if we only need a couple
|
|
| 203 |
- # of bytes.
|
|
| 204 |
- if not os.path.exists(self.mirror):
|
|
| 205 |
- |
|
| 206 |
- # Do the initial clone in a tmpdir just because we want an atomic move
|
|
| 207 |
- # after a long standing clone which could fail overtime, for now do
|
|
| 208 |
- # this directly in our git directory, eliminating the chances that the
|
|
| 209 |
- # system configured tmpdir is not on the same partition.
|
|
| 210 |
- #
|
|
| 211 |
- with self.source.tempdir() as tmpdir:
|
|
| 212 |
- url = self.source.translate_url(self.url, alias_override=alias_override,
|
|
| 213 |
- primary=self.primary)
|
|
| 214 |
- self.source.call([self.source.host_git, 'clone', '--mirror', '-n', url, tmpdir],
|
|
| 215 |
- fail="Failed to clone git repository {}".format(url),
|
|
| 216 |
- fail_temporarily=True)
|
|
| 217 |
- |
|
| 218 |
- try:
|
|
| 219 |
- move_atomic(tmpdir, self.mirror)
|
|
| 220 |
- except DirectoryExistsError:
|
|
| 221 |
- # Another process was quicker to download this repository.
|
|
| 222 |
- # Let's discard our own
|
|
| 223 |
- self.source.status("{}: Discarding duplicate clone of {}"
|
|
| 224 |
- .format(self.source, url))
|
|
| 225 |
- except OSError as e:
|
|
| 226 |
- raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
|
|
| 227 |
- .format(self.source, url, tmpdir, self.mirror, e)) from e
|
|
| 228 |
- |
|
| 229 |
- def _fetch(self, alias_override=None):
|
|
| 230 |
- url = self.source.translate_url(self.url,
|
|
| 231 |
- alias_override=alias_override,
|
|
| 232 |
- primary=self.primary)
|
|
| 233 |
- |
|
| 234 |
- if alias_override:
|
|
| 235 |
- remote_name = utils.url_directory_name(alias_override)
|
|
| 236 |
- _, remotes = self.source.check_output(
|
|
| 237 |
- [self.source.host_git, 'remote'],
|
|
| 238 |
- fail="Failed to retrieve list of remotes in {}".format(self.mirror),
|
|
| 239 |
- cwd=self.mirror
|
|
| 240 |
- )
|
|
| 241 |
- if remote_name not in remotes:
|
|
| 242 |
- self.source.call(
|
|
| 243 |
- [self.source.host_git, 'remote', 'add', remote_name, url],
|
|
| 244 |
- fail="Failed to add remote {} with url {}".format(remote_name, url),
|
|
| 245 |
- cwd=self.mirror
|
|
| 246 |
- )
|
|
| 247 |
- else:
|
|
| 248 |
- remote_name = "origin"
|
|
| 249 |
- |
|
| 250 |
- self.source.call([self.source.host_git, 'fetch', remote_name, '--prune', '--force', '--tags'],
|
|
| 251 |
- fail="Failed to fetch from remote git repository: {}".format(url),
|
|
| 252 |
- fail_temporarily=True,
|
|
| 253 |
- cwd=self.mirror)
|
|
| 254 |
- |
|
| 255 |
- def fetch(self, alias_override=None):
|
|
| 256 |
- # Resolve the URL for the message
|
|
| 257 |
- resolved_url = self.source.translate_url(self.url,
|
|
| 258 |
- alias_override=alias_override,
|
|
| 259 |
- primary=self.primary)
|
|
| 260 |
- |
|
| 261 |
- with self.source.timed_activity("Fetching from {}"
|
|
| 262 |
- .format(resolved_url),
|
|
| 263 |
- silent_nested=True):
|
|
| 264 |
- self.ensure(alias_override)
|
|
| 265 |
- if not self.has_ref():
|
|
| 266 |
- self._fetch(alias_override)
|
|
| 267 |
- self.assert_ref()
|
|
| 268 |
- |
|
| 269 |
- def has_ref(self):
|
|
| 270 |
- if not self.ref:
|
|
| 271 |
- return False
|
|
| 272 |
- |
|
| 273 |
- # If the mirror doesnt exist, we also dont have the ref
|
|
| 274 |
- if not os.path.exists(self.mirror):
|
|
| 275 |
- return False
|
|
| 276 |
- |
|
| 277 |
- # Check if the ref is really there
|
|
| 278 |
- rc = self.source.call([self.source.host_git, 'cat-file', '-t', self.ref], cwd=self.mirror)
|
|
| 279 |
- return rc == 0
|
|
| 280 |
- |
|
| 281 |
- def assert_ref(self):
|
|
| 282 |
- if not self.has_ref():
|
|
| 283 |
- raise SourceError("{}: expected ref '{}' was not found in git repository: '{}'"
|
|
| 284 |
- .format(self.source, self.ref, self.url))
|
|
| 285 |
- |
|
| 286 |
- def latest_commit_with_tags(self, tracking, track_tags=False):
|
|
| 287 |
- _, output = self.source.check_output(
|
|
| 288 |
- [self.source.host_git, 'rev-parse', tracking],
|
|
| 289 |
- fail="Unable to find commit for specified branch name '{}'".format(tracking),
|
|
| 290 |
- cwd=self.mirror)
|
|
| 291 |
- ref = output.rstrip('\n')
|
|
| 292 |
- |
|
| 293 |
- if self.source.ref_format == 'git-describe':
|
|
| 294 |
- # Prefix the ref with the closest tag, if available,
|
|
| 295 |
- # to make the ref human readable
|
|
| 296 |
- exit_code, output = self.source.check_output(
|
|
| 297 |
- [self.source.host_git, 'describe', '--tags', '--abbrev=40', '--long', ref],
|
|
| 298 |
- cwd=self.mirror)
|
|
| 299 |
- if exit_code == 0:
|
|
| 300 |
- ref = output.rstrip('\n')
|
|
| 301 |
- |
|
| 302 |
- if not track_tags:
|
|
| 303 |
- return ref, []
|
|
| 304 |
- |
|
| 305 |
- tags = set()
|
|
| 306 |
- for options in [[], ['--first-parent'], ['--tags'], ['--tags', '--first-parent']]:
|
|
| 307 |
- exit_code, output = self.source.check_output(
|
|
| 308 |
- [self.source.host_git, 'describe', '--abbrev=0', ref] + options,
|
|
| 309 |
- cwd=self.mirror)
|
|
| 310 |
- if exit_code == 0:
|
|
| 311 |
- tag = output.strip()
|
|
| 312 |
- _, commit_ref = self.source.check_output(
|
|
| 313 |
- [self.source.host_git, 'rev-parse', tag + '^{commit}'],
|
|
| 314 |
- fail="Unable to resolve tag '{}'".format(tag),
|
|
| 315 |
- cwd=self.mirror)
|
|
| 316 |
- exit_code = self.source.call(
|
|
| 317 |
- [self.source.host_git, 'cat-file', 'tag', tag],
|
|
| 318 |
- cwd=self.mirror)
|
|
| 319 |
- annotated = (exit_code == 0)
|
|
| 320 |
- |
|
| 321 |
- tags.add((tag, commit_ref.strip(), annotated))
|
|
| 322 |
- |
|
| 323 |
- return ref, list(tags)
|
|
| 324 |
- |
|
| 325 |
- def stage(self, directory):
|
|
| 326 |
- fullpath = os.path.join(directory, self.path)
|
|
| 327 |
- |
|
| 328 |
- # Using --shared here avoids copying the objects into the checkout, in any
|
|
| 329 |
- # case we're just checking out a specific commit and then removing the .git/
|
|
| 330 |
- # directory.
|
|
| 331 |
- self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
|
|
| 332 |
- fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
|
|
| 333 |
- fail_temporarily=True)
|
|
| 334 |
- |
|
| 335 |
- self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
|
|
| 336 |
- fail="Failed to checkout git ref {}".format(self.ref),
|
|
| 337 |
- cwd=fullpath)
|
|
| 338 |
- |
|
| 339 |
- # Remove .git dir
|
|
| 340 |
- shutil.rmtree(os.path.join(fullpath, ".git"))
|
|
| 341 |
- |
|
| 342 |
- self._rebuild_git(fullpath)
|
|
| 343 |
- |
|
| 344 |
- def init_workspace(self, directory):
|
|
| 345 |
- fullpath = os.path.join(directory, self.path)
|
|
| 346 |
- url = self.source.translate_url(self.url)
|
|
| 347 |
- |
|
| 348 |
- self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
|
|
| 349 |
- fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
|
|
| 350 |
- fail_temporarily=True)
|
|
| 351 |
- |
|
| 352 |
- self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', url],
|
|
| 353 |
- fail='Failed to add remote origin "{}"'.format(url),
|
|
| 354 |
- cwd=fullpath)
|
|
| 355 |
- |
|
| 356 |
- self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
|
|
| 357 |
- fail="Failed to checkout git ref {}".format(self.ref),
|
|
| 358 |
- cwd=fullpath)
|
|
| 359 |
- |
|
| 360 |
- # List the submodules (path/url tuples) present at the given ref of this repo
|
|
| 361 |
- def submodule_list(self):
|
|
| 362 |
- modules = "{}:{}".format(self.ref, GIT_MODULES)
|
|
| 363 |
- exit_code, output = self.source.check_output(
|
|
| 364 |
- [self.source.host_git, 'show', modules], cwd=self.mirror)
|
|
| 365 |
- |
|
| 366 |
- # If git show reports error code 128 here, we take it to mean there is
|
|
| 367 |
- # no .gitmodules file to display for the given revision.
|
|
| 368 |
- if exit_code == 128:
|
|
| 369 |
- return
|
|
| 370 |
- elif exit_code != 0:
|
|
| 371 |
- raise SourceError(
|
|
| 372 |
- "{plugin}: Failed to show gitmodules at ref {ref}".format(
|
|
| 373 |
- plugin=self, ref=self.ref))
|
|
| 374 |
- |
|
| 375 |
- content = '\n'.join([l.strip() for l in output.splitlines()])
|
|
| 376 |
- |
|
| 377 |
- io = StringIO(content)
|
|
| 378 |
- parser = RawConfigParser()
|
|
| 379 |
- parser.read_file(io)
|
|
| 380 |
- |
|
| 381 |
- for section in parser.sections():
|
|
| 382 |
- # validate section name against the 'submodule "foo"' pattern
|
|
| 383 |
- if re.match(r'submodule "(.*)"', section):
|
|
| 384 |
- path = parser.get(section, 'path')
|
|
| 385 |
- url = parser.get(section, 'url')
|
|
| 386 |
- |
|
| 387 |
- yield (path, url)
|
|
| 388 |
- |
|
| 389 |
- # Fetch the ref which this mirror requires its submodule to have,
|
|
| 390 |
- # at the given ref of this mirror.
|
|
| 391 |
- def submodule_ref(self, submodule, ref=None):
|
|
| 392 |
- if not ref:
|
|
| 393 |
- ref = self.ref
|
|
| 394 |
- |
|
| 395 |
- # list objects in the parent repo tree to find the commit
|
|
| 396 |
- # object that corresponds to the submodule
|
|
| 397 |
- _, output = self.source.check_output([self.source.host_git, 'ls-tree', ref, submodule],
|
|
| 398 |
- fail="ls-tree failed for commit {} and submodule: {}".format(
|
|
| 399 |
- ref, submodule),
|
|
| 400 |
- cwd=self.mirror)
|
|
| 401 |
- |
|
| 402 |
- # read the commit hash from the output
|
|
| 403 |
- fields = output.split()
|
|
| 404 |
- if len(fields) >= 2 and fields[1] == 'commit':
|
|
| 405 |
- submodule_commit = output.split()[2]
|
|
| 406 |
- |
|
| 407 |
- # fail if the commit hash is invalid
|
|
| 408 |
- if len(submodule_commit) != 40:
|
|
| 409 |
- raise SourceError("{}: Error reading commit information for submodule '{}'"
|
|
| 410 |
- .format(self.source, submodule))
|
|
| 411 |
- |
|
| 412 |
- return submodule_commit
|
|
| 413 |
- |
|
| 414 |
- else:
|
|
| 415 |
- detail = "The submodule '{}' is defined either in the BuildStream source\n".format(submodule) + \
|
|
| 416 |
- "definition, or in a .gitmodules file. But the submodule was never added to the\n" + \
|
|
| 417 |
- "underlying git repository with `git submodule add`."
|
|
| 418 |
- |
|
| 419 |
- self.source.warn("{}: Ignoring inconsistent submodule '{}'"
|
|
| 420 |
- .format(self.source, submodule), detail=detail,
|
|
| 421 |
- warning_token=WARN_INCONSISTENT_SUBMODULE)
|
|
| 422 |
- |
|
| 423 |
- return None
|
|
| 424 |
- |
|
| 425 |
- def _rebuild_git(self, fullpath):
|
|
| 426 |
- if not self.tags:
|
|
| 427 |
- return
|
|
| 428 |
- |
|
| 429 |
- with self.source.tempdir() as tmpdir:
|
|
| 430 |
- included = set()
|
|
| 431 |
- shallow = set()
|
|
| 432 |
- for _, commit_ref, _ in self.tags:
|
|
| 433 |
- |
|
| 434 |
- _, out = self.source.check_output([self.source.host_git, 'rev-list',
|
|
| 435 |
- '--boundary', '{}..{}'.format(commit_ref, self.ref)],
|
|
| 436 |
- fail="Failed to get git history {}..{} in directory: {}"
|
|
| 437 |
- .format(commit_ref, self.ref, fullpath),
|
|
| 438 |
- fail_temporarily=True,
|
|
| 439 |
- cwd=self.mirror)
|
|
| 440 |
- for line in out.splitlines():
|
|
| 441 |
- rev = line.lstrip('-')
|
|
| 442 |
- if line[0] == '-':
|
|
| 443 |
- shallow.add(rev)
|
|
| 444 |
- else:
|
|
| 445 |
- included.add(rev)
|
|
| 446 |
- |
|
| 447 |
- shallow -= included
|
|
| 448 |
- included |= shallow
|
|
| 449 |
- |
|
| 450 |
- self.source.call([self.source.host_git, 'init'],
|
|
| 451 |
- fail="Cannot initialize git repository: {}".format(fullpath),
|
|
| 452 |
- cwd=fullpath)
|
|
| 453 |
- |
|
| 454 |
- for rev in included:
|
|
| 455 |
- with TemporaryFile(dir=tmpdir) as commit_file:
|
|
| 456 |
- self.source.call([self.source.host_git, 'cat-file', 'commit', rev],
|
|
| 457 |
- stdout=commit_file,
|
|
| 458 |
- fail="Failed to get commit {}".format(rev),
|
|
| 459 |
- cwd=self.mirror)
|
|
| 460 |
- commit_file.seek(0, 0)
|
|
| 461 |
- self.source.call([self.source.host_git, 'hash-object', '-w', '-t', 'commit', '--stdin'],
|
|
| 462 |
- stdin=commit_file,
|
|
| 463 |
- fail="Failed to add commit object {}".format(rev),
|
|
| 464 |
- cwd=fullpath)
|
|
| 465 |
- |
|
| 466 |
- with open(os.path.join(fullpath, '.git', 'shallow'), 'w') as shallow_file:
|
|
| 467 |
- for rev in shallow:
|
|
| 468 |
- shallow_file.write('{}\n'.format(rev))
|
|
| 469 |
- |
|
| 470 |
- for tag, commit_ref, annotated in self.tags:
|
|
| 471 |
- if annotated:
|
|
| 472 |
- with TemporaryFile(dir=tmpdir) as tag_file:
|
|
| 473 |
- tag_data = 'object {}\ntype commit\ntag {}\n'.format(commit_ref, tag)
|
|
| 474 |
- tag_file.write(tag_data.encode('ascii'))
|
|
| 475 |
- tag_file.seek(0, 0)
|
|
| 476 |
- _, tag_ref = self.source.check_output(
|
|
| 477 |
- [self.source.host_git, 'hash-object', '-w', '-t',
|
|
| 478 |
- 'tag', '--stdin'],
|
|
| 479 |
- stdin=tag_file,
|
|
| 480 |
- fail="Failed to add tag object {}".format(tag),
|
|
| 481 |
- cwd=fullpath)
|
|
| 482 |
- |
|
| 483 |
- self.source.call([self.source.host_git, 'tag', tag, tag_ref.strip()],
|
|
| 484 |
- fail="Failed to tag: {}".format(tag),
|
|
| 485 |
- cwd=fullpath)
|
|
| 486 |
- else:
|
|
| 487 |
- self.source.call([self.source.host_git, 'tag', tag, commit_ref],
|
|
| 488 |
- fail="Failed to tag: {}".format(tag),
|
|
| 489 |
- cwd=fullpath)
|
|
| 490 |
- |
|
| 491 |
- with open(os.path.join(fullpath, '.git', 'HEAD'), 'w') as head:
|
|
| 492 |
- self.source.call([self.source.host_git, 'rev-parse', self.ref],
|
|
| 493 |
- stdout=head,
|
|
| 494 |
- fail="Failed to parse commit {}".format(self.ref),
|
|
| 495 |
- cwd=self.mirror)
|
|
| 496 |
- |
|
| 497 |
- |
|
| 498 |
-class GitSource(Source):
|
|
| 499 |
- # pylint: disable=attribute-defined-outside-init
|
|
| 500 |
- |
|
| 501 |
- def configure(self, node):
|
|
| 502 |
- ref = self.node_get_member(node, str, 'ref', None)
|
|
| 503 |
- |
|
| 504 |
- config_keys = ['url', 'track', 'ref', 'submodules',
|
|
| 505 |
- 'checkout-submodules', 'ref-format',
|
|
| 506 |
- 'track-tags', 'tags']
|
|
| 507 |
- self.node_validate(node, config_keys + Source.COMMON_CONFIG_KEYS)
|
|
| 508 |
- |
|
| 509 |
- tags_node = self.node_get_member(node, list, 'tags', [])
|
|
| 510 |
- for tag_node in tags_node:
|
|
| 511 |
- self.node_validate(tag_node, ['tag', 'commit', 'annotated'])
|
|
| 512 |
- |
|
| 513 |
- tags = self._load_tags(node)
|
|
| 514 |
- self.track_tags = self.node_get_member(node, bool, 'track-tags', False)
|
|
| 515 |
- |
|
| 516 |
- self.original_url = self.node_get_member(node, str, 'url')
|
|
| 517 |
- self.mirror = GitMirror(self, '', self.original_url, ref, tags=tags, primary=True)
|
|
| 518 |
- self.tracking = self.node_get_member(node, str, 'track', None)
|
|
| 519 |
- |
|
| 520 |
- self.ref_format = self.node_get_member(node, str, 'ref-format', 'sha1')
|
|
| 521 |
- if self.ref_format not in ['sha1', 'git-describe']:
|
|
| 522 |
- provenance = self.node_provenance(node, member_name='ref-format')
|
|
| 523 |
- raise SourceError("{}: Unexpected value for ref-format: {}".format(provenance, self.ref_format))
|
|
| 524 |
- |
|
| 525 |
- # At this point we now know if the source has a ref and/or a track.
|
|
| 526 |
- # If it is missing both then we will be unable to track or build.
|
|
| 527 |
- if self.mirror.ref is None and self.tracking is None:
|
|
| 528 |
- raise SourceError("{}: Git sources require a ref and/or track".format(self),
|
|
| 529 |
- reason="missing-track-and-ref")
|
|
| 530 |
- |
|
| 531 |
- self.checkout_submodules = self.node_get_member(node, bool, 'checkout-submodules', True)
|
|
| 532 |
- self.submodules = []
|
|
| 533 |
- |
|
| 534 |
- # Parse a dict of submodule overrides, stored in the submodule_overrides
|
|
| 535 |
- # and submodule_checkout_overrides dictionaries.
|
|
| 536 |
- self.submodule_overrides = {}
|
|
| 537 |
- self.submodule_checkout_overrides = {}
|
|
| 538 |
- modules = self.node_get_member(node, Mapping, 'submodules', {})
|
|
| 539 |
- for path, _ in self.node_items(modules):
|
|
| 540 |
- submodule = self.node_get_member(modules, Mapping, path)
|
|
| 541 |
- url = self.node_get_member(submodule, str, 'url', None)
|
|
| 542 |
- |
|
| 543 |
- # Make sure to mark all URLs that are specified in the configuration
|
|
| 544 |
- if url:
|
|
| 545 |
- self.mark_download_url(url, primary=False)
|
|
| 546 |
- |
|
| 547 |
- self.submodule_overrides[path] = url
|
|
| 548 |
- if 'checkout' in submodule:
|
|
| 549 |
- checkout = self.node_get_member(submodule, bool, 'checkout')
|
|
| 550 |
- self.submodule_checkout_overrides[path] = checkout
|
|
| 551 |
- |
|
| 552 |
- self.mark_download_url(self.original_url)
|
|
| 553 |
- |
|
| 554 |
- def preflight(self):
|
|
| 555 |
- # Check if git is installed, get the binary at the same time
|
|
| 556 |
- self.host_git = utils.get_host_tool('git')
|
|
| 557 |
- |
|
| 558 |
- def get_unique_key(self):
|
|
| 559 |
- # Here we want to encode the local name of the repository and
|
|
| 560 |
- # the ref, if the user changes the alias to fetch the same sources
|
|
| 561 |
- # from another location, it should not affect the cache key.
|
|
| 562 |
- key = [self.original_url, self.mirror.ref]
|
|
| 563 |
- if self.mirror.tags:
|
|
| 564 |
- tags = {tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags}
|
|
| 565 |
- key.append({'tags': tags})
|
|
| 566 |
- |
|
| 567 |
- # Only modify the cache key with checkout_submodules if it's something
|
|
| 568 |
- # other than the default behaviour.
|
|
| 569 |
- if self.checkout_submodules is False:
|
|
| 570 |
- key.append({"checkout_submodules": self.checkout_submodules})
|
|
| 571 |
- |
|
| 572 |
- # We want the cache key to change if the source was
|
|
| 573 |
- # configured differently, and submodules count.
|
|
| 574 |
- if self.submodule_overrides:
|
|
| 575 |
- key.append(self.submodule_overrides)
|
|
| 576 |
- |
|
| 577 |
- if self.submodule_checkout_overrides:
|
|
| 578 |
- key.append({"submodule_checkout_overrides": self.submodule_checkout_overrides})
|
|
| 579 |
- |
|
| 580 |
- return key
|
|
| 581 |
- |
|
| 582 |
- def get_consistency(self):
|
|
| 583 |
- if self.have_all_refs():
|
|
| 584 |
- return Consistency.CACHED
|
|
| 585 |
- elif self.mirror.ref is not None:
|
|
| 586 |
- return Consistency.RESOLVED
|
|
| 587 |
- return Consistency.INCONSISTENT
|
|
| 588 |
- |
|
| 589 |
- def load_ref(self, node):
|
|
| 590 |
- self.mirror.ref = self.node_get_member(node, str, 'ref', None)
|
|
| 591 |
- self.mirror.tags = self._load_tags(node)
|
|
| 592 |
- |
|
| 593 |
- def get_ref(self):
|
|
| 594 |
- return self.mirror.ref, self.mirror.tags
|
|
| 595 |
- |
|
| 596 |
- def set_ref(self, ref_data, node):
|
|
| 597 |
- if not ref_data:
|
|
| 598 |
- self.mirror.ref = None
|
|
| 599 |
- if 'ref' in node:
|
|
| 600 |
- del node['ref']
|
|
| 601 |
- self.mirror.tags = []
|
|
| 602 |
- if 'tags' in node:
|
|
| 603 |
- del node['tags']
|
|
| 604 |
- else:
|
|
| 605 |
- ref, tags = ref_data
|
|
| 606 |
- node['ref'] = self.mirror.ref = ref
|
|
| 607 |
- self.mirror.tags = tags
|
|
| 608 |
- if tags:
|
|
| 609 |
- node['tags'] = []
|
|
| 610 |
- for tag, commit_ref, annotated in tags:
|
|
| 611 |
- data = {'tag': tag,
|
|
| 612 |
- 'commit': commit_ref,
|
|
| 613 |
- 'annotated': annotated}
|
|
| 614 |
- node['tags'].append(data)
|
|
| 615 |
- else:
|
|
| 616 |
- if 'tags' in node:
|
|
| 617 |
- del node['tags']
|
|
| 618 |
- |
|
| 619 |
- def track(self):
|
|
| 620 |
- |
|
| 621 |
- # If self.tracking is not specified it's not an error, just silently return
|
|
| 622 |
- if not self.tracking:
|
|
| 623 |
- # Is there a better way to check if a ref is given.
|
|
| 624 |
- if self.mirror.ref is None:
|
|
| 625 |
- detail = 'Without a tracking branch ref can not be updated. Please ' + \
|
|
| 626 |
- 'provide a ref or a track.'
|
|
| 627 |
- raise SourceError("{}: No track or ref".format(self),
|
|
| 628 |
- detail=detail, reason="track-attempt-no-track")
|
|
| 629 |
- return None
|
|
| 630 |
- |
|
| 631 |
- # Resolve the URL for the message
|
|
| 632 |
- resolved_url = self.translate_url(self.mirror.url)
|
|
| 633 |
- with self.timed_activity("Tracking {} from {}"
|
|
| 634 |
- .format(self.tracking, resolved_url),
|
|
| 635 |
- silent_nested=True):
|
|
| 636 |
- self.mirror.ensure()
|
|
| 637 |
- self.mirror._fetch()
|
|
| 638 |
- |
|
| 639 |
- # Update self.mirror.ref and node.ref from the self.tracking branch
|
|
| 640 |
- ret = self.mirror.latest_commit_with_tags(self.tracking, self.track_tags)
|
|
| 641 |
- |
|
| 642 |
- return ret
|
|
| 643 |
- |
|
| 644 |
- def init_workspace(self, directory):
|
|
| 645 |
- # XXX: may wish to refactor this as some code dupe with stage()
|
|
| 646 |
- self.refresh_submodules()
|
|
| 647 |
- |
|
| 648 |
- with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
|
|
| 649 |
- self.mirror.init_workspace(directory)
|
|
| 650 |
- for mirror in self.submodules:
|
|
| 651 |
- mirror.init_workspace(directory)
|
|
| 652 |
- |
|
| 653 |
- def stage(self, directory):
|
|
| 654 |
- |
|
| 655 |
- # Need to refresh submodule list here again, because
|
|
| 656 |
- # it's possible that we did not load in the main process
|
|
| 657 |
- # with submodules present (source needed fetching) and
|
|
| 658 |
- # we may not know about the submodule yet come time to build.
|
|
| 659 |
- #
|
|
| 660 |
- self.refresh_submodules()
|
|
| 661 |
- |
|
| 662 |
- # Stage the main repo in the specified directory
|
|
| 663 |
- #
|
|
| 664 |
- with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
|
|
| 665 |
- self.mirror.stage(directory)
|
|
| 666 |
- for mirror in self.submodules:
|
|
| 667 |
- mirror.stage(directory)
|
|
| 668 |
- |
|
| 669 |
- def get_source_fetchers(self):
|
|
| 670 |
- yield self.mirror
|
|
| 671 |
- self.refresh_submodules()
|
|
| 672 |
- for submodule in self.submodules:
|
|
| 673 |
- yield submodule
|
|
| 674 |
- |
|
| 675 |
- def validate_cache(self):
|
|
| 676 |
- discovered_submodules = {}
|
|
| 677 |
- unlisted_submodules = []
|
|
| 678 |
- invalid_submodules = []
|
|
| 679 |
- |
|
| 680 |
- for path, url in self.mirror.submodule_list():
|
|
| 681 |
- discovered_submodules[path] = url
|
|
| 682 |
- if self.ignore_submodule(path):
|
|
| 683 |
- continue
|
|
| 684 |
- |
|
| 685 |
- override_url = self.submodule_overrides.get(path)
|
|
| 686 |
- if not override_url:
|
|
| 687 |
- unlisted_submodules.append((path, url))
|
|
| 688 |
- |
|
| 689 |
- # Warn about submodules which are explicitly configured but do not exist
|
|
| 690 |
- for path, url in self.submodule_overrides.items():
|
|
| 691 |
- if path not in discovered_submodules:
|
|
| 692 |
- invalid_submodules.append((path, url))
|
|
| 693 |
- |
|
| 694 |
- if invalid_submodules:
|
|
| 695 |
- detail = []
|
|
| 696 |
- for path, url in invalid_submodules:
|
|
| 697 |
- detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
|
|
| 698 |
- |
|
| 699 |
- self.warn("{}: Invalid submodules specified".format(self),
|
|
| 700 |
- warning_token=WARN_INVALID_SUBMODULE,
|
|
| 701 |
- detail="The following submodules are specified in the source "
|
|
| 702 |
- "description but do not exist according to the repository\n\n" +
|
|
| 703 |
- "\n".join(detail))
|
|
| 704 |
- |
|
| 705 |
- # Warn about submodules which exist but have not been explicitly configured
|
|
| 706 |
- if unlisted_submodules:
|
|
| 707 |
- detail = []
|
|
| 708 |
- for path, url in unlisted_submodules:
|
|
| 709 |
- detail.append(" Submodule URL '{}' at path '{}'".format(url, path))
|
|
| 710 |
- |
|
| 711 |
- self.warn("{}: Unlisted submodules exist".format(self),
|
|
| 712 |
- warning_token=WARN_UNLISTED_SUBMODULE,
|
|
| 713 |
- detail="The following submodules exist but are not specified " +
|
|
| 714 |
- "in the source description\n\n" +
|
|
| 715 |
- "\n".join(detail))
|
|
| 716 |
- |
|
| 717 |
- # Assert that the ref exists in the track tag/branch, if track has been specified.
|
|
| 718 |
- ref_in_track = False
|
|
| 719 |
- if self.tracking:
|
|
| 720 |
- _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
|
|
| 721 |
- '--contains', self.mirror.ref],
|
|
| 722 |
- cwd=self.mirror.mirror)
|
|
| 723 |
- if branch:
|
|
| 724 |
- ref_in_track = True
|
|
| 725 |
- else:
|
|
| 726 |
- _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
|
|
| 727 |
- '--contains', self.mirror.ref],
|
|
| 728 |
- cwd=self.mirror.mirror)
|
|
| 729 |
- if tag:
|
|
| 730 |
- ref_in_track = True
|
|
| 731 |
- |
|
| 732 |
- if not ref_in_track:
|
|
| 733 |
- detail = "The ref provided for the element does not exist locally " + \
|
|
| 734 |
- "in the provided track branch / tag '{}'.\n".format(self.tracking) + \
|
|
| 735 |
- "You may wish to track the element to update the ref from '{}' ".format(self.tracking) + \
|
|
| 736 |
- "with `bst track`,\n" + \
|
|
| 737 |
- "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
|
|
| 738 |
- |
|
| 739 |
- self.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
|
|
| 740 |
- .format(self, self.mirror.ref, self.tracking, self.mirror.url),
|
|
| 741 |
- detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
|
|
| 742 |
- |
|
| 743 |
- ###########################################################
|
|
| 744 |
- # Local Functions #
|
|
| 745 |
- ###########################################################
|
|
| 746 |
- def have_all_refs(self):
|
|
| 747 |
- if not self.mirror.has_ref():
|
|
| 748 |
- return False
|
|
| 749 |
- |
|
| 750 |
- self.refresh_submodules()
|
|
| 751 |
- for mirror in self.submodules:
|
|
| 752 |
- if not os.path.exists(mirror.mirror):
|
|
| 753 |
- return False
|
|
| 754 |
- if not mirror.has_ref():
|
|
| 755 |
- return False
|
|
| 756 |
- |
|
| 757 |
- return True
|
|
| 758 |
- |
|
| 759 |
- # Refreshes the GitMirror objects for submodules
|
|
| 760 |
- #
|
|
| 761 |
- # Assumes that we have our mirror and we have the ref which we point to
|
|
| 762 |
- #
|
|
| 763 |
- def refresh_submodules(self):
|
|
| 764 |
- self.mirror.ensure()
|
|
| 765 |
- submodules = []
|
|
| 766 |
- |
|
| 767 |
- for path, url in self.mirror.submodule_list():
|
|
| 768 |
- |
|
| 769 |
- # Completely ignore submodules which are disabled for checkout
|
|
| 770 |
- if self.ignore_submodule(path):
|
|
| 771 |
- continue
|
|
| 772 |
- |
|
| 773 |
- # Allow configuration to override the upstream
|
|
| 774 |
- # location of the submodules.
|
|
| 775 |
- override_url = self.submodule_overrides.get(path)
|
|
| 776 |
- if override_url:
|
|
| 777 |
- url = override_url
|
|
| 778 |
- |
|
| 779 |
- ref = self.mirror.submodule_ref(path)
|
|
| 780 |
- if ref is not None:
|
|
| 781 |
- mirror = GitMirror(self, path, url, ref)
|
|
| 782 |
- submodules.append(mirror)
|
|
| 783 |
- |
|
| 784 |
- self.submodules = submodules
|
|
| 785 |
- |
|
| 786 |
- def _load_tags(self, node):
|
|
| 787 |
- tags = []
|
|
| 788 |
- tags_node = self.node_get_member(node, list, 'tags', [])
|
|
| 789 |
- for tag_node in tags_node:
|
|
| 790 |
- tag = self.node_get_member(tag_node, str, 'tag')
|
|
| 791 |
- commit_ref = self.node_get_member(tag_node, str, 'commit')
|
|
| 792 |
- annotated = self.node_get_member(tag_node, bool, 'annotated')
|
|
| 793 |
- tags.append((tag, commit_ref, annotated))
|
|
| 794 |
- return tags
|
|
| 795 |
- |
|
| 796 |
- # Checks whether the plugin configuration has explicitly
|
|
| 797 |
- # configured this submodule to be ignored
|
|
| 798 |
- def ignore_submodule(self, path):
|
|
| 799 |
- try:
|
|
| 800 |
- checkout = self.submodule_checkout_overrides[path]
|
|
| 801 |
- except KeyError:
|
|
| 802 |
- checkout = self.checkout_submodules
|
|
| 803 |
- |
|
| 804 |
- return not checkout
|
|
| 162 |
+class GitSource(_GitSourceBase):
|
|
| 163 |
+ pass
|
|
| 805 | 164 |
|
| 806 | 165 |
|
| 807 | 166 |
# Plugin entry point
|
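With the implementation now living in buildstream/_gitsourcebase.py, a downstream plugin that needs git behaviour can subclass the base instead of copying some 650 lines. A minimal sketch following the pattern above (MyGitSource is a hypothetical name):

    from buildstream import _GitSourceBase

    class MyGitSource(_GitSourceBase):
        # Inherits configure(), track(), stage() and friends unchanged;
        # override individual methods here to customise behaviour
        pass

    # Plugin entry point
    def setup():
        return MyGitSource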
| ... | ... | @@ -146,6 +146,7 @@ class Sandbox(): |
| 146 | 146 |
|
| 147 | 147 |
self._output_directory = None
|
| 148 | 148 |
self._vdir = None
|
| 149 |
+ self._usebuildtree = False
|
|
| 149 | 150 |
|
| 150 | 151 |
# This is set if anyone requests access to the underlying
|
| 151 | 152 |
# directory via get_directory.
|
| ... | ... | @@ -1168,7 +1168,7 @@ def _call(*popenargs, terminate=False, **kwargs): |
| 1168 | 1168 |
#
|
| 1169 | 1169 |
def _glob2re(pat):
|
| 1170 | 1170 |
i, n = 0, len(pat)
|
| 1171 |
- res = ''
|
|
| 1171 |
+ res = '(?ms)'
|
|
| 1172 | 1172 |
while i < n:
|
| 1173 | 1173 |
c = pat[i]
|
| 1174 | 1174 |
i = i + 1
|
| ... | ... | @@ -1205,7 +1205,7 @@ def _glob2re(pat): |
| 1205 | 1205 |
res = '{}[{}]'.format(res, stuff)
|
| 1206 | 1206 |
else:
|
| 1207 | 1207 |
res = res + re.escape(c)
|
| 1208 |
- return res + r'\Z(?ms)'
|
|
| 1208 |
+ return res + r'\Z'
|
|
| 1209 | 1209 |
|
| 1210 | 1210 |
|
| 1211 | 1211 |
# _deduplicate()
|
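These two hunks move the inline regex flags from the tail of the generated pattern to its head. Python 3.7 deprecated inline flags anywhere but the start of a pattern, and later versions reject them outright, so the old `\Z(?ms)` suffix had to become a `(?ms)` prefix. A quick standalone check, using only the standard library:

    import re

    # Old placement: emits a DeprecationWarning on Python >= 3.7,
    # and raises re.error on newer versions
    old = r'a.*\Z(?ms)'

    # New placement: accepted everywhere
    new = r'(?ms)a.*\Z'
    assert re.match(new, 'a\nb')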
| ... | ... | @@ -146,7 +146,8 @@ def _test_pull(user_config_file, project_dir, artifact_dir, |
| 146 | 146 |
element = project.load_elements([element_name])[0]
|
| 147 | 147 |
|
| 148 | 148 |
# Manually setup the CAS remote
|
| 149 |
- cas.setup_remotes(use_config=True)
|
|
| 149 |
+ remotes = cas.get_remotes_from_projects()
|
|
| 150 |
+ cas.setup_remotes(remotes=remotes)
|
|
| 150 | 151 |
|
| 151 | 152 |
if cas.has_push_remotes(element=element):
|
| 152 | 153 |
# Push the element's artifact
|
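The same substitution repeats across the remaining test hunks: instead of relying on the implicit use_config=True path (followed by a separate initialize_remotes() call in the push tests), remote specs are gathered from the project configuration first and passed in explicitly. The pattern, with names exactly as they appear in this diff:

    # Old (implicit):
    #     cas.setup_remotes(use_config=True)
    #     cas.initialize_remotes()
    # New (explicit):
    remotes = cas.get_remotes_from_projects()
    cas.setup_remotes(remotes=remotes)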
| ... | ... | @@ -284,7 +285,8 @@ def _test_push_tree(user_config_file, project_dir, artifact_dir, artifact_digest |
| 284 | 285 |
cas = artifactcache.cas
|
| 285 | 286 |
|
| 286 | 287 |
# Manually setup the CAS remote
|
| 287 |
- artifactcache.setup_remotes(use_config=True)
|
|
| 288 |
+ remotes = artifactcache.get_remotes_from_projects()
|
|
| 289 |
+ artifactcache.setup_remotes(remotes=remotes)
|
|
| 288 | 290 |
|
| 289 | 291 |
if artifactcache.has_push_remotes():
|
| 290 | 292 |
directory = remote_execution_pb2.Directory()
|
| ... | ... | @@ -319,7 +321,8 @@ def _test_pull_tree(user_config_file, project_dir, artifact_dir, artifact_digest |
| 319 | 321 |
cas = context.artifactcache
|
| 320 | 322 |
|
| 321 | 323 |
# Manually setup the CAS remote
|
| 322 |
- cas.setup_remotes(use_config=True)
|
|
| 324 |
+ remotes = cas.get_remotes_from_projects()
|
|
| 325 |
+ cas.setup_remotes(remotes=remotes)
|
|
| 323 | 326 |
|
| 324 | 327 |
if cas.has_push_remotes():
|
| 325 | 328 |
# Pull the artifact using the Tree object
|
| ... | ... | @@ -125,8 +125,8 @@ def _test_push(user_config_file, project_dir, artifact_dir, |
| 125 | 125 |
element = project.load_elements([element_name])[0]
|
| 126 | 126 |
|
| 127 | 127 |
# Manually setup the CAS remote
|
| 128 |
- cas.setup_remotes(use_config=True)
|
|
| 129 |
- cas.initialize_remotes()
|
|
| 128 |
+ remotes = cas.get_remotes_from_projects()
|
|
| 129 |
+ cas.setup_remotes(remotes=remotes)
|
|
| 130 | 130 |
|
| 131 | 131 |
if cas.has_push_remotes(element=element):
|
| 132 | 132 |
# Push the element's artifact
|
| ... | ... | @@ -185,8 +185,8 @@ def test_push_directory(cli, tmpdir, datafiles): |
| 185 | 185 |
assert artifactcache.contains(element, element_key)
|
| 186 | 186 |
|
| 187 | 187 |
# Manually setup the CAS remote
|
| 188 |
- artifactcache.setup_remotes(use_config=True)
|
|
| 189 |
- artifactcache.initialize_remotes()
|
|
| 188 |
+ remotes = artifactcache.get_remotes_from_projects()
|
|
| 189 |
+ artifactcache.setup_remotes(remotes=remotes)
|
|
| 190 | 190 |
assert artifactcache.has_push_remotes(element=element)
|
| 191 | 191 |
|
| 192 | 192 |
# Recreate the CasBasedDirectory object from the cached artifact
|
| ... | ... | @@ -231,8 +231,8 @@ def _test_push_directory(user_config_file, project_dir, artifact_dir, artifact_d |
| 231 | 231 |
cas = context.artifactcache
|
| 232 | 232 |
|
| 233 | 233 |
# Manually setup the CAS remote
|
| 234 |
- cas.setup_remotes(use_config=True)
|
|
| 235 |
- cas.initialize_remotes()
|
|
| 234 |
+ remotes = cas.get_remotes_from_projects()
|
|
| 235 |
+ cas.setup_remotes(remotes=remotes)
|
|
| 236 | 236 |
|
| 237 | 237 |
if cas.has_push_remotes():
|
| 238 | 238 |
# Create a CasBasedDirectory from local CAS cache content
|
| ... | ... | @@ -307,8 +307,8 @@ def _test_push_message(user_config_file, project_dir, artifact_dir, queue): |
| 307 | 307 |
cas = context.artifactcache
|
| 308 | 308 |
|
| 309 | 309 |
# Manually setup the CAS remote
|
| 310 |
- cas.setup_remotes(use_config=True)
|
|
| 311 |
- cas.initialize_remotes()
|
|
| 310 |
+ remotes = cas.get_remotes_from_projects()
|
|
| 311 |
+ cas.setup_remotes(remotes=remotes)
|
|
| 312 | 312 |
|
| 313 | 313 |
if cas.has_push_remotes():
|
| 314 | 314 |
# Create an example message object
|
| ... | ... | @@ -41,7 +41,7 @@ def test_default_logging(cli, tmpdir, datafiles): |
| 41 | 41 |
result = cli.run(project=project, args=['source', 'fetch', element_name])
|
| 42 | 42 |
result.assert_success()
|
| 43 | 43 |
|
| 44 |
- m = re.search("\[\d\d:\d\d:\d\d\]\[\]\[\] SUCCESS Checking sources", result.stderr)
|
|
| 44 |
+ m = re.search(r"\[\d\d:\d\d:\d\d\]\[\]\[\] SUCCESS Checking sources", result.stderr)
|
|
| 45 | 45 |
assert(m is not None)
|
| 46 | 46 |
|
| 47 | 47 |
|
| ... | ... | @@ -77,7 +77,7 @@ def test_custom_logging(cli, tmpdir, datafiles): |
| 77 | 77 |
result = cli.run(project=project, args=['source', 'fetch', element_name])
|
| 78 | 78 |
result.assert_success()
|
| 79 | 79 |
|
| 80 |
- m = re.search("\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,,,SUCCESS,Checking sources", result.stderr)
|
|
| 80 |
+ m = re.search(r"\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,,,SUCCESS,Checking sources", result.stderr)
|
|
| 81 | 81 |
assert(m is not None)
|
| 82 | 82 |
|
| 83 | 83 |
|
| ... | ... | @@ -19,7 +19,9 @@ DATA_DIR = os.path.join( |
| 19 | 19 |
@pytest.mark.datafiles(DATA_DIR)
|
| 20 | 20 |
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
| 21 | 21 |
def test_buildtree_staged(cli_integration, tmpdir, datafiles):
|
| 22 |
- # i.e. tests that cached build trees are staged by `bst shell --build`
|
|
| 22 |
+ # We can only test the non-interactive case
|
|
| 23 |
+ # The non-interactive case defaults to not using buildtrees
|
|
| 24 |
+ # for `bst shell --build`
|
|
| 23 | 25 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 24 | 26 |
element_name = 'build-shell/buildtree.bst'
|
| 25 | 27 |
|
| ... | ... | @@ -27,15 +29,67 @@ def test_buildtree_staged(cli_integration, tmpdir, datafiles): |
| 27 | 29 |
res.assert_success()
|
| 28 | 30 |
|
| 29 | 31 |
res = cli_integration.run(project=project, args=[
|
| 30 |
- 'shell', '--build', element_name, '--', 'grep', '-q', 'Hi', 'test'
|
|
| 32 |
+ 'shell', '--build', element_name, '--', 'cat', 'test'
|
|
| 33 |
+ ])
|
|
| 34 |
+ res.assert_shell_error()
|
|
| 35 |
+ |
|
| 36 |
+ |
|
| 37 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 38 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
| 39 |
+def test_buildtree_staged_forced_true(cli_integration, tmpdir, datafiles):
|
|
| 40 |
+ # Test that if we ask for a build tree, it is there.
|
|
| 41 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 42 |
+ element_name = 'build-shell/buildtree.bst'
|
|
| 43 |
+ |
|
| 44 |
+ res = cli_integration.run(project=project, args=['build', element_name])
|
|
| 45 |
+ res.assert_success()
|
|
| 46 |
+ |
|
| 47 |
+ res = cli_integration.run(project=project, args=[
|
|
| 48 |
+ 'shell', '--build', '--use-buildtree', 'always', element_name, '--', 'cat', 'test'
|
|
| 49 |
+ ])
|
|
| 50 |
+ res.assert_success()
|
|
| 51 |
+ assert 'Hi' in res.output
|
|
| 52 |
+ |
|
| 53 |
+ |
|
| 54 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 55 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
| 56 |
+def test_buildtree_staged_if_available(cli_integration, tmpdir, datafiles):
|
|
| 57 |
+ # Test that a build tree can be correctly detected.
|
|
| 58 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 59 |
+ element_name = 'build-shell/buildtree.bst'
|
|
| 60 |
+ |
|
| 61 |
+ res = cli_integration.run(project=project, args=['build', element_name])
|
|
| 62 |
+ res.assert_success()
|
|
| 63 |
+ |
|
| 64 |
+ res = cli_integration.run(project=project, args=[
|
|
| 65 |
+ 'shell', '--build', '--use-buildtree', 'try', element_name, '--', 'cat', 'test'
|
|
| 31 | 66 |
])
|
| 32 | 67 |
res.assert_success()
|
| 68 |
+ assert 'Hi' in res.output
|
|
| 69 |
+ |
|
| 70 |
+ |
|
| 71 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 72 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
| 73 |
+def test_buildtree_staged_forced_false(cli_integration, tmpdir, datafiles):
|
|
| 74 |
+ # Test that if we ask not to have a build tree, it is not there
|
|
| 75 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 76 |
+ element_name = 'build-shell/buildtree.bst'
|
|
| 77 |
+ |
|
| 78 |
+ res = cli_integration.run(project=project, args=['build', element_name])
|
|
| 79 |
+ res.assert_success()
|
|
| 80 |
+ |
|
| 81 |
+ res = cli_integration.run(project=project, args=[
|
|
| 82 |
+ 'shell', '--build', '--use-buildtree', 'never', element_name, '--', 'cat', 'test'
|
|
| 83 |
+ ])
|
|
| 84 |
+ res.assert_shell_error()
|
|
| 85 |
+ |
|
| 86 |
+ assert 'Hi' not in res.output
|
|
| 33 | 87 |
|
| 34 | 88 |
|
| 35 | 89 |
@pytest.mark.datafiles(DATA_DIR)
|
| 36 | 90 |
@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
| 37 | 91 |
def test_buildtree_from_failure(cli_integration, tmpdir, datafiles):
|
| 38 |
- # i.e. test that on a build failure, we can still shell into it
|
|
| 92 |
+ # Test that we can use a build tree after a failure
|
|
| 39 | 93 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 40 | 94 |
element_name = 'build-shell/buildtree-fail.bst'
|
| 41 | 95 |
|
| ... | ... | @@ -44,9 +98,10 @@ def test_buildtree_from_failure(cli_integration, tmpdir, datafiles): |
| 44 | 98 |
|
| 45 | 99 |
# Assert that file has expected contents
|
| 46 | 100 |
res = cli_integration.run(project=project, args=[
|
| 47 |
- 'shell', '--build', element_name, '--', 'cat', 'test'
|
|
| 101 |
+ 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
|
|
| 48 | 102 |
])
|
| 49 | 103 |
res.assert_success()
|
| 104 |
+ assert "Warning: using a buildtree from a failed build" in res.output
|
|
| 50 | 105 |
assert 'Hi' in res.output
|
| 51 | 106 |
|
| 52 | 107 |
|
| ... | ... | @@ -80,6 +135,65 @@ def test_buildtree_pulled(cli, tmpdir, datafiles): |
| 80 | 135 |
|
| 81 | 136 |
# Check it's using the cached build tree
|
| 82 | 137 |
res = cli.run(project=project, args=[
|
| 83 |
- 'shell', '--build', element_name, '--', 'grep', '-q', 'Hi', 'test'
|
|
| 138 |
+ 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
|
|
| 84 | 139 |
])
|
| 85 | 140 |
res.assert_success()
|
| 141 |
+ |
|
| 142 |
+ |
|
| 143 |
+# This test checks for correct behaviour if a buildtree is not present.
|
|
| 144 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 145 |
+@pytest.mark.skipif(IS_LINUX and not HAVE_BWRAP, reason='Only available with bubblewrap on Linux')
|
|
| 146 |
+def test_buildtree_options(cli, tmpdir, datafiles):
|
|
| 147 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 148 |
+ element_name = 'build-shell/buildtree.bst'
|
|
| 149 |
+ |
|
| 150 |
+ with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
|
|
| 151 |
+ # Build the element to push it to cache
|
|
| 152 |
+ cli.configure({
|
|
| 153 |
+ 'artifacts': {'url': share.repo, 'push': True}
|
|
| 154 |
+ })
|
|
| 155 |
+ result = cli.run(project=project, args=['build', element_name])
|
|
| 156 |
+ result.assert_success()
|
|
| 157 |
+ assert cli.get_element_state(project, element_name) == 'cached'
|
|
| 158 |
+ |
|
| 159 |
+ # Discard the cache
|
|
| 160 |
+ cli.configure({
|
|
| 161 |
+ 'artifacts': {'url': share.repo, 'push': True},
|
|
| 162 |
+ 'artifactdir': os.path.join(cli.directory, 'artifacts2')
|
|
| 163 |
+ })
|
|
| 164 |
+ assert cli.get_element_state(project, element_name) != 'cached'
|
|
| 165 |
+ |
|
| 166 |
+ # Pull from cache, but do not include buildtrees.
|
|
| 167 |
+ result = cli.run(project=project, args=['pull', '--deps', 'all', element_name])
|
|
| 168 |
+ result.assert_success()
|
|
| 169 |
+ |
|
| 170 |
+ # The above is the simplest way I know to create a local cache without any buildtrees.
|
|
| 171 |
+ |
|
| 172 |
+ # Check it's not using the cached build tree
|
|
| 173 |
+ res = cli.run(project=project, args=[
|
|
| 174 |
+ 'shell', '--build', element_name, '--use-buildtree', 'never', '--', 'cat', 'test'
|
|
| 175 |
+ ])
|
|
| 176 |
+ res.assert_shell_error()
|
|
| 177 |
+ assert 'Hi' not in res.output
|
|
| 178 |
+ |
|
| 179 |
+ # Check it's correctly handling the lack of a buildtree
|
|
| 180 |
+ res = cli.run(project=project, args=[
|
|
| 181 |
+ 'shell', '--build', element_name, '--use-buildtree', 'try', '--', 'cat', 'test'
|
|
| 182 |
+ ])
|
|
| 183 |
+ res.assert_shell_error()
|
|
| 184 |
+ assert 'Hi' not in res.output
|
|
| 185 |
+ |
|
| 186 |
+ # Check it's not using the cached build tree; the default is to ask, and to fall
|
|
| 187 |
+ # back to not using it in non-interactive runs
|
|
| 188 |
+ res = cli.run(project=project, args=[
|
|
| 189 |
+ 'shell', '--build', element_name, '--', 'cat', 'test'
|
|
| 190 |
+ ])
|
|
| 191 |
+ res.assert_shell_error()
|
|
| 192 |
+ assert 'Hi' not in res.output
|
|
| 193 |
+ |
|
| 194 |
+ # Check that forcing the build tree fails when it is not cached
|
|
| 195 |
+ res = cli.run(project=project, args=[
|
|
| 196 |
+ 'shell', '--build', element_name, '--use-buildtree', 'always', '--', 'cat', 'test'
|
|
| 197 |
+ ])
|
|
| 198 |
+ res.assert_main_error(ErrorDomain.PROG_NOT_FOUND, None)
|
|
| 199 |
+ assert 'Hi' not in res.output
|
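Taken together, these tests pin down the three accepted values of the new option: 'always' fails outright when no build tree is cached, 'try' falls back to staging sources, 'never' skips the build tree, and omitting the option makes non-interactive runs default to not using it. A minimal invocation sketch mirroring the tests (element name and command are placeholders):

    res = cli.run(project=project, args=[
        'shell', '--build', element_name,
        '--use-buildtree', 'try',   # one of: always, try, never
        '--', 'cat', 'test'
    ])
    res.assert_success()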
| 1 |
+Hello World!
|
| 1 |
+"""
|
|
| 2 |
+always_cached
|
|
| 3 |
+=============
|
|
| 4 |
+ |
|
| 5 |
+This is a test source plugin that is always cached.
|
|
| 6 |
+Used to test that BuildStream core does not call fetch() for cached sources.
|
|
| 7 |
+ |
|
| 8 |
+"""
|
|
| 9 |
+ |
|
| 10 |
+from buildstream import Consistency, Source
|
|
| 11 |
+ |
|
| 12 |
+ |
|
| 13 |
+class AlwaysCachedSource(Source):
|
|
| 14 |
+ |
|
| 15 |
+ def configure(self, node):
|
|
| 16 |
+ pass
|
|
| 17 |
+ |
|
| 18 |
+ def preflight(self):
|
|
| 19 |
+ pass
|
|
| 20 |
+ |
|
| 21 |
+ def get_unique_key(self):
|
|
| 22 |
+ return None
|
|
| 23 |
+ |
|
| 24 |
+ def get_consistency(self):
|
|
| 25 |
+ return Consistency.CACHED
|
|
| 26 |
+ |
|
| 27 |
+ def load_ref(self, node):
|
|
| 28 |
+ pass
|
|
| 29 |
+ |
|
| 30 |
+ def get_ref(self):
|
|
| 31 |
+ return None
|
|
| 32 |
+ |
|
| 33 |
+ def set_ref(self, ref, node):
|
|
| 34 |
+ pass
|
|
| 35 |
+ |
|
| 36 |
+ def fetch(self):
|
|
| 37 |
+ # Source is always cached, so fetch() should never be called
|
|
| 38 |
+ assert False
|
|
| 39 |
+ |
|
| 40 |
+ def stage(self, directory):
|
|
| 41 |
+ pass
|
|
| 42 |
+ |
|
| 43 |
+ |
|
| 44 |
+def setup():
|
|
| 45 |
+ return AlwaysCachedSource
|
| 1 |
+# Project with local source plugins
|
|
| 2 |
+name: no-fetch-cached
|
|
| 3 |
+ |
|
| 4 |
+plugins:
|
|
| 5 |
+- origin: local
|
|
| 6 |
+ path: plugins/sources
|
|
| 7 |
+ sources:
|
|
| 8 |
+ always_cached: 0
|
| 1 |
+import os
|
|
| 2 |
+import pytest
|
|
| 3 |
+ |
|
| 4 |
+from buildstream import _yaml
|
|
| 5 |
+ |
|
| 6 |
+from tests.testutils import cli, create_repo
|
|
| 7 |
+from tests.testutils.site import HAVE_GIT
|
|
| 8 |
+ |
|
| 9 |
+DATA_DIR = os.path.join(
|
|
| 10 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
| 11 |
+ 'no-fetch-cached'
|
|
| 12 |
+)
|
|
| 13 |
+ |
|
| 14 |
+ |
|
| 15 |
+##################################################################
|
|
| 16 |
+# Tests #
|
|
| 17 |
+##################################################################
|
|
| 18 |
+# Test that fetch() is not called for cached sources
|
|
| 19 |
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
|
|
| 20 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 21 |
+def test_no_fetch_cached(cli, tmpdir, datafiles):
|
|
| 22 |
+ project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
| 23 |
+ |
|
| 24 |
+ # Create the repo from 'files' subdir
|
|
| 25 |
+ repo = create_repo('git', str(tmpdir))
|
|
| 26 |
+ ref = repo.create(os.path.join(project, 'files'))
|
|
| 27 |
+ |
|
| 28 |
+ # Write out test target with a cached and a non-cached source
|
|
| 29 |
+ element = {
|
|
| 30 |
+ 'kind': 'import',
|
|
| 31 |
+ 'sources': [
|
|
| 32 |
+ repo.source_config(ref=ref),
|
|
| 33 |
+ {
|
|
| 34 |
+ 'kind': 'always_cached'
|
|
| 35 |
+ }
|
|
| 36 |
+ ]
|
|
| 37 |
+ }
|
|
| 38 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
| 39 |
+ |
|
| 40 |
+ # Test fetch of target with a cached and a non-cached source
|
|
| 41 |
+ result = cli.run(project=project, args=[
|
|
| 42 |
+ 'source', 'fetch', 'target.bst'
|
|
| 43 |
+ ])
|
|
| 44 |
+ result.assert_success()
|
| ... | ... | @@ -153,6 +153,20 @@ class Result(): |
| 153 | 153 |
assert self.task_error_domain == error_domain, fail_message
|
| 154 | 154 |
assert self.task_error_reason == error_reason, fail_message
|
| 155 | 155 |
|
| 156 |
+ # assert_shell_error()
|
|
| 157 |
+ #
|
|
| 158 |
+ # Asserts that BuildStream created a shell and that the task in the
|
|
| 159 |
+ # shell failed.
|
|
| 160 |
+ #
|
|
| 161 |
+ # Args:
|
|
| 162 |
+ # fail_message (str): An optional message to override the automatic
|
|
| 163 |
+ # assertion error messages
|
|
| 164 |
+ # Raises:
|
|
| 165 |
+ # (AssertionError): If any of the assertions fail
|
|
| 166 |
+ #
|
|
| 167 |
+ def assert_shell_error(self, fail_message=''):
|
|
| 168 |
+ assert self.exit_code == 1, fail_message
|
|
| 169 |
+ |
|
| 156 | 170 |
# get_tracked_elements()
|
| 157 | 171 |
#
|
| 158 | 172 |
# Produces a list of element names on which tracking occurred
|
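A hedged usage sketch for the assert_shell_error() helper added above, matching how the build-tree tests call it (element and command are placeholders):

    res = cli.run(project=project, args=[
        'shell', '--build', element_name, '--', 'cat', 'test'
    ])
    # The shell itself launched, but the command inside it exited
    # non-zero, which maps to an overall exit code of 1
    res.assert_shell_error()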
