Jonathan Maw pushed to branch 328-support-for-downloading-sources-from-mirrors at BuildStream / buildstream
Commits:
- 60290f31 by Javier Jardón at 2018-07-23T15:29:37Z
- de162e43 by Javier Jardón at 2018-07-24T11:58:16Z
- 6106d657 by Josh Smith at 2018-07-25T10:50:14Z
- 4a637d63 by Jonathan Maw at 2018-07-25T12:05:19Z
- a0ad985f by Phil Dawson at 2018-07-25T13:03:23Z
- 95b4eae4 by Phil Dawson at 2018-07-25T13:03:23Z
- 889bf238 by Jim MacArthur at 2018-07-25T13:43:21Z
- 54fee6c8 by Phillip Smyth at 2018-07-25T14:15:36Z
- f62b6cb7 by Jonathan Maw at 2018-07-25T15:01:33Z
- 9961cd65 by Jonathan Maw at 2018-07-25T16:25:10Z
- a75928e9 by Jonathan Maw at 2018-07-25T16:25:10Z
- 0cfccfd8 by Jonathan Maw at 2018-07-25T16:34:54Z
- 28f18e08 by Jonathan Maw at 2018-07-25T16:35:40Z
- aca8b29e by Jonathan Maw at 2018-07-25T16:35:40Z
- ffbf293d by Jonathan Maw at 2018-07-25T16:35:40Z
- cece104e by Jonathan Maw at 2018-07-25T16:35:40Z
- 5feb2110 by Jonathan Maw at 2018-07-25T16:35:40Z
- 3cce90ba by Jonathan Maw at 2018-07-25T16:35:40Z
- ff3eccf0 by Jonathan Maw at 2018-07-25T16:35:40Z
- 6127f954 by Jonathan Maw at 2018-07-25T16:35:40Z
- 7ebadccb by Jonathan Maw at 2018-07-25T16:35:40Z
- c290fec3 by Jonathan Maw at 2018-07-25T16:35:40Z
25 changed files:
- NEWS
- buildstream/__init__.py
- buildstream/_artifactcache/cascache.py
- buildstream/_context.py
- buildstream/_frontend/app.py
- buildstream/_frontend/cli.py
- buildstream/_loader/loader.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/plugins/sources/bzr.py
- buildstream/plugins/sources/git.py
- buildstream/source.py
- buildstream/utils.py
- + doc/source/examples/git-mirror.rst
- + doc/source/examples/tar-mirror.rst
- doc/source/format_project.rst
- doc/source/install_linux_distro.rst
- doc/source/using_config.rst
- doc/source/using_examples.rst
- tests/completions/completions.py
- + tests/frontend/mirror.py
- + tests/frontend/project/sources/fetch_source.py
- tests/frontend/show.py
- tests/frontend/workspace.py
- tests/testutils/repo/repo.py
Changes:
| ... | ... | @@ -5,6 +5,10 @@ buildstream 1.3.1 |
| 5 | 5 |
o Add a `--tar` option to `bst checkout` which allows a tarball to be
|
| 6 | 6 |
created from the artifact contents.
|
| 7 | 7 |
|
| 8 |
+ o Fetching and tracking will consult mirrors defined in project config,
|
|
| 9 |
+ and the preferred mirror to fetch from can be defined in the command
|
|
| 10 |
+ line or user config.
|
|
| 11 |
+ |
|
| 8 | 12 |
=================
|
| 9 | 13 |
buildstream 1.1.4
|
| 10 | 14 |
=================
|
| ... | ... | @@ -29,7 +29,7 @@ if "_BST_COMPLETION" not in os.environ: |
| 29 | 29 |
from .utils import UtilError, ProgramNotFoundError
|
| 30 | 30 |
from .sandbox import Sandbox, SandboxFlags
|
| 31 | 31 |
from .plugin import Plugin
|
| 32 |
- from .source import Source, SourceError, Consistency
|
|
| 32 |
+ from .source import Source, SourceError, Consistency, SourceFetcher
|
|
| 33 | 33 |
from .element import Element, ElementError, Scope
|
| 34 | 34 |
from .buildelement import BuildElement
|
| 35 | 35 |
from .scriptelement import ScriptElement
|
| ... | ... | @@ -240,7 +240,6 @@ class CASCache(ArtifactCache): |
| 240 | 240 |
|
| 241 | 241 |
except grpc.RpcError as e:
|
| 242 | 242 |
if e.code() != grpc.StatusCode.NOT_FOUND:
|
| 243 |
- element.info("{} not found at remote {}".format(element._get_brief_display_key(), remote.spec.url))
|
|
| 244 | 243 |
raise
|
| 245 | 244 |
|
| 246 | 245 |
return False
|
| ... | ... | @@ -259,7 +259,7 @@ class Context(): |
| 259 | 259 |
# Shallow validation of overrides, parts of buildstream which rely
|
| 260 | 260 |
# on the overrides are expected to validate elsewhere.
|
| 261 | 261 |
for _, overrides in _yaml.node_items(self._project_overrides):
|
| 262 |
- _yaml.node_validate(overrides, ['artifacts', 'options', 'strict'])
|
|
| 262 |
+ _yaml.node_validate(overrides, ['artifacts', 'options', 'strict', 'default-mirror'])
|
|
| 263 | 263 |
|
| 264 | 264 |
profile_end(Topics.LOAD_CONTEXT, 'load')
|
| 265 | 265 |
|
| ... | ... | @@ -202,7 +202,8 @@ class App(): |
| 202 | 202 |
# Load the Project
|
| 203 | 203 |
#
|
| 204 | 204 |
try:
|
| 205 |
- self.project = Project(directory, self.context, cli_options=self._main_options['option'])
|
|
| 205 |
+ self.project = Project(directory, self.context, cli_options=self._main_options['option'],
|
|
| 206 |
+ default_mirror=self._main_options.get('default_mirror'))
|
|
| 206 | 207 |
except LoadError as e:
|
| 207 | 208 |
|
| 208 | 209 |
# Let's automatically start a `bst init` session in this case
|
| ... | ... | @@ -270,6 +271,10 @@ class App(): |
| 270 | 271 |
|
| 271 | 272 |
# Exit with the error
|
| 272 | 273 |
self._error_exit(e)
|
| 274 |
+ except RecursionError:
|
|
| 275 |
+ click.echo("RecursionError: Depency depth is too large. Maximum recursion depth exceeded.",
|
|
| 276 |
+ err=True)
|
|
| 277 |
+ sys.exit(-1)
|
|
| 273 | 278 |
|
| 274 | 279 |
else:
|
| 275 | 280 |
# No exceptions occurred, print session time and summary
|
| ... | ... | @@ -217,6 +217,8 @@ def print_version(ctx, param, value): |
| 217 | 217 |
help="Elements must be rebuilt when their dependencies have changed")
|
| 218 | 218 |
@click.option('--option', '-o', type=click.Tuple([str, str]), multiple=True, metavar='OPTION VALUE',
|
| 219 | 219 |
help="Specify a project option")
|
| 220 |
+@click.option('--default-mirror', default=None,
|
|
| 221 |
+ help="The mirror to fetch from first, before attempting other mirrors")
|
|
| 220 | 222 |
@click.pass_context
|
| 221 | 223 |
def cli(context, **kwargs):
|
| 222 | 224 |
"""Build and manipulate BuildStream projects
|
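With this option in place, the preferred mirror is chosen per invocation, before the subcommand, e.g. ``bst --default-mirror middle-earth fetch element.bst`` (the mirror and element names here are illustrative); the other mirrors and the upstream alias are still consulted if the preferred one fails.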
| ... | ... | @@ -513,7 +513,7 @@ class Loader(): |
| 513 | 513 |
if self._fetch_subprojects:
|
| 514 | 514 |
if ticker:
|
| 515 | 515 |
ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
|
| 516 |
- source.fetch()
|
|
| 516 |
+ source._fetch()
|
|
| 517 | 517 |
else:
|
| 518 | 518 |
detail = "Try fetching the project with `bst fetch {}`".format(filename)
|
| 519 | 519 |
raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,
|
| ... | ... | @@ -19,7 +19,7 @@ |
| 19 | 19 |
|
| 20 | 20 |
import os
|
| 21 | 21 |
import multiprocessing # for cpu_count()
|
| 22 |
-from collections import Mapping
|
|
| 22 |
+from collections import Mapping, OrderedDict
|
|
| 23 | 23 |
from pluginbase import PluginBase
|
| 24 | 24 |
from . import utils
|
| 25 | 25 |
from . import _cachekey
|
| ... | ... | @@ -35,9 +35,6 @@ from ._projectrefs import ProjectRefs, ProjectRefStorage |
| 35 | 35 |
from ._versions import BST_FORMAT_VERSION
|
| 36 | 36 |
|
| 37 | 37 |
|
| 38 |
-# The separator we use for user specified aliases
|
|
| 39 |
-_ALIAS_SEPARATOR = ':'
|
|
| 40 |
- |
|
| 41 | 38 |
# Project Configuration file
|
| 42 | 39 |
_PROJECT_CONF_FILE = 'project.conf'
|
| 43 | 40 |
|
| ... | ... | @@ -70,7 +67,7 @@ class HostMount(): |
| 70 | 67 |
#
|
| 71 | 68 |
class Project():
|
| 72 | 69 |
|
| 73 |
- def __init__(self, directory, context, *, junction=None, cli_options=None):
|
|
| 70 |
+ def __init__(self, directory, context, *, junction=None, cli_options=None, default_mirror=None):
|
|
| 74 | 71 |
|
| 75 | 72 |
# The project name
|
| 76 | 73 |
self.name = None
|
| ... | ... | @@ -94,6 +91,8 @@ class Project(): |
| 94 | 91 |
self.base_env_nocache = None # The base nocache mask (list) for the environment
|
| 95 | 92 |
self.element_overrides = {} # Element specific configurations
|
| 96 | 93 |
self.source_overrides = {} # Source specific configurations
|
| 94 |
+ self.mirrors = OrderedDict() # contains dicts of alias-mappings to URIs.
|
|
| 95 |
+ self.default_mirror = default_mirror # The name of the preferred mirror.
|
|
| 97 | 96 |
|
| 98 | 97 |
#
|
| 99 | 98 |
# Private Members
|
| ... | ... | @@ -133,8 +132,8 @@ class Project(): |
| 133 | 132 |
# fully qualified urls based on the shorthand which is allowed
|
| 134 | 133 |
# to be specified in the YAML
|
| 135 | 134 |
def translate_url(self, url):
|
| 136 |
- if url and _ALIAS_SEPARATOR in url:
|
|
| 137 |
- url_alias, url_body = url.split(_ALIAS_SEPARATOR, 1)
|
|
| 135 |
+ if url and utils._ALIAS_SEPARATOR in url:
|
|
| 136 |
+ url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
|
|
| 138 | 137 |
alias_url = self._aliases.get(url_alias)
|
| 139 | 138 |
if alias_url:
|
| 140 | 139 |
url = alias_url + url_body
|
| ... | ... | @@ -202,6 +201,36 @@ class Project(): |
| 202 | 201 |
self._assert_plugin_format(source, version)
|
| 203 | 202 |
return source
|
| 204 | 203 |
|
| 204 |
+ # get_alias_uri()
|
|
| 205 |
+ #
|
|
| 206 |
+ # Returns the URI for a given alias, if it exists
|
|
| 207 |
+ #
|
|
| 208 |
+ # Args:
|
|
| 209 |
+ # alias (str): The alias.
|
|
| 210 |
+ #
|
|
| 211 |
+ # Returns:
|
|
| 212 |
+ # str: The URI for the given alias; or None: if there is no URI for
|
|
| 213 |
+ # that alias.
|
|
| 214 |
+ def get_alias_uri(self, alias):
|
|
| 215 |
+ return self._aliases.get(alias)
|
|
| 216 |
+ |
|
| 217 |
+ # get_alias_uris()
|
|
| 218 |
+ #
|
|
| 219 |
+ # Returns a list of every URI to replace an alias with
|
|
| 220 |
+ def get_alias_uris(self, alias):
|
|
| 221 |
+ if not alias or alias not in self._aliases:
|
|
| 222 |
+ return [None]
|
|
| 223 |
+ |
|
| 224 |
+ mirror_list = []
|
|
| 225 |
+ for key, alias_mapping in self.mirrors.items():
|
|
| 226 |
+ if alias in alias_mapping:
|
|
| 227 |
+ if key == self.default_mirror:
|
|
| 228 |
+ mirror_list = alias_mapping[alias] + mirror_list
|
|
| 229 |
+ else:
|
|
| 230 |
+ mirror_list += alias_mapping[alias]
|
|
| 231 |
+ mirror_list.append(self._aliases[alias])
|
|
| 232 |
+ return mirror_list
|
|
| 233 |
+ |
|
| 205 | 234 |
# _load():
|
| 206 | 235 |
#
|
| 207 | 236 |
# Loads the project configuration file in the project directory.
|
| ... | ... | @@ -249,7 +278,7 @@ class Project(): |
| 249 | 278 |
'aliases', 'name',
|
| 250 | 279 |
'artifacts', 'options',
|
| 251 | 280 |
'fail-on-overlap', 'shell',
|
| 252 |
- 'ref-storage', 'sandbox'
|
|
| 281 |
+ 'ref-storage', 'sandbox', 'mirrors',
|
|
| 253 | 282 |
])
|
| 254 | 283 |
|
| 255 | 284 |
# The project name, element path and option declarations
|
| ... | ... | @@ -290,6 +319,10 @@ class Project(): |
| 290 | 319 |
#
|
| 291 | 320 |
self.options.process_node(config)
|
| 292 | 321 |
|
| 322 |
+ # Override default_mirror if not set by command-line
|
|
| 323 |
+ if not self.default_mirror:
|
|
| 324 |
+ self.default_mirror = _yaml.node_get(overrides, str, 'default-mirror', default_value=None)
|
|
| 325 |
+ |
|
| 293 | 326 |
#
|
| 294 | 327 |
# Now all YAML composition is done, from here on we just load
|
| 295 | 328 |
# the values from our loaded configuration dictionary.
|
| ... | ... | @@ -414,6 +447,21 @@ class Project(): |
| 414 | 447 |
|
| 415 | 448 |
self._shell_host_files.append(mount)
|
| 416 | 449 |
|
| 450 |
+ mirrors = _yaml.node_get(config, list, 'mirrors', default_value=[])
|
|
| 451 |
+ for mirror in mirrors:
|
|
| 452 |
+ allowed_mirror_fields = [
|
|
| 453 |
+ 'name', 'aliases'
|
|
| 454 |
+ ]
|
|
| 455 |
+ _yaml.node_validate(mirror, allowed_mirror_fields)
|
|
| 456 |
+ mirror_name = _yaml.node_get(mirror, str, 'name')
|
|
| 457 |
+ alias_mappings = {}
|
|
| 458 |
+ for alias_mapping, uris in _yaml.node_items(mirror['aliases']):
|
|
| 459 |
+ assert isinstance(uris, list)
|
|
| 460 |
+ alias_mappings[alias_mapping] = list(uris)
|
|
| 461 |
+ self.mirrors[mirror_name] = alias_mappings
|
|
| 462 |
+ if not self.default_mirror:
|
|
| 463 |
+ self.default_mirror = mirror_name
|
|
| 464 |
+ |
|
| 417 | 465 |
# _assert_plugin_format()
|
| 418 | 466 |
#
|
| 419 | 467 |
# Helper to raise a PluginError if the loaded plugin is of a lesser version then
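To make the lookup order concrete, here is a minimal standalone sketch of what get_alias_uris() above computes; the mirror names and URIs are made up for illustration:

    # Standalone sketch of the Project.get_alias_uris() ordering (illustrative names/URIs)
    from collections import OrderedDict

    mirrors = OrderedDict([
        ('middle-earth', {'foo': ['http://mirror-a.example.com/foo/']}),
        ('oz', {'foo': ['http://mirror-b.example.com/foo/']}),
    ])
    aliases = {'foo': 'http://upstream.example.com/foo/'}
    default_mirror = 'oz'

    def get_alias_uris(alias):
        if not alias or alias not in aliases:
            return [None]
        uris = []
        for name, mapping in mirrors.items():
            if alias in mapping:
                if name == default_mirror:
                    uris = mapping[alias] + uris    # the default mirror's URIs go first
                else:
                    uris += mapping[alias]
        uris.append(aliases[alias])                 # the upstream alias URI is tried last
        return uris

    # Fetching tries mirror-b (the default), then mirror-a, then upstream;
    # tracking walks the same list in reverse.
    print(get_alias_uris('foo'))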
|
| ... | ... | @@ -476,7 +476,7 @@ class Stream(): |
| 476 | 476 |
|
| 477 | 477 |
# Check for workspace config
|
| 478 | 478 |
workspace = workspaces.get_workspace(target._get_full_name())
|
| 479 |
- if workspace:
|
|
| 479 |
+ if workspace and not force:
|
|
| 480 | 480 |
raise StreamError("Workspace '{}' is already defined at: {}"
|
| 481 | 481 |
.format(target.name, workspace.path))
|
| 482 | 482 |
|
| ... | ... | @@ -495,6 +495,10 @@ class Stream(): |
| 495 | 495 |
"fetch the latest version of the " +
|
| 496 | 496 |
"source.")
|
| 497 | 497 |
|
| 498 |
+ if workspace:
|
|
| 499 |
+ workspaces.delete_workspace(target._get_full_name())
|
|
| 500 |
+ workspaces.save_config()
|
|
| 501 |
+ shutil.rmtree(directory)
|
|
| 498 | 502 |
try:
|
| 499 | 503 |
os.makedirs(directory, exist_ok=True)
|
| 500 | 504 |
except OSError as e:
|
| ... | ... | @@ -102,7 +102,7 @@ class BzrSource(Source): |
| 102 | 102 |
def track(self):
|
| 103 | 103 |
with self.timed_activity("Tracking {}".format(self.url),
|
| 104 | 104 |
silent_nested=True):
|
| 105 |
- self._ensure_mirror()
|
|
| 105 |
+ self._ensure_mirror(skip_ref_check=True)
|
|
| 106 | 106 |
ret, out = self.check_output([self.host_bzr, "version-info",
|
| 107 | 107 |
"--custom", "--template={revno}",
|
| 108 | 108 |
self._get_branch_dir()],
|
| ... | ... | @@ -214,7 +214,7 @@ class BzrSource(Source): |
| 214 | 214 |
yield repodir
|
| 215 | 215 |
self._atomic_replace_mirrordir(repodir)
|
| 216 | 216 |
|
| 217 |
- def _ensure_mirror(self):
|
|
| 217 |
+ def _ensure_mirror(self, skip_ref_check=False):
|
|
| 218 | 218 |
with self._atomic_repodir() as repodir:
|
| 219 | 219 |
# Initialize repo if no metadata
|
| 220 | 220 |
bzr_metadata_dir = os.path.join(repodir, ".bzr")
|
| ... | ... | @@ -223,18 +223,21 @@ class BzrSource(Source): |
| 223 | 223 |
fail="Failed to initialize bzr repository")
|
| 224 | 224 |
|
| 225 | 225 |
branch_dir = os.path.join(repodir, self.tracking)
|
| 226 |
+ branch_url = self.url + "/" + self.tracking
|
|
| 226 | 227 |
if not os.path.exists(branch_dir):
|
| 227 | 228 |
# `bzr branch` the branch if it doesn't exist
|
| 228 | 229 |
# to get the upstream code
|
| 229 |
- branch_url = self.url + "/" + self.tracking
|
|
| 230 | 230 |
self.call([self.host_bzr, "branch", branch_url, branch_dir],
|
| 231 | 231 |
fail="Failed to branch from {} to {}".format(branch_url, branch_dir))
|
| 232 | 232 |
|
| 233 | 233 |
else:
|
| 234 | 234 |
# `bzr pull` the branch if it does exist
|
| 235 | 235 |
# to get any changes to the upstream code
|
| 236 |
- self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir)],
|
|
| 236 |
+ self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
|
|
| 237 | 237 |
fail="Failed to pull new changes for {}".format(branch_dir))
|
| 238 |
+ if not skip_ref_check and not self._check_ref():
|
|
| 239 |
+ raise SourceError("Failed to ensure ref '{}' was mirrored".format(self.ref),
|
|
| 240 |
+ reason="ref-not-mirrored")
|
|
| 238 | 241 |
|
| 239 | 242 |
|
| 240 | 243 |
def setup():
|
| ... | ... | @@ -78,7 +78,7 @@ from io import StringIO |
| 78 | 78 |
|
| 79 | 79 |
from configparser import RawConfigParser
|
| 80 | 80 |
|
| 81 |
-from buildstream import Source, SourceError, Consistency
|
|
| 81 |
+from buildstream import Source, SourceError, Consistency, SourceFetcher
|
|
| 82 | 82 |
from buildstream import utils
|
| 83 | 83 |
|
| 84 | 84 |
GIT_MODULES = '.gitmodules'
|
| ... | ... | @@ -88,18 +88,22 @@ GIT_MODULES = '.gitmodules' |
| 88 | 88 |
# for the primary git source and also for each submodule it
|
| 89 | 89 |
# might have at a given time
|
| 90 | 90 |
#
|
| 91 |
-class GitMirror():
|
|
| 91 |
+class GitMirror(SourceFetcher):
|
|
| 92 | 92 |
|
| 93 | 93 |
def __init__(self, source, path, url, ref):
|
| 94 | 94 |
|
| 95 |
+ super().__init__()
|
|
| 95 | 96 |
self.source = source
|
| 96 | 97 |
self.path = path
|
| 97 |
- self.url = source.translate_url(url)
|
|
| 98 |
+ self.url = url
|
|
| 98 | 99 |
self.ref = ref
|
| 99 |
- self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(self.url))
|
|
| 100 |
+ self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
|
|
| 101 |
+ |
|
| 102 |
+ alias, _ = self.source.split_aliased_url(self.url)
|
|
| 103 |
+ self.set_alias(alias)
|
|
| 100 | 104 |
|
| 101 | 105 |
# Ensures that the mirror exists
|
| 102 |
- def ensure(self):
|
|
| 106 |
+ def ensure(self, alias_override=None):
|
|
| 103 | 107 |
|
| 104 | 108 |
# Unfortunately, git does not know how to only clone just a specific ref,
|
| 105 | 109 |
# so we have to download all of those gigs even if we only need a couple
|
| ... | ... | @@ -112,20 +116,49 @@ class GitMirror(): |
| 112 | 116 |
# system configured tmpdir is not on the same partition.
|
| 113 | 117 |
#
|
| 114 | 118 |
with self.source.tempdir() as tmpdir:
|
| 115 |
- self.source.call([self.source.host_git, 'clone', '--mirror', '-n', self.url, tmpdir],
|
|
| 116 |
- fail="Failed to clone git repository {}".format(self.url))
|
|
| 119 |
+ url = self.source.translate_url(self.url, alias_override)
|
|
| 120 |
+ self.source.call([self.source.host_git, 'clone', '--mirror', '-n', url, tmpdir],
|
|
| 121 |
+ fail="Failed to clone git repository {}".format(url))
|
|
| 117 | 122 |
|
| 118 | 123 |
try:
|
| 119 | 124 |
shutil.move(tmpdir, self.mirror)
|
| 120 | 125 |
except (shutil.Error, OSError) as e:
|
| 121 | 126 |
raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}'"
|
| 122 |
- .format(self.source, self.url, tmpdir, self.mirror)) from e
|
|
| 127 |
+ .format(self.source, url, tmpdir, self.mirror)) from e
|
|
| 128 |
+ |
|
| 129 |
+ def _fetch(self, alias_override=None):
|
|
| 130 |
+ url = self.source.translate_url(self.url, alias_override)
|
|
| 131 |
+ |
|
| 132 |
+ if alias_override:
|
|
| 133 |
+ remote_name = utils.url_directory_name(alias_override)
|
|
| 134 |
+ _, remotes = self.source.check_output(
|
|
| 135 |
+ [self.source.host_git, 'remote'],
|
|
| 136 |
+ fail="Failed to retrieve list of remotes in {}".format(self.mirror),
|
|
| 137 |
+ cwd=self.mirror
|
|
| 138 |
+ )
|
|
| 139 |
+ if remote_name not in remotes:
|
|
| 140 |
+ self.source.call(
|
|
| 141 |
+ [self.source.host_git, 'remote', 'add', remote_name, url],
|
|
| 142 |
+ fail="Failed to add remote {} with url {}".format(remote_name, url),
|
|
| 143 |
+ cwd=self.mirror
|
|
| 144 |
+ )
|
|
| 145 |
+ else:
|
|
| 146 |
+ remote_name = "origin"
|
|
| 123 | 147 |
|
| 124 |
- def fetch(self):
|
|
| 125 |
- self.source.call([self.source.host_git, 'fetch', 'origin', '--prune'],
|
|
| 126 |
- fail="Failed to fetch from remote git repository: {}".format(self.url),
|
|
| 148 |
+ self.source.call([self.source.host_git, 'fetch', remote_name, '--prune'],
|
|
| 149 |
+ fail="Failed to fetch from remote git repository: {}".format(url),
|
|
| 127 | 150 |
cwd=self.mirror)
|
| 128 | 151 |
|
| 152 |
+ def fetch(self, alias_override=None):
|
|
| 153 |
+ self.ensure(alias_override)
|
|
| 154 |
+ if not self.has_ref():
|
|
| 155 |
+ self._fetch(alias_override)
|
|
| 156 |
+ self.assert_ref()
|
|
| 157 |
+ |
|
| 158 |
+ def get_alias(self):
|
|
| 159 |
+ alias, _ = self.source.split_aliased_url(self.url)
|
|
| 160 |
+ return alias
|
|
| 161 |
+ |
|
| 129 | 162 |
def has_ref(self):
|
| 130 | 163 |
if not self.ref:
|
| 131 | 164 |
return False
|
| ... | ... | @@ -168,12 +201,13 @@ class GitMirror(): |
| 168 | 201 |
|
| 169 | 202 |
def init_workspace(self, directory):
|
| 170 | 203 |
fullpath = os.path.join(directory, self.path)
|
| 204 |
+ url = self.source.translate_url(self.url)
|
|
| 171 | 205 |
|
| 172 | 206 |
self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
|
| 173 | 207 |
fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath))
|
| 174 | 208 |
|
| 175 |
- self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', self.url],
|
|
| 176 |
- fail='Failed to add remote origin "{}"'.format(self.url),
|
|
| 209 |
+ self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', url],
|
|
| 210 |
+ fail='Failed to add remote origin "{}"'.format(url),
|
|
| 177 | 211 |
cwd=fullpath)
|
| 178 | 212 |
|
| 179 | 213 |
self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
|
| ... | ... | @@ -273,6 +307,10 @@ class GitSource(Source): |
| 273 | 307 |
checkout = self.node_get_member(submodule, bool, 'checkout')
|
| 274 | 308 |
self.submodule_checkout_overrides[path] = checkout
|
| 275 | 309 |
|
| 310 |
+ # Set the alias, because it's not implicitly set by a translate_url call.
|
|
| 311 |
+ alias, _ = self.split_aliased_url(self.original_url)
|
|
| 312 |
+ self.set_alias(alias)
|
|
| 313 |
+ |
|
| 276 | 314 |
def preflight(self):
|
| 277 | 315 |
# Check if git is installed, get the binary at the same time
|
| 278 | 316 |
self.host_git = utils.get_host_tool('git')
|
| ... | ... | @@ -324,31 +362,13 @@ class GitSource(Source): |
| 324 | 362 |
.format(self.tracking, self.mirror.url),
|
| 325 | 363 |
silent_nested=True):
|
| 326 | 364 |
self.mirror.ensure()
|
| 327 |
- self.mirror.fetch()
|
|
| 365 |
+ self.mirror._fetch()
|
|
| 328 | 366 |
|
| 329 | 367 |
# Update self.mirror.ref and node.ref from the self.tracking branch
|
| 330 | 368 |
ret = self.mirror.latest_commit(self.tracking)
|
| 331 | 369 |
|
| 332 | 370 |
return ret
|
| 333 | 371 |
|
| 334 |
- def fetch(self):
|
|
| 335 |
- |
|
| 336 |
- with self.timed_activity("Fetching {}".format(self.mirror.url), silent_nested=True):
|
|
| 337 |
- |
|
| 338 |
- # Here we are only interested in ensuring that our mirror contains
|
|
| 339 |
- # the self.mirror.ref commit.
|
|
| 340 |
- self.mirror.ensure()
|
|
| 341 |
- if not self.mirror.has_ref():
|
|
| 342 |
- self.mirror.fetch()
|
|
| 343 |
- |
|
| 344 |
- self.mirror.assert_ref()
|
|
| 345 |
- |
|
| 346 |
- # Here after performing any fetches, we need to also ensure that
|
|
| 347 |
- # we've cached the desired refs in our mirrors of submodules.
|
|
| 348 |
- #
|
|
| 349 |
- self.refresh_submodules()
|
|
| 350 |
- self.fetch_submodules()
|
|
| 351 |
- |
|
| 352 | 372 |
def init_workspace(self, directory):
|
| 353 | 373 |
# XXX: may wish to refactor this as some code dupe with stage()
|
| 354 | 374 |
self.refresh_submodules()
|
| ... | ... | @@ -380,6 +400,10 @@ class GitSource(Source): |
| 380 | 400 |
if checkout:
|
| 381 | 401 |
mirror.stage(directory)
|
| 382 | 402 |
|
| 403 |
+ def get_source_fetchers(self):
|
|
| 404 |
+ self.refresh_submodules()
|
|
| 405 |
+ return [self.mirror] + self.submodules
|
|
| 406 |
+ |
|
| 383 | 407 |
###########################################################
|
| 384 | 408 |
# Local Functions #
|
| 385 | 409 |
###########################################################
|
| ... | ... | @@ -401,6 +425,7 @@ class GitSource(Source): |
| 401 | 425 |
# Assumes that we have our mirror and we have the ref which we point to
|
| 402 | 426 |
#
|
| 403 | 427 |
def refresh_submodules(self):
|
| 428 |
+ self.mirror.ensure()
|
|
| 404 | 429 |
submodules = []
|
| 405 | 430 |
|
| 406 | 431 |
# XXX Here we should issue a warning if either:
|
| ... | ... | @@ -422,19 +447,6 @@ class GitSource(Source): |
| 422 | 447 |
|
| 423 | 448 |
self.submodules = submodules
|
| 424 | 449 |
|
| 425 |
- # Ensures that we have mirrored git repositories for all
|
|
| 426 |
- # the submodules existing at the given commit of the main git source.
|
|
| 427 |
- #
|
|
| 428 |
- # Also ensure that these mirrors have the required commits
|
|
| 429 |
- # referred to at the given commit of the main git source.
|
|
| 430 |
- #
|
|
| 431 |
- def fetch_submodules(self):
|
|
| 432 |
- for mirror in self.submodules:
|
|
| 433 |
- mirror.ensure()
|
|
| 434 |
- if not mirror.has_ref():
|
|
| 435 |
- mirror.fetch()
|
|
| 436 |
- mirror.assert_ref()
|
|
| 437 |
- |
|
| 438 | 450 |
|
| 439 | 451 |
# Plugin entry point
|
| 440 | 452 |
def setup():
|
| ... | ... | @@ -65,6 +65,33 @@ these methods are mandatory to implement. |
| 65 | 65 |
|
| 66 | 66 |
**Optional**: If left unimplemented, this will default to calling
|
| 67 | 67 |
:func:`Source.stage() <buildstream.source.Source.stage>`
|
| 68 |
+ |
|
| 69 |
+* :func:`Source.get_source_fetchers() <buildstream.source.Source.get_source_fetchers>`
|
|
| 70 |
+ |
|
| 71 |
+ Get the objects that are used for fetching.
|
|
| 72 |
+ |
|
| 73 |
+ **Optional**: This only needs to be implemented for sources that need to
|
|
| 74 |
+ download from multiple URLs while fetching (e.g. a git repo and its
|
|
| 75 |
+ submodules). For details on how to define a SourceFetcher, see
|
|
| 76 |
+ :ref:`SourceFetcher <core_source_fetcher>`.
|
|
| 77 |
+ |
|
| 78 |
+ |
|
| 79 |
+.. _core_source_fetcher:
|
|
| 80 |
+ |
|
| 81 |
+SourceFetcher - Object for fetching individual URLs
|
|
| 82 |
+===================================================
|
|
| 83 |
+ |
|
| 84 |
+ |
|
| 85 |
+Abstract Methods
|
|
| 86 |
+----------------
|
|
| 87 |
+SourceFetchers expose the following abstract methods. Unless explicitly
|
|
| 88 |
+mentioned, these methods are mandatory to implement.
|
|
| 89 |
+ |
|
| 90 |
+* :func:`SourceFetcher.fetch() <buildstream.source.SourceFetcher.fetch>`
|
|
| 91 |
+ |
|
| 92 |
+ Fetches the URL associated with this SourceFetcher, optionally taking an
|
|
| 93 |
+ alias override.
|
|
| 94 |
+ |
|
| 68 | 95 |
"""
|
| 69 | 96 |
|
| 70 | 97 |
import os
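For illustration, a source plugin could implement the new interface along these lines; this is a sketch only, and the TarballFetcher name and the download_archive() helper are assumptions rather than BuildStream API:

    from buildstream import SourceError, SourceFetcher

    class TarballFetcher(SourceFetcher):

        def __init__(self, source, url):
            super().__init__()
            self.source = source
            self.url = url
            # Record which alias this URL uses so that BuildStream can
            # substitute mirror URIs for it when fetching
            alias, _ = source.split_aliased_url(url)
            self.set_alias(alias)

        def fetch(self, alias_override=None):
            # Resolve the alias (or a mirror override) into a concrete URL
            url = self.source.translate_url(self.url, alias_override)
            try:
                self.source.download_archive(url)   # hypothetical helper on the owning Source
            except OSError as e:
                raise SourceError("Failed to fetch {}".format(url)) from e

The owning Source would return such fetchers from get_source_fetchers(), and the _fetch() wrapper below retries each fetcher across the mirror URIs configured for its alias.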
|
| ... | ... | @@ -113,6 +140,63 @@ class SourceError(BstError): |
| 113 | 140 |
super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason)
|
| 114 | 141 |
|
| 115 | 142 |
|
| 143 |
+class SourceFetcher():
|
|
| 144 |
+ """SourceFetcher()
|
|
| 145 |
+ |
|
| 146 |
+ This interface exists so that a source that downloads from multiple
|
|
| 147 |
+ places (e.g. a git source with submodules) has a consistent interface for
|
|
| 148 |
+ fetching and substituting aliases.
|
|
| 149 |
+ |
|
| 150 |
+ *Since: 1.4*
|
|
| 151 |
+ """
|
|
| 152 |
+ def __init__(self):
|
|
| 153 |
+ self.__alias = None
|
|
| 154 |
+ |
|
| 155 |
+ #############################################################
|
|
| 156 |
+ # Abstract Methods #
|
|
| 157 |
+ #############################################################
|
|
| 158 |
+ def fetch(self, alias_override=None):
|
|
| 159 |
+ """Fetch remote sources and mirror them locally, ensuring at least
|
|
| 160 |
+ that the specific reference is cached locally.
|
|
| 161 |
+ |
|
| 162 |
+ Args:
|
|
| 163 |
+ alias_override (str): The alias to use instead of the default one
|
|
| 164 |
+ defined by the :ref:`aliases <project_source_aliases>` field
|
|
| 165 |
+ in the project's config.
|
|
| 166 |
+ |
|
| 167 |
+ Raises:
|
|
| 168 |
+ :class:`.SourceError`
|
|
| 169 |
+ |
|
| 170 |
+ Implementors should raise :class:`.SourceError` if there is some
|
|
| 171 |
+ network error or if the source reference could not be matched.
|
|
| 172 |
+ """
|
|
| 173 |
+ raise ImplError("Source fetcher '{}' does not implement fetch()".format(type(self)))
|
|
| 174 |
+ |
|
| 175 |
+ #############################################################
|
|
| 176 |
+ # Public Methods #
|
|
| 177 |
+ #############################################################
|
|
| 178 |
+ def set_alias(self, alias):
|
|
| 179 |
+ """Sets the alias used by this fetcher
|
|
| 180 |
+ |
|
| 181 |
+ This must be called during the fetcher's initialization, so that the
|
|
| 182 |
+ correct alias_override can be provided to
|
|
| 183 |
+ :func:`fetch()<buildstream.source.SourceFetcher.fetch>`.
|
|
| 184 |
+ |
|
| 185 |
+ Args:
|
|
| 186 |
+ alias (str): The alias component of the SourceFetcher's URL, as
|
|
| 187 |
+ described by :ref:`Source aliases<project_source_aliases>`.
|
|
| 188 |
+ """
|
|
| 189 |
+ self.__alias = alias
|
|
| 190 |
+ |
|
| 191 |
+ #############################################################
|
|
| 192 |
+ # Private Methods used in BuildStream #
|
|
| 193 |
+ #############################################################
|
|
| 194 |
+ |
|
| 195 |
+ # Returns the alias used by this fetcher
|
|
| 196 |
+ def _get_alias(self):
|
|
| 197 |
+ return self.__alias
|
|
| 198 |
+ |
|
| 199 |
+ |
|
| 116 | 200 |
class Source(Plugin):
|
| 117 | 201 |
"""Source()
|
| 118 | 202 |
|
| ... | ... | @@ -124,7 +208,7 @@ class Source(Plugin): |
| 124 | 208 |
__defaults = {} # The defaults from the project
|
| 125 | 209 |
__defaults_set = False # Flag, in case there are not defaults at all
|
| 126 | 210 |
|
| 127 |
- def __init__(self, context, project, meta):
|
|
| 211 |
+ def __init__(self, context, project, meta, *, alias_override=None):
|
|
| 128 | 212 |
provenance = _yaml.node_get_provenance(meta.config)
|
| 129 | 213 |
super().__init__("{}-{}".format(meta.element_name, meta.element_index),
|
| 130 | 214 |
context, project, provenance, "source")
|
| ... | ... | @@ -134,6 +218,11 @@ class Source(Plugin): |
| 134 | 218 |
self.__element_kind = meta.element_kind # The kind of the element owning this source
|
| 135 | 219 |
self.__directory = meta.directory # Staging relative directory
|
| 136 | 220 |
self.__consistency = Consistency.INCONSISTENT # Cached consistency state
|
| 221 |
+ self.__alias_override = alias_override # Tuple of alias and its override to use instead
|
|
| 222 |
+ self.__expected_alias = None # A hacky way to store the first alias used
|
|
| 223 |
+ |
|
| 224 |
+ # FIXME: Reconstruct a MetaSource from a Source instead of storing it.
|
|
| 225 |
+ self.__meta = meta # MetaSource stored so we can copy this source later.
|
|
| 137 | 226 |
|
| 138 | 227 |
# Collect the composited element configuration and
|
| 139 | 228 |
# ask the element to configure itself.
|
| ... | ... | @@ -283,6 +372,34 @@ class Source(Plugin): |
| 283 | 372 |
"""
|
| 284 | 373 |
self.stage(directory)
|
| 285 | 374 |
|
| 375 |
+ def set_alias(self, alias):
|
|
| 376 |
+ """Set the alias of the source's URL
|
|
| 377 |
+ |
|
| 378 |
+ A source's alias should be set using set_alias if it does not call
|
|
| 379 |
+ :func:`~buildstream.source.Source.translate_url` during
|
|
| 380 |
+ :func:`~buildstream.plugin.Plugin.configure`.
|
|
| 381 |
+ |
|
| 382 |
+ Args:
|
|
| 383 |
+ (str) alias: A URL alias
|
|
| 384 |
+ """
|
|
| 385 |
+ self.__expected_alias = alias
|
|
| 386 |
+ |
|
| 387 |
+ def get_source_fetchers(self):
|
|
| 388 |
+ """Get the objects that are used for fetching
|
|
| 389 |
+ |
|
| 390 |
+ If this source doesn't download from multiple URLs,
|
|
| 391 |
+ returning None and falling back on the default behaviour
|
|
| 392 |
+ is recommended.
|
|
| 393 |
+ |
|
| 394 |
+ Returns:
|
|
| 395 |
+ list: A list of SourceFetchers. If SourceFetchers are not supported,
|
|
| 396 |
+ this will be an empty list.
|
|
| 397 |
+ |
|
| 398 |
+ *Since: 1.4*
|
|
| 399 |
+ """
|
|
| 400 |
+ |
|
| 401 |
+ return []
|
|
| 402 |
+ |
|
| 286 | 403 |
#############################################################
|
| 287 | 404 |
# Public Methods #
|
| 288 | 405 |
#############################################################
|
| ... | ... | @@ -299,18 +416,42 @@ class Source(Plugin): |
| 299 | 416 |
os.makedirs(directory, exist_ok=True)
|
| 300 | 417 |
return directory
|
| 301 | 418 |
|
| 302 |
- def translate_url(self, url):
|
|
| 419 |
+ def translate_url(self, url, alias_override=None):
|
|
| 303 | 420 |
"""Translates the given url which may be specified with an alias
|
| 304 | 421 |
into a fully qualified url.
|
| 305 | 422 |
|
| 306 | 423 |
Args:
|
| 307 | 424 |
url (str): A url, which may be using an alias
|
| 425 |
+ alias_override (str): Optionally, a URI to override the alias with.
|
|
| 308 | 426 |
|
| 309 | 427 |
Returns:
|
| 310 | 428 |
str: The fully qualified url, with aliases resolved
|
| 311 | 429 |
"""
|
| 312 |
- project = self._get_project()
|
|
| 313 |
- return project.translate_url(url)
|
|
| 430 |
+ # Alias overriding can happen explicitly (by command-line) or
|
|
| 431 |
+ # implicitly (the Source being constructed with an __alias_override).
|
|
| 432 |
+ if alias_override or self.__alias_override:
|
|
| 433 |
+ url_alias, url_body = self.split_aliased_url(url)
|
|
| 434 |
+ if url_alias:
|
|
| 435 |
+ if alias_override:
|
|
| 436 |
+ url = alias_override + url_body
|
|
| 437 |
+ else:
|
|
| 438 |
+ # Implicit alias overrides may only be done for one
|
|
| 439 |
+ # specific alias, so that sources that fetch from multiple
|
|
| 440 |
+ # URLs and use different aliases default to only overriding
|
|
| 441 |
+ # one alias, rather than getting confused.
|
|
| 442 |
+ override_alias = self.__alias_override[0]
|
|
| 443 |
+ override_url = self.__alias_override[1]
|
|
| 444 |
+ if url_alias == override_alias:
|
|
| 445 |
+ url = override_url + url_body
|
|
| 446 |
+ return url
|
|
| 447 |
+ else:
|
|
| 448 |
+ # Sneakily store the alias if it hasn't already been stored
|
|
| 449 |
+ if not self.__expected_alias and url and utils._ALIAS_SEPARATOR in url:
|
|
| 450 |
+ url_alias, _ = url.split(utils._ALIAS_SEPARATOR, 1)
|
|
| 451 |
+ self.__expected_alias = url_alias
|
|
| 452 |
+ |
|
| 453 |
+ project = self._get_project()
|
|
| 454 |
+ return project.translate_url(url)
|
|
| 314 | 455 |
|
| 315 | 456 |
def get_project_directory(self):
|
| 316 | 457 |
"""Fetch the project base directory
|
| ... | ... | @@ -340,6 +481,19 @@ class Source(Plugin): |
| 340 | 481 |
with utils._tempdir(dir=mirrordir) as tempdir:
|
| 341 | 482 |
yield tempdir
|
| 342 | 483 |
|
| 484 |
+ def split_aliased_url(self, url):
|
|
| 485 |
+ """Helper for splitting an URL into its alias and body
|
|
| 486 |
+ |
|
| 487 |
+ Returns:
|
|
| 488 |
+ (str): The alias component of the URL, or None if there is no alias
|
|
| 489 |
+ (str): The body of the URL, or None if there is no alias
|
|
| 490 |
+ """
|
|
| 491 |
+ if utils._ALIAS_SEPARATOR in url:
|
|
| 492 |
+ alias, body = url.split(utils._ALIAS_SEPARATOR, 1)
|
|
| 493 |
+ return alias, body
|
|
| 494 |
+ else:
|
|
| 495 |
+ return None, None
|
|
| 496 |
+ |
|
| 343 | 497 |
#############################################################
|
| 344 | 498 |
# Private Methods used in BuildStream #
|
| 345 | 499 |
#############################################################
|
| ... | ... | @@ -374,7 +528,45 @@ class Source(Plugin): |
| 374 | 528 |
# Wrapper function around plugin provided fetch method
|
| 375 | 529 |
#
|
| 376 | 530 |
def _fetch(self):
|
| 377 |
- self.fetch()
|
|
| 531 |
+ project = self._get_project()
|
|
| 532 |
+ source_fetchers = self.get_source_fetchers()
|
|
| 533 |
+ if source_fetchers:
|
|
| 534 |
+ for fetcher in source_fetchers:
|
|
| 535 |
+ alias = fetcher._get_alias()
|
|
| 536 |
+ success = False
|
|
| 537 |
+ for uri in project.get_alias_uris(alias):
|
|
| 538 |
+ try:
|
|
| 539 |
+ fetcher.fetch(uri)
|
|
| 540 |
+ # FIXME: We need a way to distinguish between serious errors and
|
|
| 541 |
+ # the host being unreachable
|
|
| 542 |
+ except BstError as e:
|
|
| 543 |
+ last_error = e
|
|
| 544 |
+ continue
|
|
| 545 |
+ success = True
|
|
| 546 |
+ break
|
|
| 547 |
+ if not success:
|
|
| 548 |
+ raise last_error
|
|
| 549 |
+ else:
|
|
| 550 |
+ alias = self._get_alias()
|
|
| 551 |
+ if not project.mirrors or not alias:
|
|
| 552 |
+ self.fetch()
|
|
| 553 |
+ return
|
|
| 554 |
+ |
|
| 555 |
+ context = self._get_context()
|
|
| 556 |
+ source_kind = type(self)
|
|
| 557 |
+ for uri in project.get_alias_uris(alias):
|
|
| 558 |
+ new_source = source_kind(context, project, self.__meta,
|
|
| 559 |
+ alias_override=(alias, uri))
|
|
| 560 |
+ new_source._preflight()
|
|
| 561 |
+ try:
|
|
| 562 |
+ new_source.fetch()
|
|
| 563 |
+ # FIXME: We need a way to distinguish between serious errors and
|
|
| 564 |
+ # the host being unreachable
|
|
| 565 |
+ except BstError as e:
|
|
| 566 |
+ last_error = e
|
|
| 567 |
+ continue
|
|
| 568 |
+ return
|
|
| 569 |
+ raise last_error
|
|
| 378 | 570 |
|
| 379 | 571 |
# Wrapper for stage() api which gives the source
|
| 380 | 572 |
# plugin a fully constructed path considering the
|
| ... | ... | @@ -581,7 +773,7 @@ class Source(Plugin): |
| 581 | 773 |
# Wrapper for track()
|
| 582 | 774 |
#
|
| 583 | 775 |
def _track(self):
|
| 584 |
- new_ref = self.track()
|
|
| 776 |
+ new_ref = self.__do_track()
|
|
| 585 | 777 |
current_ref = self.get_ref()
|
| 586 | 778 |
|
| 587 | 779 |
if new_ref is None:
|
| ... | ... | @@ -593,10 +785,48 @@ class Source(Plugin): |
| 593 | 785 |
|
| 594 | 786 |
return new_ref
|
| 595 | 787 |
|
| 788 |
+ # Returns the alias if it's defined in the project
|
|
| 789 |
+ def _get_alias(self):
|
|
| 790 |
+ alias = self.__expected_alias
|
|
| 791 |
+ project = self._get_project()
|
|
| 792 |
+ if project.get_alias_uri(alias):
|
|
| 793 |
+ # The alias must already be defined in the project's aliases
|
|
| 794 |
+ # otherwise http://foo gets treated like it contains an alias
|
|
| 795 |
+ return alias
|
|
| 796 |
+ else:
|
|
| 797 |
+ return None
|
|
| 798 |
+ |
|
| 596 | 799 |
#############################################################
|
| 597 | 800 |
# Local Private Methods #
|
| 598 | 801 |
#############################################################
|
| 599 | 802 |
|
| 803 |
+ # Tries to call track for every mirror, stopping once it succeeds
|
|
| 804 |
+ def __do_track(self):
|
|
| 805 |
+ project = self._get_project()
|
|
| 806 |
+ # If there are no mirrors, or no aliases to replace, there's nothing to do here.
|
|
| 807 |
+ alias = self._get_alias()
|
|
| 808 |
+ if not project.mirrors or not alias:
|
|
| 809 |
+ return self.track()
|
|
| 810 |
+ |
|
| 811 |
+ context = self._get_context()
|
|
| 812 |
+ source_kind = type(self)
|
|
| 813 |
+ |
|
| 814 |
+ # NOTE: We are assuming here that tracking only requires substituting the
|
|
| 815 |
+ # first alias used
|
|
| 816 |
+ for uri in reversed(project.get_alias_uris(alias)):
|
|
| 817 |
+ new_source = source_kind(context, project, self.__meta,
|
|
| 818 |
+ alias_override=(alias, uri))
|
|
| 819 |
+ new_source._preflight()
|
|
| 820 |
+ try:
|
|
| 821 |
+ ref = new_source.track()
|
|
| 822 |
+ # FIXME: We need a way to distinguish between serious errors and
|
|
| 823 |
+ # the host being unreachable
|
|
| 824 |
+ except BstError as e:
|
|
| 825 |
+ last_error = e
|
|
| 826 |
+ continue
|
|
| 827 |
+ return ref
|
|
| 828 |
+ raise last_error
|
|
| 829 |
+ |
|
| 600 | 830 |
# Ensures a fully constructed path and returns it
|
| 601 | 831 |
def __ensure_directory(self, directory):
|
| 602 | 832 |
|
| ... | ... | @@ -42,6 +42,10 @@ from . import _signals |
| 42 | 42 |
from ._exceptions import BstError, ErrorDomain
|
| 43 | 43 |
|
| 44 | 44 |
|
| 45 |
+# The separator we use for user specified aliases
|
|
| 46 |
+_ALIAS_SEPARATOR = ':'
|
|
| 47 |
+ |
|
| 48 |
+ |
|
| 45 | 49 |
class UtilError(BstError):
|
| 46 | 50 |
"""Raised by utility functions when system calls fail.
|
| 47 | 51 |
|
| 1 |
+ |
|
| 2 |
+ |
|
| 3 |
+Creating and using a git mirror
|
|
| 4 |
+'''''''''''''''''''''''''''''''
|
|
| 5 |
+This is an example of how to create a git mirror using git's
|
|
| 6 |
+`git-http-backend <https://git-scm.com/docs/git-http-backend>`_ and
|
|
| 7 |
+`lighttpd <https://redmine.lighttpd.net/projects/1/wiki/TutorialConfiguration>`_.
|
|
| 8 |
+ |
|
| 9 |
+ |
|
| 10 |
+Prerequisites
|
|
| 11 |
+=============
|
|
| 12 |
+You will need git installed, and git-http-backend must be present. It is assumed
|
|
| 13 |
+that the git-http-backend binary exists at `/usr/lib/git-core/git-http-backend`.
|
|
| 14 |
+ |
|
| 15 |
+You will need `lighttpd` installed, with at least the modules
|
|
| 16 |
+`mod_alias`, `mod_cgi`, and `mod_setenv`.
|
|
| 17 |
+ |
|
| 18 |
+I will be using gnome-modulesets as an example, which can be cloned from
|
|
| 19 |
+`http://gnome7.codethink.co.uk/gnome-modulesets.git`.
|
|
| 20 |
+ |
|
| 21 |
+ |
|
| 22 |
+Starting a git http server
|
|
| 23 |
+==========================
|
|
| 24 |
+ |
|
| 25 |
+ |
|
| 26 |
+1. Set up a directory containing mirrors
|
|
| 27 |
+----------------------------------------
|
|
| 28 |
+Choose a suitable directory to hold your mirrors, e.g. `/var/www/git`.
|
|
| 29 |
+ |
|
| 30 |
+Place the git repositories you want to use as mirrors in the mirror dir, e.g.
|
|
| 31 |
+``git clone --mirror http://git.gnome.org/browse/yelp-xsl /var/www/git/yelp-xsl.git``.
|
|
| 32 |
+ |
|
| 33 |
+ |
|
| 34 |
+2. Configure lighttpd
|
|
| 35 |
+---------------------
|
|
| 36 |
+Write out a lighttpd.conf as follows:
|
|
| 37 |
+ |
|
| 38 |
+::
|
|
| 39 |
+ |
|
| 40 |
+ server.document-root = "/var/www/git/"
|
|
| 41 |
+ server.port = 3000
|
|
| 42 |
+ server.modules = (
|
|
| 43 |
+ "mod_alias",
|
|
| 44 |
+ "mod_cgi",
|
|
| 45 |
+ "mod_setenv",
|
|
| 46 |
+ )
|
|
| 47 |
+
|
|
| 48 |
+ alias.url += ( "/git" => "/usr/lib/git-core/git-http-backend" )
|
|
| 49 |
+ $HTTP["url"] =~ "^/git" {
|
|
| 50 |
+ cgi.assign = ("" => "")
|
|
| 51 |
+ setenv.add-environment = (
|
|
| 52 |
+ "GIT_PROJECT_ROOT" => "/var/www/git",
|
|
| 53 |
+ "GIT_HTTP_EXPORT_ALL" => ""
|
|
| 54 |
+ )
|
|
| 55 |
+ }
|
|
| 56 |
+ |
|
| 57 |
+.. note::
|
|
| 58 |
+ |
|
| 59 |
+ If you have your mirrors in another directory, replace /var/www/git/ with that directory.
|
|
| 60 |
+ |
|
| 61 |
+ |
|
| 62 |
+3. Start lighttpd
|
|
| 63 |
+-----------------
|
|
| 64 |
+lighttpd can be invoked with the command-line ``lighttpd -D -f lighttpd.conf``.
|
|
| 65 |
+ |
|
| 66 |
+ |
|
| 67 |
+4. Test that you can fetch from it
|
|
| 68 |
+----------------------------------
|
|
| 69 |
+We can then clone the mirrored repo using git via http with
|
|
| 70 |
+``git clone http://127.0.0.1:3000/git/yelp-xsl``.
|
|
| 71 |
+ |
|
| 72 |
+.. note::
|
|
| 73 |
+ |
|
| 74 |
+ If you have set server.port to something other than the default, you will
|
|
| 75 |
+ need to replace the '3000' in the command-line.
|
|
| 76 |
+ |
|
| 77 |
+ |
|
| 78 |
+5. Configure the project to use the mirror
|
|
| 79 |
+------------------------------------------
|
|
| 80 |
+To add this local http server as a mirror, add the following to the project.conf:
|
|
| 81 |
+ |
|
| 82 |
+.. code:: yaml
|
|
| 83 |
+ |
|
| 84 |
+ mirrors:
|
|
| 85 |
+ - name: local-mirror
|
|
| 86 |
+ aliases:
|
|
| 87 |
+ git_gnome_org:
|
|
| 88 |
+ - http://127.0.0.1:3000/git/
|
|
| 89 |
+ |
|
| 90 |
+ |
|
| 91 |
+6. Test that the mirror works
|
|
| 92 |
+-----------------------------
|
|
| 93 |
+We can make buildstream use the mirror by setting the alias to an invalid URL, e.g.
|
|
| 94 |
+ |
|
| 95 |
+.. code:: yaml
|
|
| 96 |
+ |
|
| 97 |
+ aliases:
|
|
| 98 |
+ git_gnome_org: https://www.example.com/invalid/url/
|
|
| 99 |
+ |
|
| 100 |
+Now, if you build an element that uses the source you placed in the mirror
|
|
| 101 |
+(e.g. ``bst build core-deps/yelp-xsl.bst``), you will see that it uses your mirror.
|
|
| 102 |
+ |
|
| 103 |
+ |
|
| 104 |
+.. _lighttpd_git_tar_conf:
|
|
| 105 |
+ |
|
| 106 |
+Bonus: lighttpd conf for git and tar
|
|
| 107 |
+====================================
|
|
| 108 |
+For those who have also used the :ref:`tar-mirror tutorial <using_tar_mirror>`,
|
|
| 109 |
+a combined lighttpd.conf is below:
|
|
| 110 |
+ |
|
| 111 |
+::
|
|
| 112 |
+ |
|
| 113 |
+ server.document-root = "/var/www/"
|
|
| 114 |
+ server.port = 3000
|
|
| 115 |
+ server.modules = (
|
|
| 116 |
+ "mod_alias",
|
|
| 117 |
+ "mod_cgi",
|
|
| 118 |
+ "mod_setenv",
|
|
| 119 |
+ )
|
|
| 120 |
+
|
|
| 121 |
+ alias.url += ( "/git" => "/usr/lib/git-core/git-http-backend" )
|
|
| 122 |
+ $HTTP["url"] =~ "^/git" {
|
|
| 123 |
+ cgi.assign = ("" => "")
|
|
| 124 |
+ setenv.add-environment = (
|
|
| 125 |
+ "GIT_PROJECT_ROOT" => "/var/www/git",
|
|
| 126 |
+ "GIT_HTTP_EXPORT_ALL" => ""
|
|
| 127 |
+ )
|
|
| 128 |
+ } else $HTTP["url"] =~ "^/tar" {
|
|
| 129 |
+ dir-listing.activate = "enable"
|
|
| 130 |
+ }
|
|
| 131 |
+ |
|
| 132 |
+ |
|
| 133 |
+Further reading
|
|
| 134 |
+===============
|
|
| 135 |
+If this mirror isn't being used exclusively in a secure network, it is strongly
|
|
| 136 |
+recommended you `use SSL <https://redmine.lighttpd.net/projects/1/wiki/HowToSimpleSSL>`_.
|
|
| 137 |
+ |
|
| 138 |
+This is the bare minimum required to set up a git mirror. A large, public project
|
|
| 139 |
+would prefer to set it up using the
|
|
| 140 |
+`git protocol <https://git-scm.com/book/en/v1/Git-on-the-Server-Git-Daemon>`_,
|
|
| 141 |
+and a security-conscious project would be configured to use
|
|
| 142 |
+`git over SSH <https://git-scm.com/book/en/v1/Git-on-the-Server-Getting-Git-on-a-Server#Small-Setups>`_.
|
|
| 143 |
+ |
|
| 144 |
+Lighttpd is documented on `its wiki <https://redmine.lighttpd.net/projects/lighttpd/wiki>`_.
|
| 1 |
+ |
|
| 2 |
+ |
|
| 3 |
+.. _using_tar_mirror:
|
|
| 4 |
+ |
|
| 5 |
+Creating and using a tar mirror
|
|
| 6 |
+'''''''''''''''''''''''''''''''
|
|
| 7 |
+This is an example of how to create a tar mirror using
|
|
| 8 |
+`lighttpd <https://redmine.lighttpd.net/projects/1/wiki/TutorialConfiguration>`_.
|
|
| 9 |
+ |
|
| 10 |
+ |
|
| 11 |
+Prerequisites
|
|
| 12 |
+=============
|
|
| 13 |
+You will need `lighttpd` installed.
|
|
| 14 |
+ |
|
| 15 |
+ |
|
| 16 |
+I will be using gnome-modulesets as an example, which can be cloned from
|
|
| 17 |
+`http://gnome7.codethink.co.uk/gnome-modulesets.git`.
|
|
| 18 |
+ |
|
| 19 |
+ |
|
| 20 |
+Starting a tar server
|
|
| 21 |
+=====================
|
|
| 22 |
+ |
|
| 23 |
+ |
|
| 24 |
+1. Set up a directory containing mirrors
|
|
| 25 |
+----------------------------------------
|
|
| 26 |
+Choose a suitable directory to hold your mirrored tar files, e.g. `/var/www/tar`.
|
|
| 27 |
+ |
|
| 28 |
+Place the tar files you want to use as mirrors in your mirror dir, e.g.
|
|
| 29 |
+ |
|
| 30 |
+.. code::
|
|
| 31 |
+ |
|
| 32 |
+ mkdir -p /var/www/tar/gettext
|
|
| 33 |
+ wget -O /var/www/tar/gettext/gettext-0.19.8.1.tar.xz https://ftp.gnu.org/gnu/gettext/gettext-0.19.8.1.tar.xz
|
|
| 34 |
+ |
|
| 35 |
+ |
|
| 36 |
+2. Configure lighttpd
|
|
| 37 |
+---------------------
|
|
| 38 |
+Write out a lighttpd.conf as follows:
|
|
| 39 |
+ |
|
| 40 |
+::
|
|
| 41 |
+ |
|
| 42 |
+ server.document-root = "/var/www/tar/"
|
|
| 43 |
+ server.port = 3000
|
|
| 44 |
+
|
|
| 45 |
+ dir-listing.activate = "enable"
|
|
| 46 |
+ |
|
| 47 |
+.. note::
|
|
| 48 |
+ |
|
| 49 |
+ If you have your mirrors in another directory, replace /var/www/tar/ with that directory.
|
|
| 50 |
+ |
|
| 51 |
+.. note::
|
|
| 52 |
+ |
|
| 53 |
+ An example lighttpd.conf that works for both git and tar services is available
|
|
| 54 |
+ :ref:`here <lighttpd_git_tar_conf>`
|
|
| 55 |
+ |
|
| 56 |
+ |
|
| 57 |
+3. Start lighttpd
|
|
| 58 |
+-----------------
|
|
| 59 |
+lighttpd can be invoked with the command-line ``lighttpd -D -f lighttpd.conf``.
|
|
| 60 |
+ |
|
| 61 |
+ |
|
| 62 |
+4. Test that you can fetch from it
|
|
| 63 |
+----------------------------------
|
|
| 64 |
+We can then download the mirrored file with ``wget 127.0.0.1:3000/tar/gettext/gettext-0.19.8.1.tar.xz``.
|
|
| 65 |
+ |
|
| 66 |
+.. note::
|
|
| 67 |
+ |
|
| 68 |
+ If you have set server.port to something other than the default, you will need
|
|
| 69 |
+ to replace the '3000' in the command-line.
|
|
| 70 |
+ |
|
| 71 |
+ |
|
| 72 |
+5. Configure the project to use the mirror
|
|
| 73 |
+------------------------------------------
|
|
| 74 |
+To add this local http server as a mirror, add the following to the project.conf:
|
|
| 75 |
+ |
|
| 76 |
+.. code:: yaml
|
|
| 77 |
+ |
|
| 78 |
+ mirrors:
|
|
| 79 |
+ - name: local-mirror
|
|
| 80 |
+ aliases:
|
|
| 81 |
+ ftp_gnu_org:
|
|
| 82 |
+ - http://127.0.0.1:3000/tar/
|
|
| 83 |
+ |
|
| 84 |
+ |
|
| 85 |
+6. Test that the mirror works
|
|
| 86 |
+-----------------------------
|
|
| 87 |
+We can make buildstream use the mirror by setting the alias to an invalid URL, e.g.
|
|
| 88 |
+ |
|
| 89 |
+.. code:: yaml
|
|
| 90 |
+ |
|
| 91 |
+ aliases:
|
|
| 92 |
+ ftp_gnu_org: https://www.example.com/invalid/url/
|
|
| 93 |
+ |
|
| 94 |
+Now, if you build an element that uses the source you placed in the mirror
|
|
| 95 |
+(e.g. ``bst build core-deps/gettext.bst``), you will see that it uses your mirror.
|
|
| 96 |
+ |
|
| 97 |
+ |
|
| 98 |
+Further reading
|
|
| 99 |
+===============
|
|
| 100 |
+If this mirror isn't being used exclusively in a secure network, it is strongly
|
|
| 101 |
+recommended you `use SSL <https://redmine.lighttpd.net/projects/1/wiki/HowToSimpleSSL>`_.
|
|
| 102 |
+ |
|
| 103 |
+Lighttpd is documented on `its wiki <https://redmine.lighttpd.net/projects/lighttpd/wiki>`_.
|
| ... | ... | @@ -198,6 +198,39 @@ You can also specify a list of caches here; earlier entries in the list |
| 198 | 198 |
will have higher priority than later ones.
|
| 199 | 199 |
|
| 200 | 200 |
|
| 201 |
+.. _project_essentials_mirrors:
|
|
| 202 |
+ |
|
| 203 |
+Mirrors
|
|
| 204 |
+~~~~~~~
|
|
| 205 |
+A list of mirrors can be defined that couple a location to a mapping of aliases to a
|
|
| 206 |
+list of URIs, e.g.
|
|
| 207 |
+ |
|
| 208 |
+.. code:: yaml
|
|
| 209 |
+ |
|
| 210 |
+ mirrors:
|
|
| 211 |
+ - name: middle-earth
|
|
| 212 |
+ aliases:
|
|
| 213 |
+ foo:
|
|
| 214 |
+ - http://www.middle-earth.com/foo/1
|
|
| 215 |
+ - http://www.middle-earth.com/foo/2
|
|
| 216 |
+ bar:
|
|
| 217 |
+ - http://www.middle-earth.com/bar/1
|
|
| 218 |
+ - http://www.middle-earth.com/bar/2
|
|
| 219 |
+ - name: oz
|
|
| 220 |
+ aliases:
|
|
| 221 |
+ foo:
|
|
| 222 |
+ - http://www.oz.com/foo
|
|
| 223 |
+ bar:
|
|
| 224 |
+ - http://www.oz.com/bar
|
|
| 225 |
+ |
|
| 226 |
+Mirrors (and the URIs therein) are consulted in the order
|
|
| 227 |
+they are defined when fetching, and in reverse order when tracking.
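For example, with the configuration above and no default mirror selected, a source URL such as ``foo:repo.git`` is fetched by trying http://www.middle-earth.com/foo/1, then http://www.middle-earth.com/foo/2, then http://www.oz.com/foo, and finally the upstream URI that the ``foo`` alias maps to in ``aliases``; tracking walks the same list in reverse.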
|
|
| 228 |
+ |
|
| 229 |
+A default mirror to consult first can be defined via
|
|
| 230 |
+:ref:`user config <config_default_mirror>`, or the command-line argument
|
|
| 231 |
+:ref:`--default-mirror <invoking_bst>`.
|
|
| 232 |
+ |
|
| 233 |
+ |
|
| 201 | 234 |
.. _project_plugins:
|
| 202 | 235 |
|
| 203 | 236 |
External plugins
|
| ... | ... | @@ -6,10 +6,8 @@ Installing BuildStream on a Linux distro |
| 6 | 6 |
BuildStream requires the following base system requirements:
|
| 7 | 7 |
|
| 8 | 8 |
* python3 >= 3.5
|
| 9 |
-* libostree >= v2017.8 with introspection data
|
|
| 10 | 9 |
* bubblewrap >= 0.1.2
|
| 11 | 10 |
* fuse2
|
| 12 |
-* PyGObject introspection bindings
|
|
| 13 | 11 |
|
| 14 | 12 |
BuildStream also depends on the host tools for the :mod:`Source <buildstream.source>` plugins.
|
| 15 | 13 |
Refer to the respective :ref:`source plugin <plugins_sources>` documentation for host tool
|
| ... | ... | @@ -20,6 +18,7 @@ The default plugins with extra host dependencies are: |
| 20 | 18 |
* bzr
|
| 21 | 19 |
* deb
|
| 22 | 20 |
* git
|
| 21 |
+* ostree
|
|
| 23 | 22 |
* patch
|
| 24 | 23 |
* tar
|
| 25 | 24 |
|
| ... | ... | @@ -52,18 +51,24 @@ Arch Linux |
| 52 | 51 |
Install the dependencies with::
|
| 53 | 52 |
|
| 54 | 53 |
sudo pacman -S \
|
| 55 |
- python fuse2 ostree bubblewrap python-gobject \
|
|
| 54 |
+ python fuse2 bubblewrap \
|
|
| 56 | 55 |
python-pip git
|
| 57 | 56 |
|
| 58 | 57 |
For the default plugins::
|
| 59 | 58 |
|
| 60 | 59 |
sudo pacman -S \
|
| 61 |
- lzip git bzr patch python-arpy
|
|
| 60 |
+ bzr git lzip ostree patch python-arpy python-gobject
|
|
| 62 | 61 |
|
| 63 | 62 |
|
| 64 | 63 |
Debian
|
| 65 | 64 |
++++++
|
| 65 |
+Install the dependencies with::
|
|
| 66 |
+ |
|
| 67 |
+ sudo apt-get install \
|
|
| 68 |
+ python3 fuse bubblewrap \
|
|
| 69 |
+ python3-pip python3-dev git
|
|
| 66 | 70 |
|
| 71 |
+For the default plugins:
|
|
| 67 | 72 |
|
| 68 | 73 |
Stretch
|
| 69 | 74 |
^^^^^^^
|
| ... | ... | @@ -78,32 +83,20 @@ And then running:: |
| 78 | 83 |
|
| 79 | 84 |
sudo apt-get update
|
| 80 | 85 |
|
| 81 |
-At this point you should be able to get the system requirements with::
|
|
| 86 |
+At this point you should be able to get the system requirements for the default plugins with::
|
|
| 82 | 87 |
|
| 83 | 88 |
sudo apt-get install \
|
| 84 |
- python3 fuse ostree gir1.2-ostree-1.0 bubblewrap python3-gi \
|
|
| 85 |
- python3-pip python3-dev git
|
|
| 89 |
+ bzr git lzip patch python3-arpy python3-gi
|
|
| 86 | 90 |
sudo apt-get install -t stretch-backports \
|
| 87 | 91 |
gir1.2-ostree-1.0 ostree
|
| 88 | 92 |
|
| 89 |
-For the default plugins::
|
|
| 90 |
- |
|
| 91 |
- sudo apt-get install \
|
|
| 92 |
- lzip git bzr patch python3-arpy
|
|
| 93 |
- |
|
| 94 | 93 |
Buster or Sid
|
| 95 | 94 |
^^^^^^^^^^^^^
|
| 96 | 95 |
For debian unstable or testing, only the following line should be enough
|
| 97 |
-to get the base system requirements installed::
|
|
| 98 |
- |
|
| 99 |
- sudo apt-get install \
|
|
| 100 |
- python3 fuse ostree gir1.2-ostree-1.0 bubblewrap python3-gi \
|
|
| 101 |
- python3-pip python3-dev git
|
|
| 102 |
- |
|
| 103 |
-For the default plugins::
|
|
| 96 |
+to get the system requirements for the default plugins installed::
|
|
| 104 | 97 |
|
| 105 | 98 |
sudo apt-get install \
|
| 106 |
- lzip git bzr patch python3-arpy
|
|
| 99 |
+ lzip gir1.2-ostree-1.0 git bzr ostree patch python3-arpy python3-gi
|
|
| 107 | 100 |
|
| 108 | 101 |
|
| 109 | 102 |
Fedora
|
| ... | ... | @@ -112,13 +105,13 @@ For recent fedora systems, the following line should get you the system |
| 112 | 105 |
requirements you need::
|
| 113 | 106 |
|
| 114 | 107 |
dnf install -y \
|
| 115 |
- python3 fuse ostree bubblewrap python3-gobject \
|
|
| 108 |
+ python3 fuse bubblewrap \
|
|
| 116 | 109 |
python3-pip python3-devel git
|
| 117 | 110 |
|
| 118 | 111 |
For the default plugins::
|
| 119 | 112 |
|
| 120 | 113 |
dnf install -y \
|
| 121 |
- lzip git bzr patch python3-arpy
|
|
| 114 |
+ bzr git lzip patch ostree python3-arpy python3-gobject
|
|
| 122 | 115 |
|
| 123 | 116 |
|
| 124 | 117 |
Installing
|
| ... | ... | @@ -89,6 +89,28 @@ modifying some low level component. |
| 89 | 89 |
the ``--strict`` and ``--no-strict`` command line options.
|
| 90 | 90 |
|
| 91 | 91 |
|
| 92 |
+.. _config_default_mirror:
|
|
| 93 |
+ |
|
| 94 |
+Default Mirror
|
|
| 95 |
+~~~~~~~~~~~~~~
|
|
| 96 |
+ |
|
| 97 |
+When using :ref:`mirrors <project_essentials_mirrors>`, a default mirror can
|
|
| 98 |
+be defined to be fetched first.
|
|
| 99 |
+The default mirror is defined by its name, e.g.
|
|
| 100 |
+ |
|
| 101 |
+.. code:: yaml
|
|
| 102 |
+ |
|
| 103 |
+ projects:
|
|
| 104 |
+ project-name:
|
|
| 105 |
+ default-mirror: oz
|
|
| 106 |
+ |
|
| 107 |
+ |
|
| 108 |
+.. note::
|
|
| 109 |
+ |
|
| 110 |
+ It is possible to override this at invocation time using the
|
|
| 111 |
+ ``--default-mirror`` command-line option.
|
|
| 112 |
+ |
|
| 113 |
+ |
|
| 92 | 114 |
Default configuration
|
| 93 | 115 |
---------------------
|
| 94 | 116 |
The default BuildStream configuration is specified here for reference:
|
| ... | ... | @@ -10,3 +10,5 @@ maintained and work as expected. |
| 10 | 10 |
:maxdepth: 1
|
| 11 | 11 |
|
| 12 | 12 |
examples/flatpak-autotools
|
| 13 |
+ examples/tar-mirror
|
|
| 14 |
+ examples/git-mirror
|
| ... | ... | @@ -27,6 +27,7 @@ MAIN_OPTIONS = [ |
| 27 | 27 |
"--colors ",
|
| 28 | 28 |
"--config ",
|
| 29 | 29 |
"--debug ",
|
| 30 |
+ "--default-mirror ",
|
|
| 30 | 31 |
"--directory ",
|
| 31 | 32 |
"--error-lines ",
|
| 32 | 33 |
"--fetchers ",
|
| 1 |
+import os
|
|
| 2 |
+import pytest
|
|
| 3 |
+ |
|
| 4 |
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS
|
|
| 5 |
+ |
|
| 6 |
+from buildstream import _yaml
|
|
| 7 |
+ |
|
| 8 |
+ |
|
| 9 |
+# Project directory
|
|
| 10 |
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
|
|
| 11 |
+DATA_DIR = os.path.join(TOP_DIR, 'project')
|
|
| 12 |
+ |
|
| 13 |
+ |
|
| 14 |
+def generate_element(output_file):
|
|
| 15 |
+ element = {
|
|
| 16 |
+ 'kind': 'import',
|
|
| 17 |
+ 'sources': [
|
|
| 18 |
+ {
|
|
| 19 |
+ 'kind': 'fetch_source',
|
|
| 20 |
+ "output-text": output_file,
|
|
| 21 |
+ "urls": ["foo:repo1", "bar:repo2"],
|
|
| 22 |
+ "fetch-succeeds": {
|
|
| 23 |
+ "FOO/repo1": True,
|
|
| 24 |
+ "BAR/repo2": False,
|
|
| 25 |
+ "OOF/repo1": False,
|
|
| 26 |
+ "RAB/repo2": True,
|
|
| 27 |
+ "OFO/repo1": False,
|
|
| 28 |
+ "RBA/repo2": False,
|
|
| 29 |
+ "ooF/repo1": False,
|
|
| 30 |
+ "raB/repo2": False,
|
|
| 31 |
+ }
|
|
| 32 |
+ }
|
|
| 33 |
+ ]
|
|
| 34 |
+ }
|
|
| 35 |
+ return element
|
|
| 36 |
+ |
|
| 37 |
+ |
|
| 38 |
+def generate_project():
|
|
| 39 |
+ project = {
|
|
| 40 |
+ 'name': 'test',
|
|
| 41 |
+ 'element-path': 'elements',
|
|
| 42 |
+ 'aliases': {
|
|
| 43 |
+ 'foo': 'FOO/',
|
|
| 44 |
+ 'bar': 'BAR/',
|
|
| 45 |
+ },
|
|
| 46 |
+ 'mirrors': [
|
|
| 47 |
+ {
|
|
| 48 |
+ 'name': 'middle-earth',
|
|
| 49 |
+ 'aliases': {
|
|
| 50 |
+ 'foo': ['OOF/'],
|
|
| 51 |
+ 'bar': ['RAB/'],
|
|
| 52 |
+ },
|
|
| 53 |
+ },
|
|
| 54 |
+ {
|
|
| 55 |
+ 'name': 'arrakis',
|
|
| 56 |
+ 'aliases': {
|
|
| 57 |
+ 'foo': ['OFO/'],
|
|
| 58 |
+ 'bar': ['RBA/'],
|
|
| 59 |
+ },
|
|
| 60 |
+ },
|
|
| 61 |
+ {
|
|
| 62 |
+ 'name': 'oz',
|
|
| 63 |
+ 'aliases': {
|
|
| 64 |
+ 'foo': ['ooF/'],
|
|
| 65 |
+ 'bar': ['raB/'],
|
|
| 66 |
+ }
|
|
| 67 |
+ },
|
|
| 68 |
+ ],
|
|
| 69 |
+ 'plugins': [
|
|
| 70 |
+ {
|
|
| 71 |
+ 'origin': 'local',
|
|
| 72 |
+ 'path': 'sources',
|
|
| 73 |
+ 'sources': {
|
|
| 74 |
+ 'fetch_source': 0
|
|
| 75 |
+ }
|
|
| 76 |
+ }
|
|
| 77 |
+ ]
|
|
| 78 |
+ }
|
|
| 79 |
+ return project
|
|
| 80 |
+ |
|
| 81 |
+ |
|
| 82 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 83 |
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
|
|
| 84 |
+def test_mirror_fetch(cli, tmpdir, datafiles, kind):
|
|
| 85 |
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
|
|
| 86 |
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
|
|
| 87 |
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
|
|
| 88 |
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
|
|
| 89 |
+ project_dir = os.path.join(str(tmpdir), 'project')
|
|
| 90 |
+ os.makedirs(project_dir)
|
|
| 91 |
+ element_dir = os.path.join(project_dir, 'elements')
|
|
| 92 |
+ |
|
| 93 |
+ # Create repo objects of the upstream and mirror
|
|
| 94 |
+ upstream_repo = create_repo(kind, upstream_repodir)
|
|
| 95 |
+ upstream_ref = upstream_repo.create(bin_files_path)
|
|
| 96 |
+ mirror_repo = upstream_repo.copy(mirror_repodir)
|
|
| 97 |
+ mirror_ref = upstream_ref
|
|
| 98 |
+ upstream_ref = upstream_repo.create(dev_files_path)
|
|
| 99 |
+ |
|
| 100 |
+ element = {
|
|
| 101 |
+ 'kind': 'import',
|
|
| 102 |
+ 'sources': [
|
|
| 103 |
+ upstream_repo.source_config(ref=upstream_ref)
|
|
| 104 |
+ ]
|
|
| 105 |
+ }
|
|
| 106 |
+ element_name = 'test.bst'
|
|
| 107 |
+ element_path = os.path.join(element_dir, element_name)
|
|
| 108 |
+ full_repo = element['sources'][0]['url']
|
|
| 109 |
+ upstream_map, repo_name = os.path.split(full_repo)
|
|
| 110 |
+ alias = 'foo-' + kind
|
|
| 111 |
+ aliased_repo = alias + ':' + repo_name
|
|
| 112 |
+ element['sources'][0]['url'] = aliased_repo
|
|
| 113 |
+ full_mirror = mirror_repo.source_config()['url']
|
|
| 114 |
+ mirror_map, _ = os.path.split(full_mirror)
|
|
| 115 |
+ os.makedirs(element_dir)
|
|
| 116 |
+ _yaml.dump(element, element_path)
|
|
| 117 |
+ |
|
| 118 |
+ project = {
|
|
| 119 |
+ 'name': 'test',
|
|
| 120 |
+ 'element-path': 'elements',
|
|
| 121 |
+ 'aliases': {
|
|
| 122 |
+ alias: upstream_map + "/"
|
|
| 123 |
+ },
|
|
| 124 |
+ 'mirrors': [
|
|
| 125 |
+ {
|
|
| 126 |
+ 'name': 'middle-earth',
|
|
| 127 |
+ 'aliases': {
|
|
| 128 |
+ alias: [mirror_map + "/"],
|
|
| 129 |
+ },
|
|
| 130 |
+ },
|
|
| 131 |
+ ]
|
|
| 132 |
+ }
|
|
| 133 |
+ project_file = os.path.join(project_dir, 'project.conf')
|
|
| 134 |
+ _yaml.dump(project, project_file)
|
|
| 135 |
+ |
|
| 136 |
+ # There is no obvious way of checking that the mirror has been fetched,
|
|
| 137 |
+ # but at least we can be sure the fetch succeeds
|
|
| 138 |
+ result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
| 139 |
+ result.assert_success()
|
|
| 140 |
+ |
|
| 141 |
+ |
|
| 142 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 143 |
+def test_mirror_fetch_multi(cli, tmpdir, datafiles):
|
|
| 144 |
+ output_file = os.path.join(str(tmpdir), "output.txt")
|
|
| 145 |
+ project_dir = str(tmpdir)
|
|
| 146 |
+ element_dir = os.path.join(project_dir, 'elements')
|
|
| 147 |
+ os.makedirs(element_dir, exist_ok=True)
|
|
| 148 |
+ element_name = "test.bst"
|
|
| 149 |
+ element_path = os.path.join(element_dir, element_name)
|
|
| 150 |
+ element = generate_element(output_file)
|
|
| 151 |
+ _yaml.dump(element, element_path)
|
|
| 152 |
+ |
|
| 153 |
+ project_file = os.path.join(project_dir, 'project.conf')
|
|
| 154 |
+ project = generate_project()
|
|
| 155 |
+ _yaml.dump(project, project_file)
|
|
| 156 |
+ |
|
| 157 |
+ result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
| 158 |
+ result.assert_success()
|
|
| 159 |
+ with open(output_file) as f:
|
|
| 160 |
+ contents = f.read()
|
|
| 161 |
+ assert "Fetch foo:repo1 succeeded from FOO/repo1" in contents
|
|
| 162 |
+ assert "Fetch bar:repo2 succeeded from RAB/repo2" in contents
|
|
| 163 |
+ |
|
| 164 |
+ |
|
| 165 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 166 |
+def test_mirror_fetch_default_cmdline(cli, tmpdir, datafiles):
|
|
| 167 |
+ output_file = os.path.join(str(tmpdir), "output.txt")
|
|
| 168 |
+ project_dir = str(tmpdir)
|
|
| 169 |
+ element_dir = os.path.join(project_dir, 'elements')
|
|
| 170 |
+ os.makedirs(element_dir, exist_ok=True)
|
|
| 171 |
+ element_name = "test.bst"
|
|
| 172 |
+ element_path = os.path.join(element_dir, element_name)
|
|
| 173 |
+ element = generate_element(output_file)
|
|
| 174 |
+ _yaml.dump(element, element_path)
|
|
| 175 |
+ |
|
| 176 |
+ project_file = os.path.join(project_dir, 'project.conf')
|
|
| 177 |
+ project = generate_project()
|
|
| 178 |
+ _yaml.dump(project, project_file)
|
|
| 179 |
+ |
|
| 180 |
+ result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'fetch', element_name])
|
|
| 181 |
+ result.assert_success()
|
|
| 182 |
+ with open(output_file) as f:
|
|
| 183 |
+ contents = f.read()
|
|
| 184 |
+ print(contents)
|
|
| 185 |
+ # Success if fetching from arrakis' mirror happened before middle-earth's
|
|
| 186 |
+ arrakis_str = "OFO/repo1"
|
|
| 187 |
+ arrakis_pos = contents.find(arrakis_str)
|
|
| 188 |
+ assert arrakis_pos != -1, "'{}' wasn't found".format(arrakis_str)
|
|
| 189 |
+ me_str = "OOF/repo1"
|
|
| 190 |
+ me_pos = contents.find(me_str)
|
|
| 191 |
+ assert me_pos != -1, "'{}' wasn't found".format(me_str)
|
|
| 192 |
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
|
|
| 193 |
+ |
|
| 194 |
+ |
|
| 195 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 196 |
+def test_mirror_fetch_default_userconfig(cli, tmpdir, datafiles):
|
|
| 197 |
+ output_file = os.path.join(str(tmpdir), "output.txt")
|
|
| 198 |
+ project_dir = str(tmpdir)
|
|
| 199 |
+ element_dir = os.path.join(project_dir, 'elements')
|
|
| 200 |
+ os.makedirs(element_dir, exist_ok=True)
|
|
| 201 |
+ element_name = "test.bst"
|
|
| 202 |
+ element_path = os.path.join(element_dir, element_name)
|
|
| 203 |
+ element = generate_element(output_file)
|
|
| 204 |
+ _yaml.dump(element, element_path)
|
|
| 205 |
+ |
|
| 206 |
+ project_file = os.path.join(project_dir, 'project.conf')
|
|
| 207 |
+ project = generate_project()
|
|
| 208 |
+ _yaml.dump(project, project_file)
|
|
| 209 |
+ |
|
| 210 |
+ userconfig = {
|
|
| 211 |
+ 'projects': {
|
|
| 212 |
+ 'test': {
|
|
| 213 |
+ 'default-mirror': 'oz'
|
|
| 214 |
+ }
|
|
| 215 |
+ }
|
|
| 216 |
+ }
|
|
| 217 |
+ cli.configure(userconfig)
|
|
| 218 |
+ |
|
| 219 |
+ result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
| 220 |
+ result.assert_success()
|
|
| 221 |
+ with open(output_file) as f:
|
|
| 222 |
+ contents = f.read()
|
|
| 223 |
+ print(contents)
|
|
| 224 |
+ # Success if fetching from Oz's mirror happened before middle-earth's
|
|
| 225 |
+ oz_str = "ooF/repo1"
|
|
| 226 |
+ oz_pos = contents.find(oz_str)
|
|
| 227 |
+ assert oz_pos != -1, "'{}' wasn't found".format(oz_str)
|
|
| 228 |
+ me_str = "OOF/repo1"
|
|
| 229 |
+ me_pos = contents.find(me_str)
|
|
| 230 |
+ assert me_pos != -1, "'{}' wasn't found".format(me_str)
|
|
| 231 |
+ assert oz_pos < me_pos, "'{}' wasn't found before '{}'".format(oz_str, me_str)
|
|
| 232 |
+ |
|
| 233 |
+ |
|
| 234 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 235 |
+def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir, datafiles):
|
|
| 236 |
+ output_file = os.path.join(str(tmpdir), "output.txt")
|
|
| 237 |
+ project_dir = str(tmpdir)
|
|
| 238 |
+ element_dir = os.path.join(project_dir, 'elements')
|
|
| 239 |
+ os.makedirs(element_dir, exist_ok=True)
|
|
| 240 |
+ element_name = "test.bst"
|
|
| 241 |
+ element_path = os.path.join(element_dir, element_name)
|
|
| 242 |
+ element = generate_element(output_file)
|
|
| 243 |
+ _yaml.dump(element, element_path)
|
|
| 244 |
+ |
|
| 245 |
+ project_file = os.path.join(project_dir, 'project.conf')
|
|
| 246 |
+ project = generate_project()
|
|
| 247 |
+ _yaml.dump(project, project_file)
|
|
| 248 |
+ |
|
| 249 |
+ userconfig = {
|
|
| 250 |
+ 'projects': {
|
|
| 251 |
+ 'test': {
|
|
| 252 |
+ 'default-mirror': 'oz'
|
|
| 253 |
+ }
|
|
| 254 |
+ }
|
|
| 255 |
+ }
|
|
| 256 |
+ cli.configure(userconfig)
|
|
| 257 |
+ |
|
| 258 |
+ result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'fetch', element_name])
|
|
| 259 |
+ result.assert_success()
|
|
| 260 |
+ with open(output_file) as f:
|
|
| 261 |
+ contents = f.read()
|
|
| 262 |
+ print(contents)
|
|
| 263 |
+ # Success if fetching from arrakis' mirror happened before middle-earth's
|
|
| 264 |
+ arrakis_str = "OFO/repo1"
|
|
| 265 |
+ arrakis_pos = contents.find(arrakis_str)
|
|
| 266 |
+ assert arrakis_pos != -1, "'{}' wasn't found".format(arrakis_str)
|
|
| 267 |
+ me_str = "OOF/repo1"
|
|
| 268 |
+ me_pos = contents.find(me_str)
|
|
| 269 |
+ assert me_pos != -1, "'{}' wasn't found".format(me_str)
|
|
| 270 |
+ assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
|
|
| 271 |
+ |
|
| 272 |
+ |
|
| 273 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 274 |
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
|
|
| 275 |
+def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
|
|
| 276 |
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
|
|
| 277 |
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
|
|
| 278 |
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
|
|
| 279 |
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
|
|
| 280 |
+ project_dir = os.path.join(str(tmpdir), 'project')
|
|
| 281 |
+ os.makedirs(project_dir)
|
|
| 282 |
+ element_dir = os.path.join(project_dir, 'elements')
|
|
| 283 |
+ |
|
| 284 |
+ # Create repo objects of the upstream and mirror
|
|
| 285 |
+ upstream_repo = create_repo(kind, upstream_repodir)
|
|
| 286 |
+ upstream_ref = upstream_repo.create(bin_files_path)
|
|
| 287 |
+ mirror_repo = upstream_repo.copy(mirror_repodir)
|
|
| 288 |
+ mirror_ref = upstream_ref
|
|
| 289 |
+ upstream_ref = upstream_repo.create(dev_files_path)
|
|
| 290 |
+ |
|
| 291 |
+ element = {
|
|
| 292 |
+ 'kind': 'import',
|
|
| 293 |
+ 'sources': [
|
|
| 294 |
+ upstream_repo.source_config(ref=upstream_ref)
|
|
| 295 |
+ ]
|
|
| 296 |
+ }
|
|
| 297 |
+ |
|
| 298 |
+ element['sources'][0]
|
|
| 299 |
+ element_name = 'test.bst'
|
|
| 300 |
+ element_path = os.path.join(element_dir, element_name)
|
|
| 301 |
+ full_repo = element['sources'][0]['url']
|
|
| 302 |
+ upstream_map, repo_name = os.path.split(full_repo)
|
|
| 303 |
+ alias = 'foo-' + kind
|
|
| 304 |
+ aliased_repo = alias + ':' + repo_name
|
|
| 305 |
+ element['sources'][0]['url'] = aliased_repo
|
|
| 306 |
+ full_mirror = mirror_repo.source_config()['url']
|
|
| 307 |
+ mirror_map, _ = os.path.split(full_mirror)
|
|
| 308 |
+ os.makedirs(element_dir)
|
|
| 309 |
+ _yaml.dump(element, element_path)
|
|
| 310 |
+ |
|
| 311 |
+ project = {
|
|
| 312 |
+ 'name': 'test',
|
|
| 313 |
+ 'element-path': 'elements',
|
|
| 314 |
+ 'aliases': {
|
|
| 315 |
+ alias: upstream_map + "/"
|
|
| 316 |
+ },
|
|
| 317 |
+ 'mirrors': [
|
|
| 318 |
+ {
|
|
| 319 |
+ 'name': 'middle-earth',
|
|
| 320 |
+ 'aliases': {
|
|
| 321 |
+ alias: [mirror_map + "/"],
|
|
| 322 |
+ },
|
|
| 323 |
+ },
|
|
| 324 |
+ ]
|
|
| 325 |
+ }
|
|
| 326 |
+ project_file = os.path.join(project_dir, 'project.conf')
|
|
| 327 |
+ _yaml.dump(project, project_file)
|
|
| 328 |
+ |
|
| 329 |
+ result = cli.run(project=project_dir, args=['track', element_name])
|
|
| 330 |
+ result.assert_success()
|
|
| 331 |
+ |
|
| 332 |
+ # Tracking tries upstream first. Check the ref is from upstream.
|
|
| 333 |
+ new_element = _yaml.load(element_path)
|
|
| 334 |
+ source = new_element['sources'][0]
|
|
| 335 |
+ if 'ref' in source:
|
|
| 336 |
+ assert source['ref'] == upstream_ref
|
|
| 337 |
+ |
|
| 338 |
+ |
|
| 339 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 340 |
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
|
|
| 341 |
+def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
|
|
| 342 |
+ bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
|
|
| 343 |
+ dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
|
|
| 344 |
+ upstream_repodir = os.path.join(str(tmpdir), 'upstream')
|
|
| 345 |
+ mirror_repodir = os.path.join(str(tmpdir), 'mirror')
|
|
| 346 |
+ project_dir = os.path.join(str(tmpdir), 'project')
|
|
| 347 |
+ os.makedirs(project_dir)
|
|
| 348 |
+ element_dir = os.path.join(project_dir, 'elements')
|
|
| 349 |
+ |
|
| 350 |
+ # Create repo objects of the upstream and mirror
|
|
| 351 |
+ upstream_repo = create_repo(kind, upstream_repodir)
|
|
| 352 |
+ upstream_ref = upstream_repo.create(bin_files_path)
|
|
| 353 |
+ mirror_repo = upstream_repo.copy(mirror_repodir)
|
|
| 354 |
+ mirror_ref = upstream_ref
|
|
| 355 |
+ upstream_ref = upstream_repo.create(dev_files_path)
|
|
| 356 |
+ |
|
| 357 |
+ element = {
|
|
| 358 |
+ 'kind': 'import',
|
|
| 359 |
+ 'sources': [
|
|
| 360 |
+ upstream_repo.source_config(ref=upstream_ref)
|
|
| 361 |
+ ]
|
|
| 362 |
+ }
|
|
| 363 |
+ |
|
| 364 |
+ element['sources'][0]
|
|
| 365 |
+ element_name = 'test.bst'
|
|
| 366 |
+ element_path = os.path.join(element_dir, element_name)
|
|
| 367 |
+ full_repo = element['sources'][0]['url']
|
|
| 368 |
+ upstream_map, repo_name = os.path.split(full_repo)
|
|
| 369 |
+ alias = 'foo-' + kind
|
|
| 370 |
+ aliased_repo = alias + ':' + repo_name
|
|
| 371 |
+ element['sources'][0]['url'] = aliased_repo
|
|
| 372 |
+ full_mirror = mirror_repo.source_config()['url']
|
|
| 373 |
+ mirror_map, _ = os.path.split(full_mirror)
|
|
| 374 |
+ os.makedirs(element_dir)
|
|
| 375 |
+ _yaml.dump(element, element_path)
|
|
| 376 |
+ |
|
| 377 |
+ project = {
|
|
| 378 |
+ 'name': 'test',
|
|
| 379 |
+ 'element-path': 'elements',
|
|
| 380 |
+ 'aliases': {
|
|
| 381 |
+ alias: 'http://www.example.com/'
|
|
| 382 |
+ },
|
|
| 383 |
+ 'mirrors': [
|
|
| 384 |
+ {
|
|
| 385 |
+ 'name': 'middle-earth',
|
|
| 386 |
+ 'aliases': {
|
|
| 387 |
+ alias: [mirror_map + "/"],
|
|
| 388 |
+ },
|
|
| 389 |
+ },
|
|
| 390 |
+ ]
|
|
| 391 |
+ }
|
|
| 392 |
+ project_file = os.path.join(project_dir, 'project.conf')
|
|
| 393 |
+ _yaml.dump(project, project_file)
|
|
| 394 |
+ |
|
| 395 |
+ result = cli.run(project=project_dir, args=['track', element_name])
|
|
| 396 |
+ result.assert_success()
|
|
| 397 |
+ |
|
| 398 |
+ # Check that tracking fell back to the mirror
|
|
| 399 |
+ new_element = _yaml.load(element_path)
|
|
| 400 |
+ source = new_element['sources'][0]
|
|
| 401 |
+ if 'ref' in source:
|
|
| 402 |
+ assert source['ref'] == mirror_ref
|
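The three default-mirror tests above all assert an ordering property: the preferred mirror's URI appears in the plugin's output before the other mirrors'. A minimal sketch of that ordering rule (an illustration only, not BuildStream's implementation), using the mirror names from the test project:

.. code:: python

   def fetch_order(mirrors, default_mirror=None):
       # Mirror entries shaped like the 'mirrors' list in generate_project();
       # the default mirror, if any, moves to the front, the rest keep order.
       return [m['name'] for m in
               sorted(mirrors, key=lambda m: m['name'] != default_mirror)]

   mirrors = [
       {'name': 'middle-earth'},
       {'name': 'arrakis'},
       {'name': 'oz'},
   ]

   assert fetch_order(mirrors) == ['middle-earth', 'arrakis', 'oz']
   assert fetch_order(mirrors, default_mirror='oz') == ['oz', 'middle-earth', 'arrakis']
   assert fetch_order(mirrors, default_mirror='arrakis') == ['arrakis', 'middle-earth', 'oz']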
| 1 |
+import os
|
|
| 2 |
+import sys
|
|
| 3 |
+ |
|
| 4 |
+from buildstream import Source, Consistency, SourceError, SourceFetcher
|
|
| 5 |
+ |
|
| 6 |
+# Expected config
|
|
| 7 |
+# sources:
|
|
| 8 |
+# - output-text: $FILE
|
|
| 9 |
+# urls:
|
|
| 10 |
+# - foo:bar
|
|
| 11 |
+# - baz:quux
|
|
| 12 |
+# fetch-succeeds:
|
|
| 13 |
+# Foo/bar: true
|
|
| 14 |
+# ooF/bar: false
|
|
| 15 |
+ |
|
| 16 |
+ |
|
| 17 |
+class FetchFetcher(SourceFetcher):
|
|
| 18 |
+ def __init__(self, source, url):
|
|
| 19 |
+ super().__init__()
|
|
| 20 |
+ self.source = source
|
|
| 21 |
+ self.original_url = url
|
|
| 22 |
+ |
|
| 23 |
+ alias, _ = self.source.split_aliased_url(self.original_url)
|
|
| 24 |
+ self.set_alias(alias)
|
|
| 25 |
+ |
|
| 26 |
+ def fetch(self, alias_override=None):
|
|
| 27 |
+ url = self.source.translate_url(self.original_url, alias_override)
|
|
| 28 |
+ with open(self.source.output_file, "a") as f:
|
|
| 29 |
+ success = url in self.source.fetch_succeeds and self.source.fetch_succeeds[url]
|
|
| 30 |
+ message = "Fetch {} {} from {}\n".format(self.original_url,
|
|
| 31 |
+ "succeeded" if success else "failed",
|
|
| 32 |
+ url)
|
|
| 33 |
+ f.write(message)
|
|
| 34 |
+ if not success:
|
|
| 35 |
+ raise SourceError("Failed to fetch {}".format(url))
|
|
| 36 |
+ |
|
| 37 |
+ |
|
| 38 |
+class FetchSource(Source):
|
|
| 39 |
+ # Read config to know which URLs to fetch
|
|
| 40 |
+ def configure(self, node):
|
|
| 41 |
+ self.original_urls = self.node_get_member(node, list, 'urls')
|
|
| 42 |
+ self.fetchers = [FetchFetcher(self, url) for url in self.original_urls]
|
|
| 43 |
+ self.output_file = self.node_get_member(node, str, 'output-text')
|
|
| 44 |
+ self.fetch_succeeds = {}
|
|
| 45 |
+ if 'fetch-succeeds' in node:
|
|
| 46 |
+ self.fetch_succeeds = {x[0]: x[1] for x in self.node_items(node['fetch-succeeds'])}
|
|
| 47 |
+ |
|
| 48 |
+ def get_source_fetchers(self):
|
|
| 49 |
+ return self.fetchers
|
|
| 50 |
+ |
|
| 51 |
+ def preflight(self):
|
|
| 52 |
+ output_dir = os.path.dirname(self.output_file)
|
|
| 53 |
+ if not os.path.exists(output_dir):
|
|
| 54 |
+ raise SourceError("Directory '{}' does not exist".format(output_dir))
|
|
| 55 |
+ |
|
| 56 |
+ def fetch(self):
|
|
| 57 |
+ for fetcher in self.fetchers:
|
|
| 58 |
+ fetcher.fetch()
|
|
| 59 |
+ |
|
| 60 |
+ def get_unique_key(self):
|
|
| 61 |
+ return {"urls": self.original_urls, "output_file": self.output_file}
|
|
| 62 |
+ |
|
| 63 |
+ def get_consistency(self):
|
|
| 64 |
+ if not os.path.exists(self.output_file):
|
|
| 65 |
+ return Consistency.RESOLVED
|
|
| 66 |
+ |
|
| 67 |
+ with open(self.output_file, "r") as f:
|
|
| 68 |
+ contents = f.read()
|
|
| 69 |
+ for url in self.original_urls:
|
|
| 70 |
+ if url not in contents:
|
|
| 71 |
+ return Consistency.RESOLVED
|
|
| 72 |
+ |
|
| 73 |
+ return Consistency.CACHED
|
|
| 74 |
+ |
|
| 75 |
+ # We don't have a ref, we're a local file...
|
|
| 76 |
+ def load_ref(self, node):
|
|
| 77 |
+ pass
|
|
| 78 |
+ |
|
| 79 |
+ def get_ref(self):
|
|
| 80 |
+ return None # pragma: nocover
|
|
| 81 |
+ |
|
| 82 |
+ def set_ref(self, ref, node):
|
|
| 83 |
+ pass # pragma: nocover
|
|
| 84 |
+ |
|
| 85 |
+ |
|
| 86 |
+def setup():
|
|
| 87 |
+ return FetchSource
|
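The ``fetch-succeeds`` keys in the expected config above are already-translated URLs: an aliased URL such as ``foo:repo1`` is expanded against either the project aliases or one mirror's aliases before the fetcher sees it. A minimal sketch of that translation (an illustration of the mechanism the plugin relies on via ``translate_url()``, not BuildStream's own code):

.. code:: python

   def translate(aliased_url, aliases):
       # Split "alias:rest" and substitute the alias's base URI
       alias, rest = aliased_url.split(':', 1)
       return aliases[alias] + rest

   project_aliases = {'foo': 'FOO/', 'bar': 'BAR/'}
   oz_aliases = {'foo': 'ooF/', 'bar': 'raB/'}

   assert translate('foo:repo1', project_aliases) == 'FOO/repo1'
   assert translate('foo:repo1', oz_aliases) == 'ooF/repo1'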
| 1 | 1 |
import os
|
| 2 |
-import pytest
|
|
| 2 |
+import sys
|
|
| 3 |
+import shutil
|
|
| 3 | 4 |
import itertools
|
| 5 |
+import pytest
|
|
| 4 | 6 |
from tests.testutils import cli
|
| 5 |
- |
|
| 6 | 7 |
from buildstream import _yaml
|
| 7 | 8 |
from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
| 8 | 9 |
|
| ... | ... | @@ -232,3 +233,58 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name): |
| 232 | 233 |
|
| 233 | 234 |
results = result.output.strip().splitlines()
|
| 234 | 235 |
assert 'junction.bst:import-etc.bst-buildable' in results
|
| 236 |
+ |
|
| 237 |
+ |
|
| 238 |
+###############################################################
|
|
| 239 |
+# Testing recursion depth #
|
|
| 240 |
+###############################################################
|
|
| 241 |
+@pytest.mark.parametrize("dependency_depth", [100, 500, 1200])
|
|
| 242 |
+def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
|
|
| 243 |
+ project_name = "recursion-test"
|
|
| 244 |
+ path = str(tmpdir)
|
|
| 245 |
+ project_path = os.path.join(path, project_name)
|
|
| 246 |
+ |
|
| 247 |
+ def setup_test():
|
|
| 248 |
+ """
|
|
| 249 |
+ Creates a bst project with dependency_depth + 1 elements, each of which
|
|
| 250 |
+ depends on the previously created element. Each element created
|
|
| 251 |
+ is of type import and has an empty source file.
|
|
| 252 |
+ """
|
|
| 253 |
+ os.mkdir(project_path)
|
|
| 254 |
+ |
|
| 255 |
+ result = cli.run(project=project_path, silent=True,
|
|
| 256 |
+ args=['init', '--project-name', project_name])
|
|
| 257 |
+ result.assert_success()
|
|
| 258 |
+ |
|
| 259 |
+ sourcefiles_path = os.path.join(project_path, "files")
|
|
| 260 |
+ os.mkdir(sourcefiles_path)
|
|
| 261 |
+ |
|
| 262 |
+ element_path = os.path.join(project_path, "elements")
|
|
| 263 |
+ for i in range(0, dependency_depth + 1):
|
|
| 264 |
+ element = {
|
|
| 265 |
+ 'kind': 'import',
|
|
| 266 |
+ 'sources': [{'kind': 'local',
|
|
| 267 |
+ 'path': 'files/source{}'.format(str(i))}],
|
|
| 268 |
+ 'depends': ['element{}.bst'.format(str(i - 1))]
|
|
| 269 |
+ }
|
|
| 270 |
+ if i == 0:
|
|
| 271 |
+ del element['depends']
|
|
| 272 |
+ _yaml.dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))
|
|
| 273 |
+ |
|
| 274 |
+ source = os.path.join(sourcefiles_path, "source{}".format(str(i)))
|
|
| 275 |
+ open(source, 'x').close()
|
|
| 276 |
+ assert os.path.exists(source)
|
|
| 277 |
+ |
|
| 278 |
+ setup_test()
|
|
| 279 |
+ result = cli.run(project=project_path, silent=True,
|
|
| 280 |
+ args=['show', "element{}.bst".format(str(dependency_depth))])
|
|
| 281 |
+ |
|
| 282 |
+ recursion_limit = sys.getrecursionlimit()
|
|
| 283 |
+ if dependency_depth <= recursion_limit:
|
|
| 284 |
+ result.assert_success()
|
|
| 285 |
+ else:
|
|
| 286 |
+ # Assert the exception is thrown and handled
|
|
| 287 |
+ assert not result.unhandled_exception
|
|
| 288 |
+ assert result.exit_code == -1
|
|
| 289 |
+ |
|
| 290 |
+ shutil.rmtree(project_path)
|
| ... | ... | @@ -123,6 +123,58 @@ def test_open_force(cli, tmpdir, datafiles, kind): |
| 123 | 123 |
result.assert_success()
|
| 124 | 124 |
|
| 125 | 125 |
|
| 126 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 127 |
+@pytest.mark.parametrize("kind", repo_kinds)
|
|
| 128 |
+def test_open_force_open(cli, tmpdir, datafiles, kind):
|
|
| 129 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 130 |
+ |
|
| 131 |
+ # Assert the workspace dir exists
|
|
| 132 |
+ assert os.path.exists(workspace)
|
|
| 133 |
+ |
|
| 134 |
+ # Now open the workspace again with --force, this should happily succeed
|
|
| 135 |
+ result = cli.run(project=project, args=[
|
|
| 136 |
+ 'workspace', 'open', '--force', element_name, workspace
|
|
| 137 |
+ ])
|
|
| 138 |
+ result.assert_success()
|
|
| 139 |
+ |
|
| 140 |
+ |
|
| 141 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 142 |
+@pytest.mark.parametrize("kind", repo_kinds)
|
|
| 143 |
+def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
|
|
| 144 |
+ element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False, "-alpha")
|
|
| 145 |
+ |
|
| 146 |
+ # Assert the workspace dir exists
|
|
| 147 |
+ assert os.path.exists(workspace)
|
|
| 148 |
+ |
|
| 149 |
+ hello_path = os.path.join(workspace, 'usr', 'bin', 'hello')
|
|
| 150 |
+ hello1_path = os.path.join(workspace, 'usr', 'bin', 'hello1')
|
|
| 151 |
+ |
|
| 152 |
+ tmpdir = os.path.join(str(tmpdir), "-beta")
|
|
| 153 |
+ shutil.move(hello_path, hello1_path)
|
|
| 154 |
+ element_name2, project2, workspace2 = open_workspace(cli, tmpdir, datafiles, kind, False, "-beta")
|
|
| 155 |
+ |
|
| 156 |
+ # Assert the workspace dir exists
|
|
| 157 |
+ assert os.path.exists(workspace2)
|
|
| 158 |
+ |
|
| 159 |
+ # Assert that workspace 1 contains the modified file
|
|
| 160 |
+ assert os.path.exists(hello1_path)
|
|
| 161 |
+ |
|
| 162 |
+ # Assert that workspace 2 contains the unmodified file
|
|
| 163 |
+ assert os.path.exists(os.path.join(workspace2, 'usr', 'bin', 'hello'))
|
|
| 164 |
+ |
|
| 165 |
+ # Now open the workspace again with --force, this should happily succeed
|
|
| 166 |
+ result = cli.run(project=project, args=[
|
|
| 167 |
+ 'workspace', 'open', '--force', element_name2, workspace
|
|
| 168 |
+ ])
|
|
| 169 |
+ |
|
| 170 |
+ # Assert that the file in workspace 1 has been replaced
|
|
| 171 |
+ # With the file from workspace 2
|
|
| 172 |
+ assert os.path.exists(hello_path)
|
|
| 173 |
+ assert not os.path.exists(hello1_path)
|
|
| 174 |
+ |
|
| 175 |
+ result.assert_success()
|
|
| 176 |
+ |
|
| 177 |
+ |
|
| 126 | 178 |
@pytest.mark.datafiles(DATA_DIR)
|
| 127 | 179 |
@pytest.mark.parametrize("kind", repo_kinds)
|
| 128 | 180 |
def test_close(cli, tmpdir, datafiles, kind):
|
| ... | ... | @@ -22,7 +22,7 @@ class Repo(): |
| 22 | 22 |
# The directory the actual repo will be stored in
|
| 23 | 23 |
self.repo = os.path.join(self.directory, subdir)
|
| 24 | 24 |
|
| 25 |
- os.makedirs(self.repo)
|
|
| 25 |
+ os.makedirs(self.repo, exist_ok=True)
|
|
| 26 | 26 |
|
| 27 | 27 |
# create():
|
| 28 | 28 |
#
|
| ... | ... | @@ -69,3 +69,22 @@ class Repo(): |
| 69 | 69 |
shutil.copytree(src_path, dest_path)
|
| 70 | 70 |
else:
|
| 71 | 71 |
shutil.copy2(src_path, dest_path)
|
| 72 |
+ |
|
| 73 |
+ # copy():
|
|
| 74 |
+ #
|
|
| 75 |
+ # Creates a copy of this repository in the specified
|
|
| 76 |
+ # destination.
|
|
| 77 |
+ #
|
|
| 78 |
+ # Args:
|
|
| 79 |
+ # dest (str): The destination directory
|
|
| 80 |
+ #
|
|
| 81 |
+ # Returns:
|
|
| 82 |
+ # (Repo): A Repo object for the new repository.
|
|
| 83 |
+ def copy(self, dest):
|
|
| 84 |
+ subdir = self.repo[len(self.directory):].lstrip(os.sep)
|
|
| 85 |
+ new_dir = os.path.join(dest, subdir)
|
|
| 86 |
+ os.makedirs(new_dir, exist_ok=True)
|
|
| 87 |
+ self.copy_directory(self.repo, new_dir)
|
|
| 88 |
+ repo_type = type(self)
|
|
| 89 |
+ new_repo = repo_type(dest, subdir)
|
|
| 90 |
+ return new_repo
|
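A minimal sketch of how the new ``copy()`` method is used by the mirror tests above (assumes the ``tests.testutils`` helpers, an available ``git``, and illustrative temporary paths):

.. code:: python

   import os
   import tempfile

   from tests.testutils import create_repo

   tmpdir = tempfile.mkdtemp()
   files_dir = os.path.join(tmpdir, 'files')
   os.makedirs(files_dir)
   open(os.path.join(files_dir, 'hello.txt'), 'w').close()

   # Create an upstream repo, then duplicate it to act as a mirror
   upstream_repo = create_repo('git', os.path.join(tmpdir, 'upstream'))
   upstream_ref = upstream_repo.create(files_dir)
   mirror_repo = upstream_repo.copy(os.path.join(tmpdir, 'mirror'))

   # Both repos can now produce a source config pointing at the same ref
   print(upstream_repo.source_config(ref=upstream_ref))
   print(mirror_repo.source_config(ref=upstream_ref))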