Valentin David pushed to branch valentindavid/git_shallow_fetch at BuildStream / buildstream
Commits:
- 629a6e52 by Chandan Singh at 2018-12-14T19:34:20Z
- f894c0a8 by Chandan Singh at 2018-12-14T19:34:20Z
- b23bec55 by Chandan Singh at 2018-12-14T20:07:13Z
- eb688bbf by Valentin David at 2018-12-17T08:39:09Z
29 changed files:
- NEWS
- buildstream/_frontend/cli.py
- buildstream/_frontend/complete.py
- buildstream/plugins/sources/git.py
- setup.py
- tests/completions/completions.py
- tests/frontend/cross_junction_workspace.py
- tests/frontend/fetch.py
- tests/frontend/help.py
- tests/frontend/logging.py
- tests/frontend/mirror.py
- tests/frontend/show.py
- tests/frontend/source_checkout.py
- tests/frontend/track.py
- tests/frontend/track_cross_junction.py
- tests/frontend/workspace.py
- tests/frontend/yamlcache.py
- tests/integration/compose.py
- tests/integration/pip_source.py
- tests/loader/junctions.py
- tests/pipeline/preflight.py
- tests/plugins/filter.py
- tests/sources/bzr.py
- tests/sources/deb.py
- tests/sources/git.py
- tests/sources/previous_source_access.py
- tests/sources/remote.py
- tests/sources/tar.py
- tests/sources/zip.py
Changes:
... | ... | @@ -10,6 +10,12 @@ buildstream 1.3.1
10 | 10 | an element's sources and generated build scripts you can do the command
11 | 11 | `bst source-checkout --include-build-scripts --tar foo.bst some-file.tar`
12 | 12 |
   | 13 | + o BREAKING CHANGE: `bst track` and `bst fetch` commands are now obsolete.
   | 14 | + Their functionality is provided by `bst source track` and
   | 15 | + `bst source fetch` respectively.
   | 16 | +
   | 17 | + o Added new `bst source checkout` command to checkout sources of an element.
   | 18 | +
13 | 19 | o BREAKING CHANGE: Default strip-commands have been removed as they are too
14 | 20 | specific. Recommendation if you are building in Linux is to use the
15 | 21 | ones being used in freedesktop-sdk project, for example
... | ... | @@ -77,8 +83,6 @@ buildstream 1.3.1
77 | 83 | with cached artifacts, only 'complete' elements can be pushed. If the element
78 | 84 | is expected to have a populated build tree then it must be cached before pushing.
79 | 85 |
80 |    | - o Added new `bst source-checkout` command to checkout sources of an element.
81 |    | -
82 | 86 | o `bst workspace open` now supports the creation of multiple elements and
83 | 87 | allows the user to set a default location for their creation. This has meant
84 | 88 | that the new CLI is no longer backwards compatible with buildstream 1.2.
... | ... | @@ -49,7 +49,8 @@ def search_command(args, *, context=None):
49 | 49 | def complete_commands(cmd, args, incomplete):
50 | 50 | command_ctx = search_command(args[1:])
51 | 51 | if command_ctx and command_ctx.command and isinstance(command_ctx.command, click.MultiCommand):
52 |    | - return [subcommand + " " for subcommand in command_ctx.command.list_commands(command_ctx)]
   | 52 | + return [subcommand + " " for subcommand in command_ctx.command.list_commands(command_ctx)
   | 53 | + if not command_ctx.command.get_command(command_ctx, subcommand).hidden]
53 | 54 |
54 | 55 | return []
55 | 56 |
... | ... | @@ -354,106 +355,6 @@ def build(app, elements, all_, track_, track_save, track_all, track_except, trac |
354 | 355 |
build_all=all_)
|
355 | 356 |
|
356 | 357 |
|
357 |
-##################################################################
|
|
358 |
-# Fetch Command #
|
|
359 |
-##################################################################
|
|
360 |
-@cli.command(short_help="Fetch sources in a pipeline")
|
|
361 |
-@click.option('--except', 'except_', multiple=True,
|
|
362 |
- type=click.Path(readable=False),
|
|
363 |
- help="Except certain dependencies from fetching")
|
|
364 |
-@click.option('--deps', '-d', default='plan',
|
|
365 |
- type=click.Choice(['none', 'plan', 'all']),
|
|
366 |
- help='The dependencies to fetch (default: plan)')
|
|
367 |
-@click.option('--track', 'track_', default=False, is_flag=True,
|
|
368 |
- help="Track new source references before fetching")
|
|
369 |
-@click.option('--track-cross-junctions', '-J', default=False, is_flag=True,
|
|
370 |
- help="Allow tracking to cross junction boundaries")
|
|
371 |
-@click.argument('elements', nargs=-1,
|
|
372 |
- type=click.Path(readable=False))
|
|
373 |
-@click.pass_obj
|
|
374 |
-def fetch(app, elements, deps, track_, except_, track_cross_junctions):
|
|
375 |
- """Fetch sources required to build the pipeline
|
|
376 |
- |
|
377 |
- By default this will only try to fetch sources which are
|
|
378 |
- required for the build plan of the specified target element,
|
|
379 |
- omitting sources for any elements which are already built
|
|
380 |
- and available in the artifact cache.
|
|
381 |
- |
|
382 |
- Specify `--deps` to control which sources to fetch:
|
|
383 |
- |
|
384 |
- \b
|
|
385 |
- none: No dependencies, just the element itself
|
|
386 |
- plan: Only dependencies required for the build plan
|
|
387 |
- all: All dependencies
|
|
388 |
- """
|
|
389 |
- from .._pipeline import PipelineSelection
|
|
390 |
- |
|
391 |
- if track_cross_junctions and not track_:
|
|
392 |
- click.echo("ERROR: The --track-cross-junctions option can only be used with --track", err=True)
|
|
393 |
- sys.exit(-1)
|
|
394 |
- |
|
395 |
- if track_ and deps == PipelineSelection.PLAN:
|
|
396 |
- click.echo("WARNING: --track specified for tracking of a build plan\n\n"
|
|
397 |
- "Since tracking modifies the build plan, all elements will be tracked.", err=True)
|
|
398 |
- deps = PipelineSelection.ALL
|
|
399 |
- |
|
400 |
- with app.initialized(session_name="Fetch"):
|
|
401 |
- if not elements:
|
|
402 |
- guessed_target = app.context.guess_element()
|
|
403 |
- if guessed_target:
|
|
404 |
- elements = (guessed_target,)
|
|
405 |
- |
|
406 |
- app.stream.fetch(elements,
|
|
407 |
- selection=deps,
|
|
408 |
- except_targets=except_,
|
|
409 |
- track_targets=track_,
|
|
410 |
- track_cross_junctions=track_cross_junctions)
|
|
411 |
- |
|
412 |
- |
|
413 |
-##################################################################
|
|
414 |
-# Track Command #
|
|
415 |
-##################################################################
|
|
416 |
-@cli.command(short_help="Track new source references")
|
|
417 |
-@click.option('--except', 'except_', multiple=True,
|
|
418 |
- type=click.Path(readable=False),
|
|
419 |
- help="Except certain dependencies from tracking")
|
|
420 |
-@click.option('--deps', '-d', default='none',
|
|
421 |
- type=click.Choice(['none', 'all']),
|
|
422 |
- help='The dependencies to track (default: none)')
|
|
423 |
-@click.option('--cross-junctions', '-J', default=False, is_flag=True,
|
|
424 |
- help="Allow crossing junction boundaries")
|
|
425 |
-@click.argument('elements', nargs=-1,
|
|
426 |
- type=click.Path(readable=False))
|
|
427 |
-@click.pass_obj
|
|
428 |
-def track(app, elements, deps, except_, cross_junctions):
|
|
429 |
- """Consults the specified tracking branches for new versions available
|
|
430 |
- to build and updates the project with any newly available references.
|
|
431 |
- |
|
432 |
- By default this will track just the specified element, but you can also
|
|
433 |
- update a whole tree of dependencies in one go.
|
|
434 |
- |
|
435 |
- Specify `--deps` to control which sources to track:
|
|
436 |
- |
|
437 |
- \b
|
|
438 |
- none: No dependencies, just the specified elements
|
|
439 |
- all: All dependencies of all specified elements
|
|
440 |
- """
|
|
441 |
- with app.initialized(session_name="Track"):
|
|
442 |
- if not elements:
|
|
443 |
- guessed_target = app.context.guess_element()
|
|
444 |
- if guessed_target:
|
|
445 |
- elements = (guessed_target,)
|
|
446 |
- |
|
447 |
- # Substitute 'none' for 'redirect' so that element redirections
|
|
448 |
- # will be done
|
|
449 |
- if deps == 'none':
|
|
450 |
- deps = 'redirect'
|
|
451 |
- app.stream.track(elements,
|
|
452 |
- selection=deps,
|
|
453 |
- except_targets=except_,
|
|
454 |
- cross_junctions=cross_junctions)
|
|
455 |
- |
|
456 |
- |
|
457 | 358 |
##################################################################
|
458 | 359 |
# Pull Command #
|
459 | 360 |
##################################################################
|
... | ... | @@ -743,10 +644,109 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar): |
743 | 644 |
tar=tar)
|
744 | 645 |
|
745 | 646 |
|
647 |
+##################################################################
|
|
648 |
+# Source Command #
|
|
649 |
+##################################################################
|
|
650 |
+@cli.group(short_help="Manipulate sources for an element")
|
|
651 |
+def source():
|
|
652 |
+ """Manipulate sources for an element"""
|
|
653 |
+ pass
|
|
654 |
+ |
|
655 |
+ |
|
656 |
+##################################################################
|
|
657 |
+# Source Fetch Command #
|
|
658 |
+##################################################################
|
|
659 |
+@source.command(name="fetch", short_help="Fetch sources in a pipeline")
|
|
660 |
+@click.option('--except', 'except_', multiple=True,
|
|
661 |
+ type=click.Path(readable=False),
|
|
662 |
+ help="Except certain dependencies from fetching")
|
|
663 |
+@click.option('--deps', '-d', default='plan',
|
|
664 |
+ type=click.Choice(['none', 'plan', 'all']),
|
|
665 |
+ help='The dependencies to fetch (default: plan)')
|
|
666 |
+@click.option('--track', 'track_', default=False, is_flag=True,
|
|
667 |
+ help="Track new source references before fetching")
|
|
668 |
+@click.option('--track-cross-junctions', '-J', default=False, is_flag=True,
|
|
669 |
+ help="Allow tracking to cross junction boundaries")
|
|
670 |
+@click.argument('elements', nargs=-1,
|
|
671 |
+ type=click.Path(readable=False))
|
|
672 |
+@click.pass_obj
|
|
673 |
+def source_fetch(app, elements, deps, track_, except_, track_cross_junctions):
|
|
674 |
+ """Fetch sources required to build the pipeline
|
|
675 |
+ |
|
676 |
+ By default this will only try to fetch sources which are
|
|
677 |
+ required for the build plan of the specified target element,
|
|
678 |
+ omitting sources for any elements which are already built
|
|
679 |
+ and available in the artifact cache.
|
|
680 |
+ |
|
681 |
+ Specify `--deps` to control which sources to fetch:
|
|
682 |
+ |
|
683 |
+ \b
|
|
684 |
+ none: No dependencies, just the element itself
|
|
685 |
+ plan: Only dependencies required for the build plan
|
|
686 |
+ all: All dependencies
|
|
687 |
+ """
|
|
688 |
+ from .._pipeline import PipelineSelection
|
|
689 |
+ |
|
690 |
+ if track_cross_junctions and not track_:
|
|
691 |
+ click.echo("ERROR: The --track-cross-junctions option can only be used with --track", err=True)
|
|
692 |
+ sys.exit(-1)
|
|
693 |
+ |
|
694 |
+ if track_ and deps == PipelineSelection.PLAN:
|
|
695 |
+ click.echo("WARNING: --track specified for tracking of a build plan\n\n"
|
|
696 |
+ "Since tracking modifies the build plan, all elements will be tracked.", err=True)
|
|
697 |
+ deps = PipelineSelection.ALL
|
|
698 |
+ |
|
699 |
+ with app.initialized(session_name="Fetch"):
|
|
700 |
+ app.stream.fetch(elements,
|
|
701 |
+ selection=deps,
|
|
702 |
+ except_targets=except_,
|
|
703 |
+ track_targets=track_,
|
|
704 |
+ track_cross_junctions=track_cross_junctions)
|
|
705 |
+ |
|
706 |
+ |
|
707 |
+##################################################################
|
|
708 |
+# Source Track Command #
|
|
709 |
+##################################################################
|
|
710 |
+@source.command(name="track", short_help="Track new source references")
|
|
711 |
+@click.option('--except', 'except_', multiple=True,
|
|
712 |
+ type=click.Path(readable=False),
|
|
713 |
+ help="Except certain dependencies from tracking")
|
|
714 |
+@click.option('--deps', '-d', default='none',
|
|
715 |
+ type=click.Choice(['none', 'all']),
|
|
716 |
+ help='The dependencies to track (default: none)')
|
|
717 |
+@click.option('--cross-junctions', '-J', default=False, is_flag=True,
|
|
718 |
+ help="Allow crossing junction boundaries")
|
|
719 |
+@click.argument('elements', nargs=-1,
|
|
720 |
+ type=click.Path(readable=False))
|
|
721 |
+@click.pass_obj
|
|
722 |
+def source_track(app, elements, deps, except_, cross_junctions):
|
|
723 |
+ """Consults the specified tracking branches for new versions available
|
|
724 |
+ to build and updates the project with any newly available references.
|
|
725 |
+ |
|
726 |
+ By default this will track just the specified element, but you can also
|
|
727 |
+ update a whole tree of dependencies in one go.
|
|
728 |
+ |
|
729 |
+ Specify `--deps` to control which sources to track:
|
|
730 |
+ |
|
731 |
+ \b
|
|
732 |
+ none: No dependencies, just the specified elements
|
|
733 |
+ all: All dependencies of all specified elements
|
|
734 |
+ """
|
|
735 |
+ with app.initialized(session_name="Track"):
|
|
736 |
+ # Substitute 'none' for 'redirect' so that element redirections
|
|
737 |
+ # will be done
|
|
738 |
+ if deps == 'none':
|
|
739 |
+ deps = 'redirect'
|
|
740 |
+ app.stream.track(elements,
|
|
741 |
+ selection=deps,
|
|
742 |
+ except_targets=except_,
|
|
743 |
+ cross_junctions=cross_junctions)
|
|
744 |
+ |
|
745 |
+ |
|
746 | 746 |
##################################################################
|
747 | 747 |
# Source Checkout Command #
|
748 | 748 |
##################################################################
|
749 |
-@cli.command(name='source-checkout', short_help='Checkout sources for an element')
|
|
749 |
+@source.command(name='checkout', short_help='Checkout sources for an element')
|
|
750 | 750 |
@click.option('--force', '-f', default=False, is_flag=True,
|
751 | 751 |
help="Allow files to be overwritten")
|
752 | 752 |
@click.option('--except', 'except_', multiple=True,
|
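The hunk above introduces the new `bst source` command group. For reference, this is the standard Click nesting pattern: a group registered on the top-level `cli` group, with the former top-level verbs re-registered as its subcommands. A minimal, self-contained sketch of that pattern follows; the file name `demo.py` and the echo bodies are illustrative only and are not BuildStream code (the real commands call into app.stream as shown in the diff):

import click

@click.group()
def cli():
    """Top-level command group, standing in for `bst` here."""

@cli.group(short_help="Manipulate sources for an element")
def source():
    """Manipulate sources for an element"""

@source.command(name="fetch", short_help="Fetch sources in a pipeline")
@click.argument('elements', nargs=-1)
def source_fetch(elements):
    # Placeholder body; the real command calls app.stream.fetch()
    click.echo("would fetch: {}".format(", ".join(elements)))

@source.command(name="track", short_help="Track new source references")
@click.argument('elements', nargs=-1)
def source_track(elements):
    # Placeholder body; the real command calls app.stream.track()
    click.echo("would track: {}".format(", ".join(elements)))

if __name__ == '__main__':
    cli()  # e.g. `python demo.py source fetch hello.bst`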
... | ... | @@ -1035,3 +1035,54 @@ def artifact_log(app, artifacts):
1035 | 1035 | with open(log) as f:
1036 | 1036 | data = f.read()
1037 | 1037 | click.echo_via_pager(data)
     | 1038 | +
     | 1039 | +
     | 1040 | +##################################################################
     | 1041 | +# DEPRECATED Commands #
     | 1042 | +##################################################################
     | 1043 | +
     | 1044 | +# XXX: The following commands are now obsolete, but they are kept
     | 1045 | +# here along with all the options so that we can provide nice error
     | 1046 | +# messages when they are called.
     | 1047 | +# Also, note that these commands are hidden from the top-level help.
     | 1048 | +
     | 1049 | +##################################################################
     | 1050 | +# Fetch Command #
     | 1051 | +##################################################################
     | 1052 | +@cli.command(short_help="Fetch sources in a pipeline", hidden=True)
     | 1053 | +@click.option('--except', 'except_', multiple=True,
     | 1054 | + type=click.Path(readable=False),
     | 1055 | + help="Except certain dependencies from fetching")
     | 1056 | +@click.option('--deps', '-d', default='plan',
     | 1057 | + type=click.Choice(['none', 'plan', 'all']),
     | 1058 | + help='The dependencies to fetch (default: plan)')
     | 1059 | +@click.option('--track', 'track_', default=False, is_flag=True,
     | 1060 | + help="Track new source references before fetching")
     | 1061 | +@click.option('--track-cross-junctions', '-J', default=False, is_flag=True,
     | 1062 | + help="Allow tracking to cross junction boundaries")
     | 1063 | +@click.argument('elements', nargs=-1,
     | 1064 | + type=click.Path(readable=False))
     | 1065 | +@click.pass_obj
     | 1066 | +def fetch(app, elements, deps, track_, except_, track_cross_junctions):
     | 1067 | + click.echo("This command is now obsolete. Use `bst source fetch` instead.", err=True)
     | 1068 | + sys.exit(1)
     | 1069 | +
     | 1070 | +
     | 1071 | +##################################################################
     | 1072 | +# Track Command #
     | 1073 | +##################################################################
     | 1074 | +@cli.command(short_help="Track new source references", hidden=True)
     | 1075 | +@click.option('--except', 'except_', multiple=True,
     | 1076 | + type=click.Path(readable=False),
     | 1077 | + help="Except certain dependencies from tracking")
     | 1078 | +@click.option('--deps', '-d', default='none',
     | 1079 | + type=click.Choice(['none', 'all']),
     | 1080 | + help='The dependencies to track (default: none)')
     | 1081 | +@click.option('--cross-junctions', '-J', default=False, is_flag=True,
     | 1082 | + help="Allow crossing junction boundaries")
     | 1083 | +@click.argument('elements', nargs=-1,
     | 1084 | + type=click.Path(readable=False))
     | 1085 | +@click.pass_obj
     | 1086 | +def track(app, elements, deps, except_, cross_junctions):
     | 1087 | + click.echo("This command is now obsolete. Use `bst source track` instead.", err=True)
     | 1088 | + sys.exit(1)
... | ... | @@ -297,12 +297,15 @@ def get_choices(cli, prog_name, args, incomplete, override):
297 | 297 |
298 | 298 | if not found_param and isinstance(ctx.command, MultiCommand):
299 | 299 | # completion for any subcommands
300 |     | - choices.extend([cmd + " " for cmd in ctx.command.list_commands(ctx)])
    | 300 | + choices.extend([cmd + " " for cmd in ctx.command.list_commands(ctx)
    | 301 | + if not ctx.command.get_command(ctx, cmd).hidden])
301 | 302 |
302 | 303 | if not start_of_option(incomplete) and ctx.parent is not None \
303 | 304 | and isinstance(ctx.parent.command, MultiCommand) and ctx.parent.command.chain:
304 | 305 | # completion for chained commands
305 |     | - remaining_comands = set(ctx.parent.command.list_commands(ctx.parent)) - set(ctx.parent.protected_args)
    | 306 | + visible_commands = [cmd for cmd in ctx.parent.command.list_commands(ctx.parent)
    | 307 | + if not ctx.parent.command.get_command(ctx.parent, cmd).hidden]
    | 308 | + remaining_comands = set(visible_commands) - set(ctx.parent.protected_args)
306 | 309 | choices.extend([cmd + " " for cmd in remaining_comands])
307 | 310 |
308 | 311 | for item in choices:
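Note that `list_commands()` still returns hidden commands, which is why both completion helpers in this patch filter on `get_command(...).hidden`. A small self-contained illustration of that check (Click >= 7.0 assumed; the command names here are made up):

import click

@click.group()
def cli():
    pass

@cli.command()
def show():
    """A normal, visible command."""

@cli.command(hidden=True)
def fetch():
    """A hidden command that should not be offered by completion."""

ctx = click.Context(cli)
visible = [name for name in cli.list_commands(ctx)
           if not cli.get_command(ctx, name).hidden]
print(visible)  # ['show'] -- the hidden 'fetch' is filtered out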
... | ... | @@ -183,7 +183,7 @@ WARN_INVALID_SUBMODULE = "invalid-submodule"
183 | 183 | #
184 | 184 | class GitMirror(SourceFetcher):
185 | 185 |
186 |     | - def __init__(self, source, path, url, ref, *, primary=False, tags=[]):
    | 186 | + def __init__(self, source, path, url, ref, *, primary=False, tags=[], tracking=None):
187 | 187 |
188 | 188 | super().__init__()
189 | 189 | self.source = source
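The extra `tracking` argument above is used by the shallow-fetch logic added in the next hunk, which also introduces a second mirror directory keyed on both the URL and the ref, so a single-commit mirror can sit next to the full clone. A tiny sketch of that naming scheme, assuming only a helper that maps a URL to a stable directory name (as `utils.url_directory_name()` does in the hunk below):

import os

def mirror_paths(mirror_root, url_dirname, ref):
    # Full mirror: keyed on the URL alone (reusable for any ref).
    full = os.path.join(mirror_root, url_dirname)
    # Shallow fetch mirror: keyed on URL and ref, e.g. "repo-<sha>".
    shallow = os.path.join(mirror_root, '{}-{}'.format(url_dirname, ref))
    return full, shallow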
... | ... | @@ -192,11 +192,101 @@ class GitMirror(SourceFetcher):
192 | 192 | self.ref = ref
193 | 193 | self.tags = tags
194 | 194 | self.primary = primary
    | 195 | + dirname = utils.url_directory_name(url)
195 | 196 | self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
    | 197 | + self.fetch_mirror = os.path.join(source.get_mirror_directory(), '{}-{}'.format(dirname, ref))
196 | 198 | self.mark_download_url(url)
    | 199 | + self.tracking = tracking
    | 200 | +
    | 201 | + def mirror_path(self):
    | 202 | + if os.path.exists(self.mirror):
    | 203 | + return self.mirror
    | 204 | + else:
    | 205 | + assert os.path.exists(self.fetch_mirror)
    | 206 | + return self.fetch_mirror
    | 207 | +
    | 208 | + def ensure_fetchable(self, alias_override=None):
    | 209 | +
    | 210 | + if os.path.exists(self.mirror):
    | 211 | + return
    | 212 | +
    | 213 | + if self.tags:
    | 214 | + for tag, commit, _ in self.tags:
    | 215 | + if commit != self.ref:
    | 216 | + self.source.status("{}: tag '{}' is not on commit '{}', so a full clone is required"
    | 217 | + .format(self.source, tag, commit))
    | 218 | + self.ensure_trackable(alias_override=alias_override)
    | 219 | + return
    | 220 | +
    | 221 | + if os.path.exists(self.fetch_mirror):
    | 222 | + return
    | 223 | +
    | 224 | + with self.source.tempdir() as tmpdir:
    | 225 | + self.source.call([self.source.host_git, 'init', '--bare', tmpdir],
    | 226 | + fail="Failed to init git repository",
    | 227 | + fail_temporarily=True)
    | 228 | +
    | 229 | + url = self.source.translate_url(self.url, alias_override=alias_override,
    | 230 | + primary=self.primary)
    | 231 | +
    | 232 | + self.source.call([self.source.host_git, 'remote', 'add', '--mirror=fetch', 'origin', url],
    | 233 | + cwd=tmpdir,
    | 234 | + fail="Failed to init git repository",
    | 235 | + fail_temporarily=True)
    | 236 | +
    | 237 | + _, refs = self.source.check_output([self.source.host_git, 'ls-remote', 'origin'],
    | 238 | + cwd=tmpdir,
    | 239 | + fail="Failed to clone git repository {}".format(url),
    | 240 | + fail_temporarily=True)
    | 241 | +
    | 242 | + advertised = None
    | 243 | + for ref_line in refs.splitlines():
    | 244 | + commit, ref = ref_line.split('\t', 1)
    | 245 | + if ref == 'HEAD':
    | 246 | + continue
    | 247 | + if self.tracking:
    | 248 | + # For validate_cache to work
    | 249 | + if ref not in ['refs/heads/{}'.format(self.tracking),
    | 250 | + 'refs/tags/{}'.format(self.tracking),
    | 251 | + 'refs/tags/{}{}'.format(self.tracking, '^{}')]:
    | 252 | + continue
    | 253 | + if self.ref == commit:
    | 254 | + if ref.endswith('^{}'):
    | 255 | + ref = ref[:-3]
    | 256 | + advertised = ref
    | 257 | + break
    | 258 | +
    | 259 | + if advertised is None:
    | 260 | + self.source.status("{}: {} is not advertised on {}, so a full clone is required"
    | 261 | + .format(self.source, self.ref, url))
    | 262 | +
    | 263 | + self.ensure_trackable(alias_override=alias_override)
    | 264 | + return
    | 265 | +
    | 266 | + self.source.call([self.source.host_git, 'fetch', '--depth=1', 'origin', advertised],
    | 267 | + cwd=tmpdir,
    | 268 | + fail="Failed to fetch repository",
    | 269 | + fail_temporarily=True)
    | 270 | +
    | 271 | + # We need to have a ref to make it clonable
    | 272 | + self.source.call([self.source.host_git, 'update-ref', 'HEAD', self.ref],
    | 273 | + cwd=tmpdir,
    | 274 | + fail="Failed to tag HEAD",
    | 275 | + fail_temporarily=True)
    | 276 | +
    | 277 | + try:
    | 278 | + move_atomic(tmpdir, self.fetch_mirror)
    | 279 | + except DirectoryExistsError:
    | 280 | + # Another process was quicker to download this repository.
    | 281 | + # Let's discard our own
    | 282 | + self.source.status("{}: Discarding duplicate clone of {}"
    | 283 | + .format(self.source, url))
    | 284 | + except OSError as e:
    | 285 | + raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
    | 286 | + .format(self.source, url, tmpdir, self.fetch_mirror, e)) from e
197 | 287 |
198 | 288 | # Ensures that the mirror exists
199 |     | - def ensure(self, alias_override=None):
    | 289 | + def ensure_trackable(self, alias_override=None):
200 | 290 |
201 | 291 | # Unfortunately, git does not know how to only clone just a specific ref,
202 | 292 | # so we have to download all of those gigs even if we only need a couple
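The new `ensure_fetchable()` above is the core of the shallow-fetch change on this branch: ask the remote via `git ls-remote` whether the wanted commit is the tip of an advertised ref (optionally restricted to the tracked branch or tag), and if so fetch just that ref with `--depth=1` into a bare repository; otherwise fall back to the full clone performed by `ensure_trackable()`. A standalone sketch of that flow with plain subprocess calls follows; the function name, the boolean return and the assumption of a fresh, nonexistent `repo_dir` are mine, and the temporary-directory handling plus the atomic move into the cache are omitted:

import subprocess

def try_shallow_fetch(repo_dir, url, commit, tracking=None):
    """Return True if `commit` could be fetched with --depth=1, that is, if it
    is the tip of a ref advertised by the remote (optionally restricted to the
    tracked branch or tag), roughly mirroring ensure_fetchable() above."""
    refs = subprocess.run(['git', 'ls-remote', url], check=True,
                          capture_output=True, text=True).stdout

    advertised = None
    for line in refs.splitlines():
        sha, _, ref = line.partition('\t')
        if ref == 'HEAD':
            continue
        if tracking and ref not in ('refs/heads/' + tracking,
                                    'refs/tags/' + tracking,
                                    'refs/tags/' + tracking + '^{}'):
            continue
        if sha == commit:
            # Strip the "^{}" suffix of peeled tag refs before fetching.
            advertised = ref[:-3] if ref.endswith('^{}') else ref
            break

    if advertised is None:
        return False  # not advertised, a full clone would be needed

    subprocess.run(['git', 'init', '--bare', repo_dir], check=True)
    subprocess.run(['git', 'remote', 'add', '--mirror=fetch', 'origin', url],
                   cwd=repo_dir, check=True)
    subprocess.run(['git', 'fetch', '--depth=1', 'origin', advertised],
                   cwd=repo_dir, check=True)
    # Give the bare repository a HEAD so it can be cloned later on.
    subprocess.run(['git', 'update-ref', 'HEAD', commit], cwd=repo_dir, check=True)
    return True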
... | ... | @@ -231,18 +321,20 @@ class GitMirror(SourceFetcher):
231 | 321 | alias_override=alias_override,
232 | 322 | primary=self.primary)
233 | 323 |
    | 324 | + mirror = self.mirror_path()
    | 325 | +
234 | 326 | if alias_override:
235 | 327 | remote_name = utils.url_directory_name(alias_override)
236 | 328 | _, remotes = self.source.check_output(
237 | 329 | [self.source.host_git, 'remote'],
238 |     | - fail="Failed to retrieve list of remotes in {}".format(self.mirror),
239 |     | - cwd=self.mirror
    | 330 | + fail="Failed to retrieve list of remotes in {}".format(mirror),
    | 331 | + cwd=mirror
240 | 332 | )
241 | 333 | if remote_name not in remotes:
242 | 334 | self.source.call(
243 | 335 | [self.source.host_git, 'remote', 'add', remote_name, url],
244 | 336 | fail="Failed to add remote {} with url {}".format(remote_name, url),
245 |     | - cwd=self.mirror
    | 337 | + cwd=mirror
246 | 338 | )
247 | 339 | else:
248 | 340 | remote_name = "origin"
... | ... | @@ -250,7 +342,7 @@ class GitMirror(SourceFetcher):
250 | 342 | self.source.call([self.source.host_git, 'fetch', remote_name, '--prune', '--force', '--tags'],
251 | 343 | fail="Failed to fetch from remote git repository: {}".format(url),
252 | 344 | fail_temporarily=True,
253 |     | - cwd=self.mirror)
    | 345 | + cwd=mirror)
254 | 346 |
255 | 347 | def fetch(self, alias_override=None):
256 | 348 | # Resolve the URL for the message
... | ... | @@ -261,7 +353,7 @@ class GitMirror(SourceFetcher):
261 | 353 | with self.source.timed_activity("Fetching from {}"
262 | 354 | .format(resolved_url),
263 | 355 | silent_nested=True):
264 |     | - self.ensure(alias_override)
    | 356 | + self.ensure_fetchable(alias_override)
265 | 357 | if not self.has_ref():
266 | 358 | self._fetch(alias_override)
267 | 359 | self.assert_ref()
... | ... | @@ -270,12 +362,14 @@ class GitMirror(SourceFetcher):
270 | 362 | if not self.ref:
271 | 363 | return False
272 | 364 |
273 |     | - # If the mirror doesnt exist, we also dont have the ref
274 |     | - if not os.path.exists(self.mirror):
    | 365 | + if not os.path.exists(self.mirror) and not os.path.exists(self.fetch_mirror):
    | 366 | + # If the mirror doesnt exist, we also dont have the ref
275 | 367 | return False
276 | 368 |
    | 369 | + mirror = self.mirror_path()
    | 370 | +
277 | 371 | # Check if the ref is really there
278 |     | - rc = self.source.call([self.source.host_git, 'cat-file', '-t', self.ref], cwd=self.mirror)
    | 372 | + rc = self.source.call([self.source.host_git, 'cat-file', '-t', self.ref], cwd=mirror)
279 | 373 | return rc == 0
280 | 374 |
281 | 375 | def assert_ref(self):
... | ... | @@ -325,11 +419,13 @@ class GitMirror(SourceFetcher):
325 | 419 | def stage(self, directory):
326 | 420 | fullpath = os.path.join(directory, self.path)
327 | 421 |
    | 422 | + mirror = self.mirror_path()
    | 423 | +
328 | 424 | # Using --shared here avoids copying the objects into the checkout, in any
329 | 425 | # case we're just checking out a specific commit and then removing the .git/
330 | 426 | # directory.
331 |     | - self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
332 |     | - fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
    | 427 | + self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', mirror, fullpath],
    | 428 | + fail="Failed to create git mirror {} in directory: {}".format(mirror, fullpath),
333 | 429 | fail_temporarily=True)
334 | 430 |
335 | 431 | self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
... | ... | @@ -359,9 +455,11 @@ class GitMirror(SourceFetcher):
359 | 455 |
360 | 456 | # List the submodules (path/url tuples) present at the given ref of this repo
361 | 457 | def submodule_list(self):
    | 458 | + mirror = self.mirror_path()
    | 459 | +
362 | 460 | modules = "{}:{}".format(self.ref, GIT_MODULES)
363 | 461 | exit_code, output = self.source.check_output(
364 |     | - [self.source.host_git, 'show', modules], cwd=self.mirror)
    | 462 | + [self.source.host_git, 'show', modules], cwd=mirror)
365 | 463 |
366 | 464 | # If git show reports error code 128 here, we take it to mean there is
367 | 465 | # no .gitmodules file to display for the given revision.
... | ... | @@ -389,6 +487,8 @@ class GitMirror(SourceFetcher):
389 | 487 | # Fetch the ref which this mirror requires its submodule to have,
390 | 488 | # at the given ref of this mirror.
391 | 489 | def submodule_ref(self, submodule, ref=None):
    | 490 | + mirror = self.mirror_path()
    | 491 | +
392 | 492 | if not ref:
393 | 493 | ref = self.ref
394 | 494 |
... | ... | @@ -397,7 +497,7 @@ class GitMirror(SourceFetcher):
397 | 497 | _, output = self.source.check_output([self.source.host_git, 'ls-tree', ref, submodule],
398 | 498 | fail="ls-tree failed for commit {} and submodule: {}".format(
399 | 499 | ref, submodule),
400 |     | - cwd=self.mirror)
    | 500 | + cwd=mirror)
401 | 501 |
402 | 502 | # read the commit hash from the output
403 | 503 | fields = output.split()
... | ... | @@ -514,8 +614,8 @@ class GitSource(Source):
514 | 614 | self.track_tags = self.node_get_member(node, bool, 'track-tags', False)
515 | 615 |
516 | 616 | self.original_url = self.node_get_member(node, str, 'url')
517 |     | - self.mirror = GitMirror(self, '', self.original_url, ref, tags=tags, primary=True)
518 | 617 | self.tracking = self.node_get_member(node, str, 'track', None)
    | 618 | + self.mirror = GitMirror(self, '', self.original_url, ref, tags=tags, primary=True, tracking=self.tracking)
519 | 619 |
520 | 620 | self.ref_format = self.node_get_member(node, str, 'ref-format', 'sha1')
521 | 621 | if self.ref_format not in ['sha1', 'git-describe']:
... | ... | @@ -633,7 +733,7 @@ class GitSource(Source):
633 | 733 | with self.timed_activity("Tracking {} from {}"
634 | 734 | .format(self.tracking, resolved_url),
635 | 735 | silent_nested=True):
636 |     | - self.mirror.ensure()
    | 736 | + self.mirror.ensure_trackable()
637 | 737 | self.mirror._fetch()
638 | 738 |
639 | 739 | # Update self.mirror.ref and node.ref from the self.tracking branch
... | ... | @@ -643,6 +743,7 @@ class GitSource(Source):
643 | 743 |
644 | 744 | def init_workspace(self, directory):
645 | 745 | # XXX: may wish to refactor this as some code dupe with stage()
    | 746 | + self.mirror.ensure_trackable()
646 | 747 | self.refresh_submodules()
647 | 748 |
648 | 749 | with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
... | ... | @@ -717,15 +818,16 @@ class GitSource(Source):
717 | 818 | # Assert that the ref exists in the track tag/branch, if track has been specified.
718 | 819 | ref_in_track = False
719 | 820 | if self.tracking:
    | 821 | + mirror = self.mirror.mirror_path()
720 | 822 | _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
721 | 823 | '--contains', self.mirror.ref],
722 |     | - cwd=self.mirror.mirror)
    | 824 | + cwd=mirror)
723 | 825 | if branch:
724 | 826 | ref_in_track = True
725 | 827 | else:
726 | 828 | _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
727 | 829 | '--contains', self.mirror.ref],
728 |     | - cwd=self.mirror.mirror)
    | 830 | + cwd=mirror)
729 | 831 | if tag:
730 | 832 | ref_in_track = True
731 | 833 |
... | ... | @@ -749,7 +851,7 @@ class GitSource(Source):
749 | 851 |
750 | 852 | self.refresh_submodules()
751 | 853 | for mirror in self.submodules:
752 |     | - if not os.path.exists(mirror.mirror):
    | 854 | + if not os.path.exists(mirror.mirror) and not os.path.exists(mirror.fetch_mirror):
753 | 855 | return False
754 | 856 | if not mirror.has_ref():
755 | 857 | return False
... | ... | @@ -761,7 +863,7 @@ class GitSource(Source):
761 | 863 | # Assumes that we have our mirror and we have the ref which we point to
762 | 864 | #
763 | 865 | def refresh_submodules(self):
764 |     | - self.mirror.ensure()
    | 866 | + self.mirror.ensure_fetchable()
765 | 867 | submodules = []
766 | 868 |
767 | 869 | for path, url in self.mirror.submodule_list():
... | ... | @@ -346,7 +346,7 @@ setup(name='BuildStream',
346 | 346 | # See issues #571 and #790.
347 | 347 | 'ruamel.yaml >= 0.15.41, < 0.15.52',
348 | 348 | 'pluginbase',
349 |     | - 'Click',
    | 349 | + 'Click >= 7.0',
350 | 350 | 'jinja2 >= 2.10',
351 | 351 | 'protobuf >= 3.5',
352 | 352 | 'grpcio >= 1.10',
... | ... | @@ -9,15 +9,13 @@ MAIN_COMMANDS = [
 9 |  9 | 'artifact ',
10 | 10 | 'build ',
11 | 11 | 'checkout ',
12 |    | - 'fetch ',
13 | 12 | 'help ',
14 | 13 | 'init ',
15 | 14 | 'pull ',
16 | 15 | 'push ',
17 | 16 | 'shell ',
18 | 17 | 'show ',
19 |    | - 'source-checkout ',
20 |    | - 'track ',
   | 18 | + 'source ',
21 | 19 | 'workspace '
22 | 20 | ]
23 | 21 |
... | ... | @@ -50,6 +48,12 @@ MAIN_OPTIONS = [
50 | 48 | "--version ",
51 | 49 | ]
52 | 50 |
   | 51 | +SOURCE_COMMANDS = [
   | 52 | + 'checkout ',
   | 53 | + 'fetch ',
   | 54 | + 'track ',
   | 55 | +]
   | 56 | +
53 | 57 | WORKSPACE_COMMANDS = [
54 | 58 | 'close ',
55 | 59 | 'list ',
... | ... | @@ -115,6 +119,7 @@ def assert_completion_failed(cli, cmd, word_idx, expected, cwd=None):
115 | 119 | ('bst ', 1, MAIN_COMMANDS),
116 | 120 | ('bst pu', 1, ['pull ', 'push ']),
117 | 121 | ('bst pul', 1, ['pull ']),
    | 122 | + ('bst source ', 2, SOURCE_COMMANDS),
118 | 123 | ('bst w ', 1, ['workspace ']),
119 | 124 | ('bst workspace ', 2, WORKSPACE_COMMANDS),
120 | 125 | ])
... | ... | @@ -267,9 +272,10 @@ def test_argument_element_invalid(datafiles, cli, project, cmd, word_idx, expect
267 | 272 | @pytest.mark.parametrize("cmd,word_idx,expected", [
268 | 273 | ('bst he', 1, ['help ']),
269 | 274 | ('bst help ', 2, MAIN_COMMANDS),
270 |     | - ('bst help fe', 2, ['fetch ']),
    | 275 | + ('bst help in', 2, ['init ']),
271 | 276 | ('bst help p', 2, ['pull ', 'push ']),
272 | 277 | ('bst help p', 2, ['pull ', 'push ']),
    | 278 | + ('bst help source ', 3, SOURCE_COMMANDS),
273 | 279 | ('bst help w', 2, ['workspace ']),
274 | 280 | ('bst help workspace ', 3, WORKSPACE_COMMANDS),
275 | 281 | ])
... | ... | @@ -35,7 +35,7 @@ def prepare_junction_project(cli, tmpdir): |
35 | 35 |
'sources': [sub_repo.source_config(ref=sub_ref)]},
|
36 | 36 |
str(main_project.join("sub.bst")))
|
37 | 37 |
|
38 |
- args = ['fetch', 'sub.bst']
|
|
38 |
+ args = ['source', 'fetch', 'sub.bst']
|
|
39 | 39 |
result = cli.run(project=str(main_project), args=args)
|
40 | 40 |
result.assert_success()
|
41 | 41 |
|
... | ... | @@ -41,7 +41,7 @@ def test_fetch(cli, tmpdir, datafiles, kind): |
41 | 41 |
assert cli.get_element_state(project, element_name) == 'fetch needed'
|
42 | 42 |
|
43 | 43 |
# Now try to fetch it
|
44 |
- result = cli.run(project=project, args=['fetch', element_name])
|
|
44 |
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
|
|
45 | 45 |
result.assert_success()
|
46 | 46 |
|
47 | 47 |
# Assert that we are now buildable because the source is
|
... | ... | @@ -55,7 +55,7 @@ def test_fetch_consistency_error(cli, tmpdir, datafiles): |
55 | 55 |
|
56 | 56 |
# When the error occurs outside of the scheduler at load time,
|
57 | 57 |
# then the SourceError is reported directly as the main error.
|
58 |
- result = cli.run(project=project, args=['fetch', 'error.bst'])
|
|
58 |
+ result = cli.run(project=project, args=['source', 'fetch', 'error.bst'])
|
|
59 | 59 |
result.assert_main_error(ErrorDomain.SOURCE, 'the-consistency-error')
|
60 | 60 |
|
61 | 61 |
|
... | ... | @@ -70,7 +70,7 @@ def test_fetch_consistency_bug(cli, tmpdir, datafiles): |
70 | 70 |
# for a fetch command, we could report this to the user
|
71 | 71 |
# more gracefully as a BUG message.
|
72 | 72 |
#
|
73 |
- result = cli.run(project=project, args=['fetch', 'bug.bst'])
|
|
73 |
+ result = cli.run(project=project, args=['source', 'fetch', 'bug.bst'])
|
|
74 | 74 |
assert result.exc is not None
|
75 | 75 |
assert str(result.exc) == "Something went terribly wrong"
|
76 | 76 |
|
... | ... | @@ -121,7 +121,7 @@ def test_unfetched_junction(cli, tmpdir, datafiles, ref_storage): |
121 | 121 |
|
122 | 122 |
# Now try to fetch it, this should automatically result in fetching
|
123 | 123 |
# the junction itself.
|
124 |
- result = cli.run(project=project, args=['fetch', 'junction-dep.bst'])
|
|
124 |
+ result = cli.run(project=project, args=['source', 'fetch', 'junction-dep.bst'])
|
|
125 | 125 |
result.assert_success()
|
126 | 126 |
|
127 | 127 |
|
... | ... | @@ -155,7 +155,7 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage): |
155 | 155 |
|
156 | 156 |
# Now try to fetch it, this will bail with the appropriate error
|
157 | 157 |
# informing the user to track the junction first
|
158 |
- result = cli.run(project=project, args=['fetch', 'junction-dep.bst'])
|
|
158 |
+ result = cli.run(project=project, args=['source', 'fetch', 'junction-dep.bst'])
|
|
159 | 159 |
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
|
160 | 160 |
|
161 | 161 |
|
... | ... | @@ -188,10 +188,10 @@ def test_fetch_cross_junction(cli, tmpdir, datafiles, ref_storage, kind): |
188 | 188 |
generate_junction(tmpdir, subproject_path, junction_path, store_ref=(ref_storage == 'inline'))
|
189 | 189 |
|
190 | 190 |
if ref_storage == 'project.refs':
|
191 |
- result = cli.run(project=project, args=['track', 'junction.bst'])
|
|
191 |
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
|
|
192 | 192 |
result.assert_success()
|
193 |
- result = cli.run(project=project, args=['track', 'junction.bst:import-etc.bst'])
|
|
193 |
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc.bst'])
|
|
194 | 194 |
result.assert_success()
|
195 | 195 |
|
196 |
- result = cli.run(project=project, args=['fetch', 'junction.bst:import-etc.bst'])
|
|
196 |
+ result = cli.run(project=project, args=['source', 'fetch', 'junction.bst:import-etc.bst'])
|
|
197 | 197 |
result.assert_success()
|
... | ... | @@ -20,12 +20,11 @@ def test_help_main(cli):
20 | 20 | @pytest.mark.parametrize("command", [
21 | 21 | ('build'),
22 | 22 | ('checkout'),
23 |    | - ('fetch'),
24 | 23 | ('pull'),
25 | 24 | ('push'),
26 | 25 | ('shell'),
27 | 26 | ('show'),
28 |    | - ('track'),
   | 27 | + ('source'),
29 | 28 | ('workspace')
30 | 29 | ])
31 | 30 | def test_help(cli, command):
... | ... | @@ -38,7 +38,7 @@ def test_default_logging(cli, tmpdir, datafiles): |
38 | 38 |
element_name))
|
39 | 39 |
|
40 | 40 |
# Now try to fetch it
|
41 |
- result = cli.run(project=project, args=['fetch', element_name])
|
|
41 |
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
|
|
42 | 42 |
result.assert_success()
|
43 | 43 |
|
44 | 44 |
m = re.search("\[\d\d:\d\d:\d\d\]\[\]\[\] SUCCESS Checking sources", result.stderr)
|
... | ... | @@ -74,7 +74,7 @@ def test_custom_logging(cli, tmpdir, datafiles): |
74 | 74 |
element_name))
|
75 | 75 |
|
76 | 76 |
# Now try to fetch it
|
77 |
- result = cli.run(project=project, args=['fetch', element_name])
|
|
77 |
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
|
|
78 | 78 |
result.assert_success()
|
79 | 79 |
|
80 | 80 |
m = re.search("\d\d:\d\d:\d\d,\d\d:\d\d:\d\d.\d{6},\d\d:\d\d:\d\d,,,SUCCESS,Checking sources", result.stderr)
|
... | ... | @@ -135,7 +135,7 @@ def test_mirror_fetch(cli, tmpdir, datafiles, kind): |
135 | 135 |
|
136 | 136 |
# No obvious ways of checking that the mirror has been fetched
|
137 | 137 |
# But at least we can be sure it succeeds
|
138 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
138 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
139 | 139 |
result.assert_success()
|
140 | 140 |
|
141 | 141 |
|
... | ... | @@ -211,7 +211,7 @@ def test_mirror_fetch_ref_storage(cli, tmpdir, datafiles, ref_storage, mirror): |
211 | 211 |
project_file = os.path.join(project_dir, 'project.conf')
|
212 | 212 |
_yaml.dump(project, project_file)
|
213 | 213 |
|
214 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
214 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
215 | 215 |
result.assert_success()
|
216 | 216 |
|
217 | 217 |
|
... | ... | @@ -268,7 +268,7 @@ def test_mirror_fetch_upstream_absent(cli, tmpdir, datafiles, kind): |
268 | 268 |
project_file = os.path.join(project_dir, 'project.conf')
|
269 | 269 |
_yaml.dump(project, project_file)
|
270 | 270 |
|
271 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
271 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
272 | 272 |
result.assert_success()
|
273 | 273 |
|
274 | 274 |
|
... | ... | @@ -287,7 +287,7 @@ def test_mirror_fetch_multi(cli, tmpdir, datafiles): |
287 | 287 |
project = generate_project()
|
288 | 288 |
_yaml.dump(project, project_file)
|
289 | 289 |
|
290 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
290 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
291 | 291 |
result.assert_success()
|
292 | 292 |
with open(output_file) as f:
|
293 | 293 |
contents = f.read()
|
... | ... | @@ -310,7 +310,7 @@ def test_mirror_fetch_default_cmdline(cli, tmpdir, datafiles): |
310 | 310 |
project = generate_project()
|
311 | 311 |
_yaml.dump(project, project_file)
|
312 | 312 |
|
313 |
- result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'fetch', element_name])
|
|
313 |
+ result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'source', 'fetch', element_name])
|
|
314 | 314 |
result.assert_success()
|
315 | 315 |
with open(output_file) as f:
|
316 | 316 |
contents = f.read()
|
... | ... | @@ -349,7 +349,7 @@ def test_mirror_fetch_default_userconfig(cli, tmpdir, datafiles): |
349 | 349 |
}
|
350 | 350 |
cli.configure(userconfig)
|
351 | 351 |
|
352 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
352 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
353 | 353 |
result.assert_success()
|
354 | 354 |
with open(output_file) as f:
|
355 | 355 |
contents = f.read()
|
... | ... | @@ -388,7 +388,7 @@ def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir, datafiles): |
388 | 388 |
}
|
389 | 389 |
cli.configure(userconfig)
|
390 | 390 |
|
391 |
- result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'fetch', element_name])
|
|
391 |
+ result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'source', 'fetch', element_name])
|
|
392 | 392 |
result.assert_success()
|
393 | 393 |
with open(output_file) as f:
|
394 | 394 |
contents = f.read()
|
... | ... | @@ -459,7 +459,7 @@ def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind): |
459 | 459 |
project_file = os.path.join(project_dir, 'project.conf')
|
460 | 460 |
_yaml.dump(project, project_file)
|
461 | 461 |
|
462 |
- result = cli.run(project=project_dir, args=['track', element_name])
|
|
462 |
+ result = cli.run(project=project_dir, args=['source', 'track', element_name])
|
|
463 | 463 |
result.assert_success()
|
464 | 464 |
|
465 | 465 |
# Tracking tries upstream first. Check the ref is from upstream.
|
... | ... | @@ -525,7 +525,7 @@ def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind): |
525 | 525 |
project_file = os.path.join(project_dir, 'project.conf')
|
526 | 526 |
_yaml.dump(project, project_file)
|
527 | 527 |
|
528 |
- result = cli.run(project=project_dir, args=['track', element_name])
|
|
528 |
+ result = cli.run(project=project_dir, args=['source', 'track', element_name])
|
|
529 | 529 |
result.assert_success()
|
530 | 530 |
|
531 | 531 |
# Check that tracking fell back to the mirror
|
... | ... | @@ -604,7 +604,7 @@ def test_mirror_from_includes(cli, tmpdir, datafiles, kind): |
604 | 604 |
|
605 | 605 |
# Now make the upstream unavailable.
|
606 | 606 |
os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
|
607 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
607 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
608 | 608 |
result.assert_success()
|
609 | 609 |
|
610 | 610 |
|
... | ... | @@ -678,11 +678,11 @@ def test_mirror_junction_from_includes(cli, tmpdir, datafiles, kind): |
678 | 678 |
|
679 | 679 |
# Now make the upstream unavailable.
|
680 | 680 |
os.rename(upstream_repo.repo, '{}.bak'.format(upstream_repo.repo))
|
681 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
681 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
682 | 682 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
683 | 683 |
# Now make the upstream available again.
|
684 | 684 |
os.rename('{}.bak'.format(upstream_repo.repo), upstream_repo.repo)
|
685 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
685 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
686 | 686 |
result.assert_success()
|
687 | 687 |
|
688 | 688 |
|
... | ... | @@ -762,7 +762,7 @@ def test_mirror_git_submodule_fetch(cli, tmpdir, datafiles): |
762 | 762 |
project_file = os.path.join(project_dir, 'project.conf')
|
763 | 763 |
_yaml.dump(project, project_file)
|
764 | 764 |
|
765 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
765 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
766 | 766 |
result.assert_success()
|
767 | 767 |
|
768 | 768 |
|
... | ... | @@ -849,7 +849,7 @@ def test_mirror_fallback_git_only_submodules(cli, tmpdir, datafiles): |
849 | 849 |
|
850 | 850 |
# Now make the upstream unavailable.
|
851 | 851 |
os.rename(upstream_bin_repo.repo, '{}.bak'.format(upstream_bin_repo.repo))
|
852 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
852 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
853 | 853 |
result.assert_success()
|
854 | 854 |
|
855 | 855 |
result = cli.run(project=project_dir, args=['build', element_name])
|
... | ... | @@ -945,7 +945,7 @@ def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles): |
945 | 945 |
|
946 | 946 |
# Now make the upstream unavailable.
|
947 | 947 |
os.rename(upstream_main_repo.repo, '{}.bak'.format(upstream_main_repo.repo))
|
948 |
- result = cli.run(project=project_dir, args=['fetch', element_name])
|
|
948 |
+ result = cli.run(project=project_dir, args=['source', 'fetch', element_name])
|
|
949 | 949 |
result.assert_success()
|
950 | 950 |
|
951 | 951 |
result = cli.run(project=project_dir, args=['build', element_name])
|
... | ... | @@ -236,7 +236,7 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name): |
236 | 236 |
_yaml.dump(element, element_path)
|
237 | 237 |
|
238 | 238 |
result = cli.run(project=project, silent=True, args=[
|
239 |
- 'fetch', 'junction.bst'])
|
|
239 |
+ 'source', 'fetch', 'junction.bst'])
|
|
240 | 240 |
|
241 | 241 |
result.assert_success()
|
242 | 242 |
|
... | ... | @@ -50,7 +50,7 @@ def test_source_checkout(datafiles, cli, tmpdir_factory, with_workspace, guess_e |
50 | 50 |
else:
|
51 | 51 |
ws_cmd = []
|
52 | 52 |
|
53 |
- args = ws_cmd + ['source-checkout', '--deps', 'none'] + elm_cmd + [checkout]
|
|
53 |
+ args = ws_cmd + ['source', 'checkout', '--deps', 'none'] + elm_cmd + [checkout]
|
|
54 | 54 |
result = cli.run(project=project, args=args)
|
55 | 55 |
result.assert_success()
|
56 | 56 |
|
... | ... | @@ -67,7 +67,7 @@ def test_source_checkout_force(datafiles, cli, force_flag): |
67 | 67 |
os.makedirs(os.path.join(checkout, 'some-thing'))
|
68 | 68 |
# Path(os.path.join(checkout, 'some-file')).touch()
|
69 | 69 |
|
70 |
- result = cli.run(project=project, args=['source-checkout', force_flag, target, '--deps', 'none', checkout])
|
|
70 |
+ result = cli.run(project=project, args=['source', 'checkout', force_flag, target, '--deps', 'none', checkout])
|
|
71 | 71 |
result.assert_success()
|
72 | 72 |
|
73 | 73 |
assert os.path.exists(os.path.join(checkout, 'checkout-deps', 'etc', 'buildstream', 'config'))
|
... | ... | @@ -79,7 +79,7 @@ def test_source_checkout_tar(datafiles, cli): |
79 | 79 |
checkout = os.path.join(cli.directory, 'source-checkout.tar')
|
80 | 80 |
target = 'checkout-deps.bst'
|
81 | 81 |
|
82 |
- result = cli.run(project=project, args=['source-checkout', '--tar', target, '--deps', 'none', checkout])
|
|
82 |
+ result = cli.run(project=project, args=['source', 'checkout', '--tar', target, '--deps', 'none', checkout])
|
|
83 | 83 |
result.assert_success()
|
84 | 84 |
|
85 | 85 |
assert os.path.exists(checkout)
|
... | ... | @@ -97,7 +97,7 @@ def test_source_checkout_deps(datafiles, cli, deps): |
97 | 97 |
checkout = os.path.join(cli.directory, 'source-checkout')
|
98 | 98 |
target = 'checkout-deps.bst'
|
99 | 99 |
|
100 |
- result = cli.run(project=project, args=['source-checkout', target, '--deps', deps, checkout])
|
|
100 |
+ result = cli.run(project=project, args=['source', 'checkout', target, '--deps', deps, checkout])
|
|
101 | 101 |
result.assert_success()
|
102 | 102 |
|
103 | 103 |
# Sources of the target
|
... | ... | @@ -125,7 +125,7 @@ def test_source_checkout_except(datafiles, cli): |
125 | 125 |
checkout = os.path.join(cli.directory, 'source-checkout')
|
126 | 126 |
target = 'checkout-deps.bst'
|
127 | 127 |
|
128 |
- result = cli.run(project=project, args=['source-checkout', target,
|
|
128 |
+ result = cli.run(project=project, args=['source', 'checkout', target,
|
|
129 | 129 |
'--deps', 'all',
|
130 | 130 |
'--except', 'import-bin.bst',
|
131 | 131 |
checkout])
|
... | ... | @@ -159,7 +159,7 @@ def test_source_checkout_fetch(datafiles, cli, fetch): |
159 | 159 |
# cached already
|
160 | 160 |
assert cli.get_element_state(project, target) == 'fetch needed'
|
161 | 161 |
|
162 |
- args = ['source-checkout']
|
|
162 |
+ args = ['source', 'checkout']
|
|
163 | 163 |
if fetch:
|
164 | 164 |
args += ['--fetch']
|
165 | 165 |
args += [target, checkout]
|
... | ... | @@ -179,7 +179,7 @@ def test_source_checkout_build_scripts(cli, tmpdir, datafiles): |
179 | 179 |
normal_name = 'source-bundle-source-bundle-hello'
|
180 | 180 |
checkout = os.path.join(str(tmpdir), 'source-checkout')
|
181 | 181 |
|
182 |
- args = ['source-checkout', '--include-build-scripts', element_name, checkout]
|
|
182 |
+ args = ['source', 'checkout', '--include-build-scripts', element_name, checkout]
|
|
183 | 183 |
result = cli.run(project=project_path, args=args)
|
184 | 184 |
result.assert_success()
|
185 | 185 |
|
... | ... | @@ -196,7 +196,7 @@ def test_source_checkout_tar_buildscripts(cli, tmpdir, datafiles): |
196 | 196 |
normal_name = 'source-bundle-source-bundle-hello'
|
197 | 197 |
tar_file = os.path.join(str(tmpdir), 'source-checkout.tar')
|
198 | 198 |
|
199 |
- args = ['source-checkout', '--include-build-scripts', '--tar', element_name, tar_file]
|
|
199 |
+ args = ['source', 'checkout', '--include-build-scripts', '--tar', element_name, tar_file]
|
|
200 | 200 |
result = cli.run(project=project_path, args=args)
|
201 | 201 |
result.assert_success()
|
202 | 202 |
|
... | ... | @@ -52,14 +52,14 @@ def test_track(cli, tmpdir, datafiles, ref_storage, kind): |
52 | 52 |
assert cli.get_element_state(project, element_name) == 'no reference'
|
53 | 53 |
|
54 | 54 |
# Now first try to track it
|
55 |
- result = cli.run(project=project, args=['track', element_name])
|
|
55 |
+ result = cli.run(project=project, args=['source', 'track', element_name])
|
|
56 | 56 |
result.assert_success()
|
57 | 57 |
|
58 | 58 |
# And now fetch it: The Source has probably already cached the
|
59 | 59 |
# latest ref locally, but it is not required to have cached
|
60 | 60 |
# the associated content of the latest ref at track time, that
|
61 | 61 |
# is the job of fetch.
|
62 |
- result = cli.run(project=project, args=['fetch', element_name])
|
|
62 |
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
|
|
63 | 63 |
result.assert_success()
|
64 | 64 |
|
65 | 65 |
# Assert that we are now buildable because the source is
|
... | ... | @@ -99,7 +99,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind): |
99 | 99 |
|
100 | 100 |
# Now first try to track it
|
101 | 101 |
result = cli.run(project=project, args=[
|
102 |
- 'track', '--deps', 'all',
|
|
102 |
+ 'source', 'track', '--deps', 'all',
|
|
103 | 103 |
element_target_name])
|
104 | 104 |
result.assert_success()
|
105 | 105 |
|
... | ... | @@ -108,7 +108,7 @@ def test_track_recurse(cli, tmpdir, datafiles, kind): |
108 | 108 |
# the associated content of the latest ref at track time, that
|
109 | 109 |
# is the job of fetch.
|
110 | 110 |
result = cli.run(project=project, args=[
|
111 |
- 'fetch', '--deps', 'all',
|
|
111 |
+ 'source', 'fetch', '--deps', 'all',
|
|
112 | 112 |
element_target_name])
|
113 | 113 |
result.assert_success()
|
114 | 114 |
|
... | ... | @@ -142,13 +142,13 @@ def test_track_single(cli, tmpdir, datafiles): |
142 | 142 |
|
143 | 143 |
# Now first try to track only one element
|
144 | 144 |
result = cli.run(project=project, args=[
|
145 |
- 'track', '--deps', 'none',
|
|
145 |
+ 'source', 'track', '--deps', 'none',
|
|
146 | 146 |
element_target_name])
|
147 | 147 |
result.assert_success()
|
148 | 148 |
|
149 | 149 |
# And now fetch it
|
150 | 150 |
result = cli.run(project=project, args=[
|
151 |
- 'fetch', '--deps', 'none',
|
|
151 |
+ 'source', 'fetch', '--deps', 'none',
|
|
152 | 152 |
element_target_name])
|
153 | 153 |
result.assert_success()
|
154 | 154 |
|
... | ... | @@ -183,7 +183,7 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind): |
183 | 183 |
|
184 | 184 |
# Now first try to track it
|
185 | 185 |
result = cli.run(project=project, args=[
|
186 |
- 'track', '--deps', 'all', '--except', element_dep_name,
|
|
186 |
+ 'source', 'track', '--deps', 'all', '--except', element_dep_name,
|
|
187 | 187 |
element_target_name])
|
188 | 188 |
result.assert_success()
|
189 | 189 |
|
... | ... | @@ -192,7 +192,7 @@ def test_track_recurse_except(cli, tmpdir, datafiles, kind): |
192 | 192 |
# the associated content of the latest ref at track time, that
|
193 | 193 |
# is the job of fetch.
|
194 | 194 |
result = cli.run(project=project, args=[
|
195 |
- 'fetch', '--deps', 'none',
|
|
195 |
+ 'source', 'fetch', '--deps', 'none',
|
|
196 | 196 |
element_target_name])
|
197 | 197 |
result.assert_success()
|
198 | 198 |
|
... | ... | @@ -231,9 +231,9 @@ def test_track_optional(cli, tmpdir, datafiles, ref_storage): |
231 | 231 |
#
|
232 | 232 |
# We want to track and persist the ref separately in this test
|
233 | 233 |
#
|
234 |
- result = cli.run(project=project, args=['--option', 'test', 'False', 'track', 'target.bst'])
|
|
234 |
+ result = cli.run(project=project, args=['--option', 'test', 'False', 'source', 'track', 'target.bst'])
|
|
235 | 235 |
result.assert_success()
|
236 |
- result = cli.run(project=project, args=['--option', 'test', 'True', 'track', 'target.bst'])
|
|
236 |
+ result = cli.run(project=project, args=['--option', 'test', 'True', 'source', 'track', 'target.bst'])
|
|
237 | 237 |
result.assert_success()
|
238 | 238 |
|
239 | 239 |
# Now fetch the key for both options
|
... | ... | @@ -309,7 +309,7 @@ def test_track_cross_junction(cli, tmpdir, datafiles, cross_junction, ref_storag |
309 | 309 |
assert get_subproject_element_state() == 'no reference'
|
310 | 310 |
|
311 | 311 |
# Track recursively across the junction
|
312 |
- args = ['track', '--deps', 'all']
|
|
312 |
+ args = ['source', 'track', '--deps', 'all']
|
|
313 | 313 |
if cross_junction == 'cross':
|
314 | 314 |
args += ['--cross-junctions']
|
315 | 315 |
args += ['target.bst']
|
... | ... | @@ -350,7 +350,7 @@ def test_track_consistency_error(cli, tmpdir, datafiles): |
350 | 350 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
351 | 351 |
|
352 | 352 |
# Track the element causing a consistency error
|
353 |
- result = cli.run(project=project, args=['track', 'error.bst'])
|
|
353 |
+ result = cli.run(project=project, args=['source', 'track', 'error.bst'])
|
|
354 | 354 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
355 | 355 |
result.assert_task_error(ErrorDomain.SOURCE, 'the-consistency-error')
|
356 | 356 |
|
... | ... | @@ -360,7 +360,7 @@ def test_track_consistency_bug(cli, tmpdir, datafiles): |
360 | 360 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
361 | 361 |
|
362 | 362 |
# Track the element causing an unhandled exception
|
363 |
- result = cli.run(project=project, args=['track', 'bug.bst'])
|
|
363 |
+ result = cli.run(project=project, args=['source', 'track', 'bug.bst'])
|
|
364 | 364 |
|
365 | 365 |
# We expect BuildStream to fail gracefully, with no recorded exception.
|
366 | 366 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
... | ... | @@ -396,7 +396,7 @@ def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage): |
396 | 396 |
|
397 | 397 |
# Now try to track it, this will bail with the appropriate error
|
398 | 398 |
# informing the user to track the junction first
|
399 |
- result = cli.run(project=project, args=['track', 'junction-dep.bst'])
|
|
399 |
+ result = cli.run(project=project, args=['source', 'track', 'junction-dep.bst'])
|
|
400 | 400 |
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
|
401 | 401 |
|
402 | 402 |
|
... | ... | @@ -433,7 +433,7 @@ def test_junction_element(cli, tmpdir, datafiles, ref_storage): |
433 | 433 |
result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
|
434 | 434 |
|
435 | 435 |
# Now track the junction itself
|
436 |
- result = cli.run(project=project, args=['track', 'junction.bst'])
|
|
436 |
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
|
|
437 | 437 |
result.assert_success()
|
438 | 438 |
|
439 | 439 |
# Now assert element state (via bst show under the hood) of the dep again
|
... | ... | @@ -464,13 +464,13 @@ def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind): |
464 | 464 |
subproject_path, junction_path, store_ref=False)
|
465 | 465 |
|
466 | 466 |
# Track the junction itself first.
|
467 |
- result = cli.run(project=project, args=['track', 'junction.bst'])
|
|
467 |
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
|
|
468 | 468 |
result.assert_success()
|
469 | 469 |
|
470 | 470 |
assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'no reference'
|
471 | 471 |
|
472 | 472 |
# Track the cross junction element. -J is not given, it is implied.
|
473 |
- result = cli.run(project=project, args=['track', 'junction.bst:import-etc-repo.bst'])
|
|
473 |
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst:import-etc-repo.bst'])
|
|
474 | 474 |
|
475 | 475 |
if ref_storage == 'inline':
|
476 | 476 |
# This is not allowed to track cross junction without project.refs.
|
... | ... | @@ -520,14 +520,14 @@ def test_track_include(cli, tmpdir, datafiles, ref_storage, kind): |
520 | 520 |
assert cli.get_element_state(project, element_name) == 'no reference'
|
521 | 521 |
|
522 | 522 |
# Now first try to track it
|
523 |
- result = cli.run(project=project, args=['track', element_name])
|
|
523 |
+ result = cli.run(project=project, args=['source', 'track', element_name])
|
|
524 | 524 |
result.assert_success()
|
525 | 525 |
|
526 | 526 |
# And now fetch it: The Source has probably already cached the
|
527 | 527 |
# latest ref locally, but it is not required to have cached
|
528 | 528 |
# the associated content of the latest ref at track time, that
|
529 | 529 |
# is the job of fetch.
|
530 |
- result = cli.run(project=project, args=['fetch', element_name])
|
|
530 |
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
|
|
531 | 531 |
result.assert_success()
|
532 | 532 |
|
533 | 533 |
# Assert that we are now buildable because the source is
|
... | ... | @@ -585,14 +585,14 @@ def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind): |
585 | 585 |
generate_junction(str(tmpdir.join('junction_repo')),
|
586 | 586 |
subproject_path, junction_path, store_ref=True)
|
587 | 587 |
|
588 |
- result = cli.run(project=project, args=['track', 'junction.bst'])
|
|
588 |
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
|
|
589 | 589 |
result.assert_success()
|
590 | 590 |
|
591 | 591 |
# Assert that a fetch is needed
|
592 | 592 |
assert cli.get_element_state(project, element_name) == 'no reference'
|
593 | 593 |
|
594 | 594 |
# Now first try to track it
|
595 |
- result = cli.run(project=project, args=['track', element_name])
|
|
595 |
+ result = cli.run(project=project, args=['source', 'track', element_name])
|
|
596 | 596 |
|
597 | 597 |
# Assert there was a project.refs created, depending on the configuration
|
598 | 598 |
if ref_storage == 'inline':
|
... | ... | @@ -607,7 +607,7 @@ def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind): |
607 | 607 |
# latest ref locally, but it is not required to have cached
|
608 | 608 |
# the associated content of the latest ref at track time, that
|
609 | 609 |
# is the job of fetch.
|
610 |
- result = cli.run(project=project, args=['fetch', element_name])
|
|
610 |
+ result = cli.run(project=project, args=['source', 'fetch', element_name])
|
|
611 | 611 |
result.assert_success()
|
612 | 612 |
|
613 | 613 |
# Assert that we are now buildable because the source is
|
... | ... | @@ -633,7 +633,7 @@ def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind): |
633 | 633 |
generate_junction(str(tmpdir.join('junction_repo')),
|
634 | 634 |
subproject_path, junction_path, store_ref=False)
|
635 | 635 |
|
636 |
- result = cli.run(project=project, args=['track', 'junction.bst'])
|
|
636 |
+ result = cli.run(project=project, args=['source', 'track', 'junction.bst'])
|
|
637 | 637 |
result.assert_success()
|
638 | 638 |
|
639 | 639 |
|
... | ... | @@ -663,7 +663,7 @@ def test_track_error_cannot_write_file(cli, tmpdir, datafiles, kind): |
663 | 663 |
read_mask = stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
|
664 | 664 |
os.chmod(element_path, stat.S_IMODE(st.st_mode) & ~read_mask)
|
665 | 665 |
|
666 |
- result = cli.run(project=project, args=['track', element_name])
|
|
666 |
+ result = cli.run(project=project, args=['source', 'track', element_name])
|
|
667 | 667 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
668 | 668 |
result.assert_task_error(ErrorDomain.SOURCE, 'save-ref-error')
|
669 | 669 |
finally:
|
... | ... | @@ -100,7 +100,7 @@ def test_cross_junction_multiple_projects(cli, tmpdir, datafiles, kind): |
100 | 100 |
generate_junction(tmpdir.join('repo_b'), project_b_path, junction_b_path, store_ref=False)
|
101 | 101 |
|
102 | 102 |
# Track the junctions.
|
103 |
- result = cli.run(project=project, args=['track', junction_a, junction_b])
|
|
103 |
+ result = cli.run(project=project, args=['source', 'track', junction_a, junction_b])
|
|
104 | 104 |
result.assert_success()
|
105 | 105 |
|
106 | 106 |
# Import elements from a and b in to main.
|
... | ... | @@ -111,7 +111,10 @@ def test_cross_junction_multiple_projects(cli, tmpdir, datafiles, kind): |
111 | 111 |
all_bst = generate_simple_stack(project, 'all', [imported_a, imported_b, element_c])
|
112 | 112 |
|
113 | 113 |
# Track without following junctions. But explicitly also track the elements in project a.
|
114 |
- result = cli.run(project=project, args=['track', '--deps', 'all', all_bst, '{}:{}'.format(junction_a, stack_a)])
|
|
114 |
+ result = cli.run(project=project, args=['source', 'track',
|
|
115 |
+ '--deps', 'all',
|
|
116 |
+ all_bst,
|
|
117 |
+ '{}:{}'.format(junction_a, stack_a)])
|
|
115 | 118 |
result.assert_success()
|
116 | 119 |
|
117 | 120 |
# Elements in project b should not be tracked. But elements in project a and main should.
|
... | ... | @@ -137,14 +140,14 @@ def test_track_exceptions(cli, tmpdir, datafiles, kind): |
137 | 140 |
junction_a_path = os.path.join(project, 'elements', junction_a)
|
138 | 141 |
generate_junction(tmpdir.join('repo_a'), project_a_path, junction_a_path, store_ref=False)
|
139 | 142 |
|
140 |
- result = cli.run(project=project, args=['track', junction_a])
|
|
143 |
+ result = cli.run(project=project, args=['source', 'track', junction_a])
|
|
141 | 144 |
result.assert_success()
|
142 | 145 |
|
143 | 146 |
imported_b = generate_cross_element(project, project_a, element_b)
|
144 | 147 |
indirection = generate_simple_stack(project, 'indirection', [imported_b])
|
145 | 148 |
|
146 | 149 |
result = cli.run(project=project,
|
147 |
- args=['track', '--deps', 'all',
|
|
150 |
+ args=['source', 'track', '--deps', 'all',
|
|
148 | 151 |
'--except', indirection,
|
149 | 152 |
'{}:{}'.format(junction_a, all_bst), imported_b])
|
150 | 153 |
result.assert_success()
|
... | ... | @@ -1078,7 +1078,7 @@ def test_external_fetch(cli, datafiles, tmpdir_factory, subdir, guess_element): |
1078 | 1078 |
else:
|
1079 | 1079 |
call_dir = workspace
|
1080 | 1080 |
|
1081 |
- result = cli.run(project=project, args=['-C', call_dir, 'fetch'] + arg_elm)
|
|
1081 |
+ result = cli.run(project=project, args=['-C', call_dir, 'source', 'fetch'] + arg_elm)
|
|
1082 | 1082 |
result.assert_success()
|
1083 | 1083 |
|
1084 | 1084 |
# We already fetched it by opening the workspace, but we're also checking
|
... | ... | @@ -1122,7 +1122,7 @@ def test_external_track(cli, datafiles, tmpdir_factory, guess_element): |
1122 | 1122 |
|
1123 | 1123 |
# The workspace is necessarily already tracked, so we only care that
|
1124 | 1124 |
# there's no weird errors.
|
1125 |
- result = cli.run(project=project, args=['-C', workspace, 'track'] + arg_elm)
|
|
1125 |
+ result = cli.run(project=project, args=['-C', workspace, 'source', 'track'] + arg_elm)
|
|
1126 | 1126 |
result.assert_success()
|
1127 | 1127 |
|
1128 | 1128 |
|
... | ... | @@ -82,7 +82,7 @@ def test_yamlcache_used(cli, tmpdir, ref_storage, with_junction, move_project): |
82 | 82 |
# Generate the project
|
83 | 83 |
project = generate_project(str(tmpdir), ref_storage, with_junction)
|
84 | 84 |
if with_junction == 'junction':
|
85 |
- result = cli.run(project=project, args=['fetch', '--track', 'junction.bst'])
|
|
85 |
+ result = cli.run(project=project, args=['source', 'fetch', '--track', 'junction.bst'])
|
|
86 | 86 |
result.assert_success()
|
87 | 87 |
|
88 | 88 |
# bst show to put it in the cache
|
... | ... | @@ -118,7 +118,7 @@ def test_yamlcache_changed_file(cli, tmpdir, ref_storage, with_junction): |
118 | 118 |
# Generate the project
|
119 | 119 |
project = generate_project(str(tmpdir), ref_storage, with_junction)
|
120 | 120 |
if with_junction == 'junction':
|
121 |
- result = cli.run(project=project, args=['fetch', '--track', 'junction.bst'])
|
|
121 |
+ result = cli.run(project=project, args=['source', 'fetch', '--track', 'junction.bst'])
|
|
122 | 122 |
result.assert_success()
|
123 | 123 |
|
124 | 124 |
# bst show to put it in the cache
|
... | ... | @@ -91,7 +91,7 @@ def test_compose_include(cli, tmpdir, datafiles, include_domains, |
91 | 91 |
}
|
92 | 92 |
create_compose_element(element_name, element_path, config=config)
|
93 | 93 |
|
94 |
- result = cli.run(project=project, args=['track', 'compose/amhello.bst'])
|
|
94 |
+ result = cli.run(project=project, args=['source', 'track', 'compose/amhello.bst'])
|
|
95 | 95 |
assert result.exit_code == 0
|
96 | 96 |
|
97 | 97 |
result = cli.run(project=project, args=['build', element_name])
|
... | ... | @@ -52,7 +52,7 @@ def test_pip_source_import(cli, tmpdir, datafiles, setup_pypi_repo): |
52 | 52 |
os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
|
53 | 53 |
_yaml.dump(element, os.path.join(element_path, element_name))
|
54 | 54 |
|
55 |
- result = cli.run(project=project, args=['track', element_name])
|
|
55 |
+ result = cli.run(project=project, args=['source', 'track', element_name])
|
|
56 | 56 |
assert result.exit_code == 0
|
57 | 57 |
|
58 | 58 |
result = cli.run(project=project, args=['build', element_name])
|
... | ... | @@ -113,7 +113,7 @@ def test_pip_source_build(cli, tmpdir, datafiles, setup_pypi_repo): |
113 | 113 |
os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
|
114 | 114 |
_yaml.dump(element, os.path.join(element_path, element_name))
|
115 | 115 |
|
116 |
- result = cli.run(project=project, args=['track', element_name])
|
|
116 |
+ result = cli.run(project=project, args=['source', 'track', element_name])
|
|
117 | 117 |
assert result.exit_code == 0
|
118 | 118 |
|
119 | 119 |
result = cli.run(project=project, args=['build', element_name])
|
... | ... | @@ -234,7 +234,7 @@ def test_git_show(cli, tmpdir, datafiles): |
234 | 234 |
assert result.exception.reason == LoadErrorReason.SUBPROJECT_FETCH_NEEDED
|
235 | 235 |
|
236 | 236 |
# Explicitly fetch subproject
|
237 |
- result = cli.run(project=project, args=['fetch', 'base.bst'])
|
|
237 |
+ result = cli.run(project=project, args=['source', 'fetch', 'base.bst'])
|
|
238 | 238 |
assert result.exit_code == 0
|
239 | 239 |
|
240 | 240 |
# Check that bst show succeeds now and the pipeline includes the subproject element
|
... | ... | @@ -15,5 +15,5 @@ def test_load_simple(cli, datafiles, tmpdir): |
15 | 15 |
basedir = os.path.join(datafiles.dirname, datafiles.basename)
|
16 | 16 |
|
17 | 17 |
# Lets try to fetch it...
|
18 |
- result = cli.run(project=basedir, args=['fetch', 'error.bst'])
|
|
18 |
+ result = cli.run(project=basedir, args=['source', 'fetch', 'error.bst'])
|
|
19 | 19 |
result.assert_main_error(ErrorDomain.SOURCE, "the-preflight-error")
|
... | ... | @@ -227,7 +227,7 @@ def test_filter_track(datafiles, cli, tmpdir): |
227 | 227 |
assert cli.get_element_state(project, input_name) == 'no reference'
|
228 | 228 |
|
229 | 229 |
# Now try to track it
|
230 |
- result = cli.run(project=project, args=["track", "filter2.bst"])
|
|
230 |
+ result = cli.run(project=project, args=["source", "track", "filter2.bst"])
|
|
231 | 231 |
result.assert_success()
|
232 | 232 |
|
233 | 233 |
# Now check that a ref field exists
|
... | ... | @@ -280,7 +280,7 @@ def test_filter_track_excepted(datafiles, cli, tmpdir): |
280 | 280 |
assert cli.get_element_state(project, input_name) == 'no reference'
|
281 | 281 |
|
282 | 282 |
# Now try to track it
|
283 |
- result = cli.run(project=project, args=["track", "filter2.bst", "--except", "input.bst"])
|
|
283 |
+ result = cli.run(project=project, args=["source", "track", "filter2.bst", "--except", "input.bst"])
|
|
284 | 284 |
result.assert_success()
|
285 | 285 |
|
286 | 286 |
# Now check that a ref field exists
|
... | ... | @@ -333,7 +333,7 @@ def test_filter_track_multi_to_one(datafiles, cli, tmpdir): |
333 | 333 |
assert cli.get_element_state(project, input_name) == 'no reference'
|
334 | 334 |
|
335 | 335 |
# Now try to track it
|
336 |
- result = cli.run(project=project, args=["track", "filter1.bst", "filter2.bst"])
|
|
336 |
+ result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
|
|
337 | 337 |
result.assert_success()
|
338 | 338 |
|
339 | 339 |
# Now check that a ref field exists
|
... | ... | @@ -392,7 +392,7 @@ def test_filter_track_multi(datafiles, cli, tmpdir): |
392 | 392 |
assert cli.get_element_state(project, input2_name) == 'no reference'
|
393 | 393 |
|
394 | 394 |
# Now try to track it
|
395 |
- result = cli.run(project=project, args=["track", "filter1.bst", "filter2.bst"])
|
|
395 |
+ result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
|
|
396 | 396 |
result.assert_success()
|
397 | 397 |
|
398 | 398 |
# Now check that a ref field exists
|
... | ... | @@ -453,7 +453,7 @@ def test_filter_track_multi_exclude(datafiles, cli, tmpdir): |
453 | 453 |
assert cli.get_element_state(project, input2_name) == 'no reference'
|
454 | 454 |
|
455 | 455 |
# Now try to track it
|
456 |
- result = cli.run(project=project, args=["track", "filter1.bst", "filter2.bst", "--except", input_name])
|
|
456 |
+ result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst", "--except", input_name])
|
|
457 | 457 |
result.assert_success()
|
458 | 458 |
|
459 | 459 |
# Now check that a ref field exists
|
... | ... | @@ -32,7 +32,7 @@ def test_fetch_checkout(cli, tmpdir, datafiles): |
32 | 32 |
_yaml.dump(element, os.path.join(project, 'target.bst'))
|
33 | 33 |
|
34 | 34 |
# Fetch, build, checkout
|
35 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
35 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
36 | 36 |
assert result.exit_code == 0
|
37 | 37 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
38 | 38 |
assert result.exit_code == 0
|
... | ... | @@ -54,7 +54,7 @@ def test_fetch_bad_url(cli, tmpdir, datafiles): |
54 | 54 |
|
55 | 55 |
# Try to fetch it
|
56 | 56 |
result = cli.run(project=project, args=[
|
57 |
- 'fetch', 'target.bst'
|
|
57 |
+ 'source', 'fetch', 'target.bst'
|
|
58 | 58 |
])
|
59 | 59 |
assert "FAILURE Try #" in result.stderr
|
60 | 60 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
... | ... | @@ -72,7 +72,7 @@ def test_fetch_bad_ref(cli, tmpdir, datafiles): |
72 | 72 |
|
73 | 73 |
# Try to fetch it
|
74 | 74 |
result = cli.run(project=project, args=[
|
75 |
- 'fetch', 'target.bst'
|
|
75 |
+ 'source', 'fetch', 'target.bst'
|
|
76 | 76 |
])
|
77 | 77 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
78 | 78 |
result.assert_task_error(ErrorDomain.SOURCE, None)
|
... | ... | @@ -90,7 +90,7 @@ def test_track_warning(cli, tmpdir, datafiles): |
90 | 90 |
|
91 | 91 |
# Track it
|
92 | 92 |
result = cli.run(project=project, args=[
|
93 |
- 'track', 'target.bst'
|
|
93 |
+ 'source', 'track', 'target.bst'
|
|
94 | 94 |
])
|
95 | 95 |
result.assert_success()
|
96 | 96 |
assert "Potential man-in-the-middle attack!" in result.stderr
|
... | ... | @@ -108,9 +108,9 @@ def test_stage_default_basedir(cli, tmpdir, datafiles): |
108 | 108 |
_copy_deb(DATA_DIR, tmpdir)
|
109 | 109 |
|
110 | 110 |
# Track, fetch, build, checkout
|
111 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
111 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
112 | 112 |
result.assert_success()
|
113 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
113 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
114 | 114 |
result.assert_success()
|
115 | 115 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
116 | 116 |
result.assert_success()
|
... | ... | @@ -136,9 +136,9 @@ def test_stage_no_basedir(cli, tmpdir, datafiles): |
136 | 136 |
_copy_deb(DATA_DIR, tmpdir)
|
137 | 137 |
|
138 | 138 |
# Track, fetch, build, checkout
|
139 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
139 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
140 | 140 |
result.assert_success()
|
141 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
141 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
142 | 142 |
result.assert_success()
|
143 | 143 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
144 | 144 |
result.assert_success()
|
... | ... | @@ -164,9 +164,9 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles): |
164 | 164 |
_copy_deb(DATA_DIR, tmpdir)
|
165 | 165 |
|
166 | 166 |
# Track, fetch, build, checkout
|
167 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
167 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
168 | 168 |
result.assert_success()
|
169 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
169 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
170 | 170 |
result.assert_success()
|
171 | 171 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
172 | 172 |
result.assert_success()
|
... | ... | @@ -28,6 +28,7 @@ import shutil |
28 | 28 |
from buildstream._exceptions import ErrorDomain
|
29 | 29 |
from buildstream import _yaml
|
30 | 30 |
from buildstream.plugin import CoreWarnings
|
31 |
+from buildstream.utils import url_directory_name
|
|
31 | 32 |
|
32 | 33 |
from tests.testutils import cli, create_repo
|
33 | 34 |
from tests.testutils.site import HAVE_GIT
|
... | ... | @@ -58,7 +59,7 @@ def test_fetch_bad_ref(cli, tmpdir, datafiles): |
58 | 59 |
|
59 | 60 |
# Assert that fetch raises an error here
|
60 | 61 |
result = cli.run(project=project, args=[
|
61 |
- 'fetch', 'target.bst'
|
|
62 |
+ 'source', 'fetch', 'target.bst'
|
|
62 | 63 |
])
|
63 | 64 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
64 | 65 |
result.assert_task_error(ErrorDomain.SOURCE, None)
|
... | ... | @@ -91,7 +92,7 @@ def test_submodule_fetch_checkout(cli, tmpdir, datafiles): |
91 | 92 |
_yaml.dump(element, os.path.join(project, 'target.bst'))
|
92 | 93 |
|
93 | 94 |
# Fetch, build, checkout
|
94 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
95 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
95 | 96 |
result.assert_success()
|
96 | 97 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
97 | 98 |
result.assert_success()
|
... | ... | @@ -130,7 +131,7 @@ def test_submodule_fetch_source_enable_explicit(cli, tmpdir, datafiles): |
130 | 131 |
_yaml.dump(element, os.path.join(project, 'target.bst'))
|
131 | 132 |
|
132 | 133 |
# Fetch, build, checkout
|
133 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
134 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
134 | 135 |
result.assert_success()
|
135 | 136 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
136 | 137 |
result.assert_success()
|
... | ... | @@ -169,7 +170,7 @@ def test_submodule_fetch_source_disable(cli, tmpdir, datafiles): |
169 | 170 |
_yaml.dump(element, os.path.join(project, 'target.bst'))
|
170 | 171 |
|
171 | 172 |
# Fetch, build, checkout
|
172 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
173 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
173 | 174 |
result.assert_success()
|
174 | 175 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
175 | 176 |
result.assert_success()
|
... | ... | @@ -208,7 +209,7 @@ def test_submodule_fetch_submodule_does_override(cli, tmpdir, datafiles): |
208 | 209 |
_yaml.dump(element, os.path.join(project, 'target.bst'))
|
209 | 210 |
|
210 | 211 |
# Fetch, build, checkout
|
211 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
212 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
212 | 213 |
result.assert_success()
|
213 | 214 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
214 | 215 |
result.assert_success()
|
... | ... | @@ -252,7 +253,7 @@ def test_submodule_fetch_submodule_individual_checkout(cli, tmpdir, datafiles): |
252 | 253 |
_yaml.dump(element, os.path.join(project, 'target.bst'))
|
253 | 254 |
|
254 | 255 |
# Fetch, build, checkout
|
255 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
256 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
256 | 257 |
result.assert_success()
|
257 | 258 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
258 | 259 |
result.assert_success()
|
... | ... | @@ -297,7 +298,7 @@ def test_submodule_fetch_submodule_individual_checkout_explicit(cli, tmpdir, dat |
297 | 298 |
_yaml.dump(element, os.path.join(project, 'target.bst'))
|
298 | 299 |
|
299 | 300 |
# Fetch, build, checkout
|
300 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
301 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
301 | 302 |
result.assert_success()
|
302 | 303 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
303 | 304 |
result.assert_success()
|
... | ... | @@ -337,7 +338,7 @@ def test_submodule_fetch_project_override(cli, tmpdir, datafiles): |
337 | 338 |
_yaml.dump(element, os.path.join(project, 'target.bst'))
|
338 | 339 |
|
339 | 340 |
# Fetch, build, checkout
|
340 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
341 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
341 | 342 |
result.assert_success()
|
342 | 343 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
343 | 344 |
result.assert_success()
|
... | ... | @@ -375,11 +376,11 @@ def test_submodule_track_ignore_inconsistent(cli, tmpdir, datafiles): |
375 | 376 |
repo.add_file(os.path.join(project, 'inconsistent-submodule', '.gitmodules'))
|
376 | 377 |
|
377 | 378 |
# Fetch should work, we're not yet at the offending ref
|
378 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
379 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
379 | 380 |
result.assert_success()
|
380 | 381 |
|
381 | 382 |
# Track will encounter an inconsistent submodule without any ref
|
382 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
383 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
383 | 384 |
result.assert_success()
|
384 | 385 |
|
385 | 386 |
# Assert that we are just fine without it, and emit a warning to the user.
|
... | ... | @@ -508,7 +509,7 @@ def test_unlisted_submodule(cli, tmpdir, datafiles, fail): |
508 | 509 |
|
509 | 510 |
# We will notice this directly in fetch, as it will try to fetch
|
510 | 511 |
# the submodules it discovers as a result of fetching the primary repo.
|
511 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
512 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
512 | 513 |
|
513 | 514 |
# Assert a warning or an error depending on what we're checking
|
514 | 515 |
if fail == 'error':
|
... | ... | @@ -571,19 +572,19 @@ def test_track_unlisted_submodule(cli, tmpdir, datafiles, fail): |
571 | 572 |
|
572 | 573 |
# Fetch the repo, we will not see the warning because we
|
573 | 574 |
# are still pointing to a ref which predates the submodules
|
574 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
575 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
575 | 576 |
result.assert_success()
|
576 | 577 |
assert "git:unlisted-submodule" not in result.stderr
|
577 | 578 |
|
578 | 579 |
# We won't get a warning/error when tracking either, the source
|
579 | 580 |
# has not become Consistency.CACHED so the opportunity to check
|
580 | 581 |
# for the warning has not yet arisen.
|
581 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
582 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
582 | 583 |
result.assert_success()
|
583 | 584 |
assert "git:unlisted-submodule" not in result.stderr
|
584 | 585 |
|
585 | 586 |
# Fetching the repo at the new ref will finally reveal the warning
|
586 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
587 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
587 | 588 |
if fail == 'error':
|
588 | 589 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
589 | 590 |
result.assert_task_error(ErrorDomain.PLUGIN, 'git:unlisted-submodule')
|
... | ... | @@ -642,7 +643,7 @@ def test_invalid_submodule(cli, tmpdir, datafiles, fail): |
642 | 643 |
|
643 | 644 |
# We will notice this directly in fetch, as it will try to fetch
|
644 | 645 |
# the submodules it discovers as a result of fetching the primary repo.
|
645 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
646 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
646 | 647 |
|
647 | 648 |
# Assert a warning or an error depending on what we're checking
|
648 | 649 |
if fail == 'error':
|
... | ... | @@ -706,7 +707,7 @@ def test_track_invalid_submodule(cli, tmpdir, datafiles, fail): |
706 | 707 |
|
707 | 708 |
# Fetch the repo, we will not see the warning because we
|
708 | 709 |
# are still pointing to a ref which predates the submodules
|
709 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
710 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
710 | 711 |
result.assert_success()
|
711 | 712 |
assert "git:invalid-submodule" not in result.stderr
|
712 | 713 |
|
... | ... | @@ -715,7 +716,7 @@ def test_track_invalid_submodule(cli, tmpdir, datafiles, fail): |
715 | 716 |
# not locally cached, the Source will be CACHED directly after
|
716 | 717 |
# tracking and the validations will occur as a result.
|
717 | 718 |
#
|
718 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
719 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
719 | 720 |
if fail == 'error':
|
720 | 721 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
721 | 722 |
result.assert_task_error(ErrorDomain.PLUGIN, 'git:invalid-submodule')
|
... | ... | @@ -751,7 +752,7 @@ def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit): |
751 | 752 |
_yaml.dump(element, element_path)
|
752 | 753 |
|
753 | 754 |
# Track it
|
754 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
755 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
755 | 756 |
result.assert_success()
|
756 | 757 |
|
757 | 758 |
element = _yaml.load(element_path)
|
... | ... | @@ -767,7 +768,7 @@ def test_track_fetch(cli, tmpdir, datafiles, ref_format, tag, extra_commit): |
767 | 768 |
assert len(new_ref) == 40
|
768 | 769 |
|
769 | 770 |
# Fetch it
|
770 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
771 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
771 | 772 |
result.assert_success()
|
772 | 773 |
|
773 | 774 |
|
... | ... | @@ -835,10 +836,10 @@ def test_git_describe(cli, tmpdir, datafiles, ref_storage, tag_type): |
835 | 836 |
_yaml.dump(element, element_path)
|
836 | 837 |
|
837 | 838 |
if ref_storage == 'inline':
|
838 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
839 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
839 | 840 |
result.assert_success()
|
840 | 841 |
else:
|
841 |
- result = cli.run(project=project, args=['track', 'target.bst', '--deps', 'all'])
|
|
842 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst', '--deps', 'all'])
|
|
842 | 843 |
result.assert_success()
|
843 | 844 |
|
844 | 845 |
if ref_storage == 'inline':
|
... | ... | @@ -916,7 +917,7 @@ def test_default_do_not_track_tags(cli, tmpdir, datafiles): |
916 | 917 |
element_path = os.path.join(project, 'target.bst')
|
917 | 918 |
_yaml.dump(element, element_path)
|
918 | 919 |
|
919 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
920 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
920 | 921 |
result.assert_success()
|
921 | 922 |
|
922 | 923 |
element = _yaml.load(element_path)
|
... | ... | @@ -1018,3 +1019,249 @@ def test_overwrite_rogue_tag_multiple_remotes(cli, tmpdir, datafiles): |
1018 | 1019 |
|
1019 | 1020 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
1020 | 1021 |
result.assert_success()
|
1022 |
+ |
|
1023 |
+ |
|
1024 |
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
|
|
1025 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
|
|
1026 |
+def test_fetch_shallow(cli, tmpdir, datafiles):
|
|
1027 |
+ project = str(datafiles)
|
|
1028 |
+ |
|
1029 |
+ repo = create_repo('git', str(tmpdir))
|
|
1030 |
+ previous_ref = repo.create(os.path.join(project, 'repofiles'))
|
|
1031 |
+ |
|
1032 |
+ file1 = os.path.join(str(tmpdir), 'file1')
|
|
1033 |
+ with open(file1, 'w') as f:
|
|
1034 |
+ f.write('test\n')
|
|
1035 |
+ ref = repo.add_file(file1)
|
|
1036 |
+ |
|
1037 |
+ source_config = repo.source_config(ref=ref)
|
|
1038 |
+ |
|
1039 |
+ # Write out our test target
|
|
1040 |
+ element = {
|
|
1041 |
+ 'kind': 'import',
|
|
1042 |
+ 'sources': [
|
|
1043 |
+ source_config
|
|
1044 |
+ ]
|
|
1045 |
+ }
|
|
1046 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
1047 |
+ |
|
1048 |
+ sources_dir = os.path.join(str(tmpdir), 'sources')
|
|
1049 |
+ os.makedirs(sources_dir, exist_ok=True)
|
|
1050 |
+ config = {
|
|
1051 |
+ 'sourcedir': sources_dir
|
|
1052 |
+ }
|
|
1053 |
+ cli.configure(config)
|
|
1054 |
+ |
|
1055 |
+ result = cli.run(project=project, args=[
|
|
1056 |
+ 'source', 'fetch', 'target.bst'
|
|
1057 |
+ ])
|
|
1058 |
+ result.assert_success()
|
|
1059 |
+ |
|
1060 |
+ cache_dir_name = url_directory_name(source_config['url'])
|
|
1061 |
+ full_cache_path = os.path.join(sources_dir, 'git', cache_dir_name)
|
|
1062 |
+ shallow_cache_path = os.path.join(sources_dir, 'git', '{}-{}'.format(cache_dir_name, ref))
|
|
1063 |
+ |
|
1064 |
+ assert os.path.exists(shallow_cache_path)
|
|
1065 |
+ assert not os.path.exists(full_cache_path)
|
|
1066 |
+ |
|
1067 |
+ output = subprocess.run(['git', 'log', '--format=format:%H'],
|
|
1068 |
+ cwd=shallow_cache_path,
|
|
1069 |
+ stdout=subprocess.PIPE).stdout.decode('ascii')
|
|
1070 |
+ assert output.splitlines() == [ref]
|
|
1071 |
+ |
|
1072 |
+ result = cli.run(project=project, args=[
|
|
1073 |
+ 'build', 'target.bst'
|
|
1074 |
+ ])
|
|
1075 |
+ result.assert_success()
|
|
1076 |
+ |
|
1077 |
+ output = subprocess.run(['git', 'log', '--format=format:%H'],
|
|
1078 |
+ cwd=shallow_cache_path,
|
|
1079 |
+ stdout=subprocess.PIPE).stdout.decode('ascii')
|
|
1080 |
+ assert output.splitlines() == [ref]
|
|
1081 |
+ |
|
1082 |
+ assert os.path.exists(shallow_cache_path)
|
|
1083 |
+ assert not os.path.exists(full_cache_path)
|
|
1084 |
+ |
|
1085 |
+ result = cli.run(project=project, args=[
|
|
1086 |
+ 'source', 'track', 'target.bst'
|
|
1087 |
+ ])
|
|
1088 |
+ result.assert_success()
|
|
1089 |
+ |
|
1090 |
+ assert os.path.exists(full_cache_path)
|
|
1091 |
+ output = subprocess.run(['git', 'log', '--format=format:%H'],
|
|
1092 |
+ cwd=full_cache_path,
|
|
1093 |
+ stdout=subprocess.PIPE).stdout.decode('ascii')
|
|
1094 |
+ assert output.splitlines() == [ref, previous_ref]
|
|
1095 |
+ |
|
1096 |
+ |
|
1097 |
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
|
|
1098 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
|
|
1099 |
+def test_fetch_shallow_not_tagged(cli, tmpdir, datafiles):
|
|
1100 |
+ """When a ref is not tagged and not head of branch on remote we cannot
|
|
1101 |
+ cannot get a shallow clone, so it should automatically fall back to a full clone.
|
|
1102 |
+ """
|
|
1103 |
+ |
|
1104 |
+ project = str(datafiles)
|
|
1105 |
+ |
|
1106 |
+ repo = create_repo('git', str(tmpdir))
|
|
1107 |
+ previous_ref = repo.create(os.path.join(project, 'repofiles'))
|
|
1108 |
+ |
|
1109 |
+ file1 = os.path.join(str(tmpdir), 'file1')
|
|
1110 |
+ with open(file1, 'w') as f:
|
|
1111 |
+ f.write('test\n')
|
|
1112 |
+ ref = repo.add_file(file1)
|
|
1113 |
+ |
|
1114 |
+ source_config = repo.source_config(ref=previous_ref)
|
|
1115 |
+ |
|
1116 |
+ # Write out our test target
|
|
1117 |
+ element = {
|
|
1118 |
+ 'kind': 'import',
|
|
1119 |
+ 'sources': [
|
|
1120 |
+ source_config
|
|
1121 |
+ ]
|
|
1122 |
+ }
|
|
1123 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
1124 |
+ |
|
1125 |
+ sources_dir = os.path.join(str(tmpdir), 'sources')
|
|
1126 |
+ os.makedirs(sources_dir, exist_ok=True)
|
|
1127 |
+ config = {
|
|
1128 |
+ 'sourcedir': sources_dir
|
|
1129 |
+ }
|
|
1130 |
+ cli.configure(config)
|
|
1131 |
+ |
|
1132 |
+ result = cli.run(project=project, args=[
|
|
1133 |
+ 'source', 'fetch', 'target.bst'
|
|
1134 |
+ ])
|
|
1135 |
+ result.assert_success()
|
|
1136 |
+ |
|
1137 |
+ cache_dir_name = url_directory_name(source_config['url'])
|
|
1138 |
+ full_cache_path = os.path.join(sources_dir, 'git', cache_dir_name)
|
|
1139 |
+ shallow_cache_path = os.path.join(sources_dir, 'git', '{}-{}'.format(cache_dir_name, previous_ref))
|
|
1140 |
+ |
|
1141 |
+ assert not os.path.exists(shallow_cache_path)
|
|
1142 |
+ assert os.path.exists(full_cache_path)
|
|
1143 |
+ |
|
1144 |
+ output = subprocess.run(['git', 'log', '--format=format:%H'],
|
|
1145 |
+ cwd=full_cache_path,
|
|
1146 |
+ stdout=subprocess.PIPE).stdout.decode('ascii')
|
|
1147 |
+ assert output.splitlines() == [ref, previous_ref]
|
|
1148 |
+ |
|
1149 |
+ |
|
1150 |
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
|
|
1151 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
|
|
1152 |
+def test_fetch_shallow_annotated_tag(cli, tmpdir, datafiles):
|
|
1153 |
+ """When a ref is not tagged and not head of branch on remote we cannot
|
|
1154 |
+ get a shallow clone of just that ref rather than needing a full clone.
|
|
1155 |
+ """
|
|
1156 |
+ |
|
1157 |
+ project = str(datafiles)
|
|
1158 |
+ |
|
1159 |
+ repo = create_repo('git', str(tmpdir))
|
|
1160 |
+ previous_ref = repo.create(os.path.join(project, 'repofiles'))
|
|
1161 |
+ |
|
1162 |
+ repo.add_annotated_tag('tag', 'tag')
|
|
1163 |
+ |
|
1164 |
+ file1 = os.path.join(str(tmpdir), 'file1')
|
|
1165 |
+ with open(file1, 'w') as f:
|
|
1166 |
+ f.write('test\n')
|
|
1167 |
+ repo.add_file(file1)
|
|
1168 |
+ |
|
1169 |
+ source_config = repo.source_config(ref=previous_ref)
|
|
1170 |
+ del source_config['track']
|
|
1171 |
+ |
|
1172 |
+ # Write out our test target
|
|
1173 |
+ element = {
|
|
1174 |
+ 'kind': 'import',
|
|
1175 |
+ 'sources': [
|
|
1176 |
+ source_config
|
|
1177 |
+ ]
|
|
1178 |
+ }
|
|
1179 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
1180 |
+ |
|
1181 |
+ sources_dir = os.path.join(str(tmpdir), 'sources')
|
|
1182 |
+ os.makedirs(sources_dir, exist_ok=True)
|
|
1183 |
+ config = {
|
|
1184 |
+ 'sourcedir': sources_dir
|
|
1185 |
+ }
|
|
1186 |
+ cli.configure(config)
|
|
1187 |
+ |
|
1188 |
+ result = cli.run(project=project, args=[
|
|
1189 |
+ 'source', 'fetch', 'target.bst'
|
|
1190 |
+ ])
|
|
1191 |
+ result.assert_success()
|
|
1192 |
+ |
|
1193 |
+ cache_dir_name = url_directory_name(source_config['url'])
|
|
1194 |
+ full_cache_path = os.path.join(sources_dir, 'git', cache_dir_name)
|
|
1195 |
+ shallow_cache_path = os.path.join(sources_dir, 'git', '{}-{}'.format(cache_dir_name, previous_ref))
|
|
1196 |
+ |
|
1197 |
+ assert os.path.exists(shallow_cache_path)
|
|
1198 |
+ assert not os.path.exists(full_cache_path)
|
|
1199 |
+ |
|
1200 |
+ output = subprocess.run(['git', 'log', '--format=format:%H'],
|
|
1201 |
+ cwd=shallow_cache_path,
|
|
1202 |
+ stdout=subprocess.PIPE).stdout.decode('ascii')
|
|
1203 |
+ assert output.splitlines() == [previous_ref]
|
|
1204 |
+ |
|
1205 |
+ |
|
1206 |
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
|
|
1207 |
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
|
|
1208 |
+def test_fetch_shallow_workspace_open(cli, tmpdir, datafiles):
|
|
1209 |
+ """
|
|
1210 |
+ Workspaces should get a full clone.
|
|
1211 |
+ """
|
|
1212 |
+ project = str(datafiles)
|
|
1213 |
+ |
|
1214 |
+ repo = create_repo('git', str(tmpdir))
|
|
1215 |
+ previous_ref = repo.create(os.path.join(project, 'repofiles'))
|
|
1216 |
+ |
|
1217 |
+ file1 = os.path.join(str(tmpdir), 'file1')
|
|
1218 |
+ with open(file1, 'w') as f:
|
|
1219 |
+ f.write('test\n')
|
|
1220 |
+ ref = repo.add_file(file1)
|
|
1221 |
+ |
|
1222 |
+ source_config = repo.source_config(ref=ref)
|
|
1223 |
+ |
|
1224 |
+ # Write out our test target
|
|
1225 |
+ element = {
|
|
1226 |
+ 'kind': 'import',
|
|
1227 |
+ 'sources': [
|
|
1228 |
+ source_config
|
|
1229 |
+ ]
|
|
1230 |
+ }
|
|
1231 |
+ _yaml.dump(element, os.path.join(project, 'target.bst'))
|
|
1232 |
+ |
|
1233 |
+ sources_dir = os.path.join(str(tmpdir), 'sources')
|
|
1234 |
+ os.makedirs(sources_dir, exist_ok=True)
|
|
1235 |
+ config = {
|
|
1236 |
+ 'sourcedir': sources_dir
|
|
1237 |
+ }
|
|
1238 |
+ cli.configure(config)
|
|
1239 |
+ |
|
1240 |
+ result = cli.run(project=project, args=[
|
|
1241 |
+ 'source', 'fetch', 'target.bst'
|
|
1242 |
+ ])
|
|
1243 |
+ result.assert_success()
|
|
1244 |
+ |
|
1245 |
+ cache_dir_name = url_directory_name(source_config['url'])
|
|
1246 |
+ full_cache_path = os.path.join(sources_dir, 'git', cache_dir_name)
|
|
1247 |
+ shallow_cache_path = os.path.join(sources_dir, 'git', '{}-{}'.format(cache_dir_name, ref))
|
|
1248 |
+ |
|
1249 |
+ assert os.path.exists(shallow_cache_path)
|
|
1250 |
+ assert not os.path.exists(full_cache_path)
|
|
1251 |
+ |
|
1252 |
+ output = subprocess.run(['git', 'log', '--format=format:%H'],
|
|
1253 |
+ cwd=shallow_cache_path,
|
|
1254 |
+ stdout=subprocess.PIPE).stdout.decode('ascii')
|
|
1255 |
+ assert output.splitlines() == [ref]
|
|
1256 |
+ |
|
1257 |
+ workspace = os.path.join(str(tmpdir), 'workspace')
|
|
1258 |
+ |
|
1259 |
+ result = cli.run(project=project, args=[
|
|
1260 |
+ 'workspace', 'open', 'target.bst', '--directory', workspace
|
|
1261 |
+ ])
|
|
1262 |
+ result.assert_success()
|
|
1263 |
+ |
|
1264 |
+ output = subprocess.run(['git', 'log', '--format=format:%H'],
|
|
1265 |
+ cwd=workspace,
|
|
1266 |
+ stdout=subprocess.PIPE).stdout.decode('ascii')
|
|
1267 |
+ assert output.splitlines() == [ref, previous_ref]
|
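
The new shallow-fetch tests above rely on git only allowing a shallow fetch
of refs the remote advertises (branch heads and tags); an unadvertised
commit has to fall back to a full clone. A rough sketch of that behaviour in
plain git commands, with illustrative names (shallow_mirror, mirror_dir)
rather than the git plugin's actual implementation, looks like this:

    import subprocess

    def shallow_mirror(url, commit, mirror_dir):
        # Sketch only: shallow-fetch `commit` into a bare mirror when the
        # remote advertises a ref pointing at it, otherwise fetch everything.
        subprocess.run(['git', 'init', '--bare', mirror_dir], check=True)
        refs = subprocess.run(['git', 'ls-remote', url], check=True,
                              stdout=subprocess.PIPE).stdout.decode()
        # ls-remote lines look like "<sha>\t<refname>"; peeled annotated
        # tags are listed a second time with a trailing "^{}".
        advertised = [line.split('\t')[1].rstrip('^{}')
                      for line in refs.splitlines()
                      if line.split('\t')[0] == commit]
        if advertised:
            # The commit is a branch head or tag target: depth 1 is enough.
            subprocess.run(['git', 'fetch', '--depth=1', url, advertised[0]],
                           cwd=mirror_dir, check=True)
        else:
            # Not advertised, so it cannot be requested shallowly.
            subprocess.run(['git', 'fetch', url, '+refs/heads/*:refs/heads/*'],
                           cwd=mirror_dir, check=True)
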
... | ... | @@ -19,13 +19,13 @@ def test_custom_transform_source(cli, tmpdir, datafiles): |
19 | 19 |
|
20 | 20 |
# Ensure we can track
|
21 | 21 |
result = cli.run(project=project, args=[
|
22 |
- 'track', 'target.bst'
|
|
22 |
+ 'source', 'track', 'target.bst'
|
|
23 | 23 |
])
|
24 | 24 |
result.assert_success()
|
25 | 25 |
|
26 | 26 |
# Ensure we can fetch
|
27 | 27 |
result = cli.run(project=project, args=[
|
28 |
- 'fetch', 'target.bst'
|
|
28 |
+ 'source', 'fetch', 'target.bst'
|
|
29 | 29 |
])
|
30 | 30 |
result.assert_success()
|
31 | 31 |
|
... | ... | @@ -50,7 +50,7 @@ def test_missing_file(cli, tmpdir, datafiles): |
50 | 50 |
|
51 | 51 |
# Try to fetch it
|
52 | 52 |
result = cli.run(project=project, args=[
|
53 |
- 'fetch', 'target.bst'
|
|
53 |
+ 'source', 'fetch', 'target.bst'
|
|
54 | 54 |
])
|
55 | 55 |
|
56 | 56 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
... | ... | @@ -64,7 +64,7 @@ def test_path_in_filename(cli, tmpdir, datafiles): |
64 | 64 |
|
65 | 65 |
# Try to fetch it
|
66 | 66 |
result = cli.run(project=project, args=[
|
67 |
- 'fetch', 'target.bst'
|
|
67 |
+ 'source', 'fetch', 'target.bst'
|
|
68 | 68 |
])
|
69 | 69 |
|
70 | 70 |
# The bst file has a / in the filename param
|
... | ... | @@ -79,7 +79,7 @@ def test_simple_file_build(cli, tmpdir, datafiles): |
79 | 79 |
|
80 | 80 |
# Try to fetch it
|
81 | 81 |
result = cli.run(project=project, args=[
|
82 |
- 'fetch', 'target.bst'
|
|
82 |
+ 'source', 'fetch', 'target.bst'
|
|
83 | 83 |
])
|
84 | 84 |
result.assert_success()
|
85 | 85 |
|
... | ... | @@ -112,7 +112,7 @@ def test_simple_file_custom_name_build(cli, tmpdir, datafiles): |
112 | 112 |
|
113 | 113 |
# Try to fetch it
|
114 | 114 |
result = cli.run(project=project, args=[
|
115 |
- 'fetch', 'target.bst'
|
|
115 |
+ 'source', 'fetch', 'target.bst'
|
|
116 | 116 |
])
|
117 | 117 |
result.assert_success()
|
118 | 118 |
|
... | ... | @@ -141,7 +141,7 @@ def test_unique_key(cli, tmpdir, datafiles): |
141 | 141 |
assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
|
142 | 142 |
# Try to fetch it
|
143 | 143 |
result = cli.run(project=project, args=[
|
144 |
- 'fetch', 'target.bst'
|
|
144 |
+ 'source', 'fetch', 'target.bst'
|
|
145 | 145 |
])
|
146 | 146 |
|
147 | 147 |
# We should download the file only once
|
... | ... | @@ -198,7 +198,7 @@ def test_use_netrc(cli, datafiles, server_type, tmpdir): |
198 | 198 |
|
199 | 199 |
server.start()
|
200 | 200 |
|
201 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
201 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
202 | 202 |
result.assert_success()
|
203 | 203 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
204 | 204 |
result.assert_success()
|
... | ... | @@ -77,7 +77,7 @@ def test_fetch_bad_url(cli, tmpdir, datafiles): |
77 | 77 |
|
78 | 78 |
# Try to fetch it
|
79 | 79 |
result = cli.run(project=project, args=[
|
80 |
- 'fetch', 'target.bst'
|
|
80 |
+ 'source', 'fetch', 'target.bst'
|
|
81 | 81 |
])
|
82 | 82 |
assert "FAILURE Try #" in result.stderr
|
83 | 83 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
... | ... | @@ -96,7 +96,7 @@ def test_fetch_bad_ref(cli, tmpdir, datafiles): |
96 | 96 |
|
97 | 97 |
# Try to fetch it
|
98 | 98 |
result = cli.run(project=project, args=[
|
99 |
- 'fetch', 'target.bst'
|
|
99 |
+ 'source', 'fetch', 'target.bst'
|
|
100 | 100 |
])
|
101 | 101 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
102 | 102 |
result.assert_task_error(ErrorDomain.SOURCE, None)
|
... | ... | @@ -114,7 +114,7 @@ def test_track_warning(cli, tmpdir, datafiles): |
114 | 114 |
|
115 | 115 |
# Track it
|
116 | 116 |
result = cli.run(project=project, args=[
|
117 |
- 'track', 'target.bst'
|
|
117 |
+ 'source', 'track', 'target.bst'
|
|
118 | 118 |
])
|
119 | 119 |
result.assert_success()
|
120 | 120 |
assert "Potential man-in-the-middle attack!" in result.stderr
|
... | ... | @@ -133,9 +133,9 @@ def test_stage_default_basedir(cli, tmpdir, datafiles, srcdir): |
133 | 133 |
_assemble_tar(os.path.join(str(datafiles), "content"), srcdir, src_tar)
|
134 | 134 |
|
135 | 135 |
# Track, fetch, build, checkout
|
136 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
136 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
137 | 137 |
result.assert_success()
|
138 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
138 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
139 | 139 |
result.assert_success()
|
140 | 140 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
141 | 141 |
result.assert_success()
|
... | ... | @@ -162,9 +162,9 @@ def test_stage_no_basedir(cli, tmpdir, datafiles, srcdir): |
162 | 162 |
_assemble_tar(os.path.join(str(datafiles), "content"), srcdir, src_tar)
|
163 | 163 |
|
164 | 164 |
# Track, fetch, build, checkout
|
165 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
165 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
166 | 166 |
result.assert_success()
|
167 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
167 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
168 | 168 |
result.assert_success()
|
169 | 169 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
170 | 170 |
result.assert_success()
|
... | ... | @@ -191,9 +191,9 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles, srcdir): |
191 | 191 |
_assemble_tar(os.path.join(str(datafiles), "content"), srcdir, src_tar)
|
192 | 192 |
|
193 | 193 |
# Track, fetch, build, checkout
|
194 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
194 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
195 | 195 |
result.assert_success()
|
196 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
196 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
197 | 197 |
result.assert_success()
|
198 | 198 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
199 | 199 |
result.assert_success()
|
... | ... | @@ -227,9 +227,9 @@ def test_stage_contains_links(cli, tmpdir, datafiles): |
227 | 227 |
_assemble_tar(os.path.join(str(datafiles), "content"), "base-directory", src_tar)
|
228 | 228 |
|
229 | 229 |
# Track, fetch, build, checkout
|
230 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
230 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
231 | 231 |
result.assert_success()
|
232 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
232 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
233 | 233 |
result.assert_success()
|
234 | 234 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
235 | 235 |
result.assert_success()
|
... | ... | @@ -256,9 +256,9 @@ def test_stage_default_basedir_lzip(cli, tmpdir, datafiles, srcdir): |
256 | 256 |
_assemble_tar_lz(os.path.join(str(datafiles), "content"), srcdir, src_tar)
|
257 | 257 |
|
258 | 258 |
# Track, fetch, build, checkout
|
259 |
- result = cli.run(project=project, args=['track', 'target-lz.bst'])
|
|
259 |
+ result = cli.run(project=project, args=['source', 'track', 'target-lz.bst'])
|
|
260 | 260 |
result.assert_success()
|
261 |
- result = cli.run(project=project, args=['fetch', 'target-lz.bst'])
|
|
261 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target-lz.bst'])
|
|
262 | 262 |
result.assert_success()
|
263 | 263 |
result = cli.run(project=project, args=['build', 'target-lz.bst'])
|
264 | 264 |
result.assert_success()
|
... | ... | @@ -297,9 +297,9 @@ def test_read_only_dir(cli, tmpdir, datafiles): |
297 | 297 |
env = {"TMP": tmpdir_str}
|
298 | 298 |
|
299 | 299 |
# Track, fetch, build, checkout
|
300 |
- result = cli.run(project=project, args=['track', 'target.bst'], env=env)
|
|
300 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'], env=env)
|
|
301 | 301 |
result.assert_success()
|
302 |
- result = cli.run(project=project, args=['fetch', 'target.bst'], env=env)
|
|
302 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'], env=env)
|
|
303 | 303 |
result.assert_success()
|
304 | 304 |
result = cli.run(project=project, args=['build', 'target.bst'], env=env)
|
305 | 305 |
result.assert_success()
|
... | ... | @@ -342,9 +342,9 @@ def test_use_netrc(cli, datafiles, server_type, tmpdir): |
342 | 342 |
|
343 | 343 |
server.start()
|
344 | 344 |
|
345 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
345 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
346 | 346 |
result.assert_success()
|
347 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
347 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
348 | 348 |
result.assert_success()
|
349 | 349 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
350 | 350 |
result.assert_success()
|
... | ... | @@ -385,6 +385,6 @@ def test_netrc_already_specified_user(cli, datafiles, server_type, tmpdir): |
385 | 385 |
|
386 | 386 |
server.start()
|
387 | 387 |
|
388 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
388 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
389 | 389 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
390 | 390 |
result.assert_task_error(ErrorDomain.SOURCE, None)
|
... | ... | @@ -62,7 +62,7 @@ def test_fetch_bad_url(cli, tmpdir, datafiles): |
62 | 62 |
|
63 | 63 |
# Try to fetch it
|
64 | 64 |
result = cli.run(project=project, args=[
|
65 |
- 'fetch', 'target.bst'
|
|
65 |
+ 'source', 'fetch', 'target.bst'
|
|
66 | 66 |
])
|
67 | 67 |
assert "FAILURE Try #" in result.stderr
|
68 | 68 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
... | ... | @@ -81,7 +81,7 @@ def test_fetch_bad_ref(cli, tmpdir, datafiles): |
81 | 81 |
|
82 | 82 |
# Try to fetch it
|
83 | 83 |
result = cli.run(project=project, args=[
|
84 |
- 'fetch', 'target.bst'
|
|
84 |
+ 'source', 'fetch', 'target.bst'
|
|
85 | 85 |
])
|
86 | 86 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
87 | 87 |
result.assert_task_error(ErrorDomain.SOURCE, None)
|
... | ... | @@ -99,7 +99,7 @@ def test_track_warning(cli, tmpdir, datafiles): |
99 | 99 |
|
100 | 100 |
# Track it
|
101 | 101 |
result = cli.run(project=project, args=[
|
102 |
- 'track', 'target.bst'
|
|
102 |
+ 'source', 'track', 'target.bst'
|
|
103 | 103 |
])
|
104 | 104 |
result.assert_success()
|
105 | 105 |
assert "Potential man-in-the-middle attack!" in result.stderr
|
... | ... | @@ -117,9 +117,9 @@ def test_stage_default_basedir(cli, tmpdir, datafiles): |
117 | 117 |
_assemble_zip(os.path.join(str(datafiles), "content"), src_zip)
|
118 | 118 |
|
119 | 119 |
# Track, fetch, build, checkout
|
120 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
120 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
121 | 121 |
result.assert_success()
|
122 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
122 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
123 | 123 |
result.assert_success()
|
124 | 124 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
125 | 125 |
result.assert_success()
|
... | ... | @@ -145,9 +145,9 @@ def test_stage_no_basedir(cli, tmpdir, datafiles): |
145 | 145 |
_assemble_zip(os.path.join(str(datafiles), "content"), src_zip)
|
146 | 146 |
|
147 | 147 |
# Track, fetch, build, checkout
|
148 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
148 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
149 | 149 |
result.assert_success()
|
150 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
150 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
151 | 151 |
result.assert_success()
|
152 | 152 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
153 | 153 |
result.assert_success()
|
... | ... | @@ -173,9 +173,9 @@ def test_stage_explicit_basedir(cli, tmpdir, datafiles): |
173 | 173 |
_assemble_zip(os.path.join(str(datafiles), "content"), src_zip)
|
174 | 174 |
|
175 | 175 |
# Track, fetch, build, checkout
|
176 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
176 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
177 | 177 |
result.assert_success()
|
178 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
178 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
179 | 179 |
result.assert_success()
|
180 | 180 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
181 | 181 |
result.assert_success()
|
... | ... | @@ -215,9 +215,9 @@ def test_use_netrc(cli, datafiles, server_type, tmpdir): |
215 | 215 |
|
216 | 216 |
server.start()
|
217 | 217 |
|
218 |
- result = cli.run(project=project, args=['track', 'target.bst'])
|
|
218 |
+ result = cli.run(project=project, args=['source', 'track', 'target.bst'])
|
|
219 | 219 |
result.assert_success()
|
220 |
- result = cli.run(project=project, args=['fetch', 'target.bst'])
|
|
220 |
+ result = cli.run(project=project, args=['source', 'fetch', 'target.bst'])
|
|
221 | 221 |
result.assert_success()
|
222 | 222 |
result = cli.run(project=project, args=['build', 'target.bst'])
|
223 | 223 |
result.assert_success()
|