Will Salmon pushed to branch willsalmon/defaultWorkspaces at BuildStream / buildstream
Commits:
- 2c6cb230 by Valentin David at 2018-10-26T14:21:18Z
- a8250ca4 by Valentin David at 2018-10-26T15:04:25Z
- 1b308fe3 by Daniel Silverstone at 2018-10-26T15:50:42Z
- 65d90c31 by Daniel Silverstone at 2018-10-26T15:50:54Z
- ed733f3e by Daniel Silverstone at 2018-10-26T15:51:40Z
- e97d2a75 by Benjamin Schubert at 2018-10-29T10:17:07Z
- 4596470c by William Salmon at 2018-10-29T13:49:46Z
- e9207d66 by William Salmon at 2018-10-29T13:49:46Z
- ef8a5744 by William Salmon at 2018-10-29T13:49:46Z
14 changed files:
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/_stream.py
- buildstream/data/userconfig.yaml
- tests/examples/developing.py
- tests/examples/junctions.py
- tests/frontend/buildcheckout.py
- tests/frontend/cross_junction_workspace.py
- tests/frontend/workspace.py
- tests/frontend/yamlcache.py
- tests/integration/shell.py
- tests/integration/workspace.py
- tests/plugins/filter.py
- tests/testutils/runcli.py
Changes:
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -60,6 +60,9 @@ class Context():
         # The directory where build sandboxes will be created
         self.builddir = None
 
+        # Default root location for workspaces
+        self.workspacedir = None
+
         # The local binary artifact cache directory
         self.artifactdir = None
 
@@ -161,10 +164,10 @@ class Context():
         _yaml.node_validate(defaults, [
             'sourcedir', 'builddir', 'artifactdir', 'logdir',
             'scheduler', 'artifacts', 'logging', 'projects',
-            'cache'
+            'cache', 'workspacedir',
         ])
 
-        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
+        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
             # Allow the ~ tilde expansion and any environment variables in
             # path specification in the config files.
             #
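The new 'workspacedir' key is validated alongside the other directory settings and then run through the same tilde and environment-variable expansion loop. As a rough standalone sketch of what that expansion amounts to (the helper name expand_path is illustrative, not BuildStream API):

import os

def expand_path(path):
    # Allow "~" and environment variables in paths taken from user
    # configuration, then normalise to an absolute path.
    path = os.path.expanduser(path)
    path = os.path.expandvars(path)
    return os.path.normpath(os.path.abspath(path))

# A config value such as "workspacedir: ~/workspaces/${USER}" would resolve
# to something like /home/alice/workspaces/alice.
print(expand_path("~/workspaces/${USER}"))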
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -678,28 +678,37 @@ def workspace():
 @click.option('--no-checkout', default=False, is_flag=True,
               help="Do not checkout the source, only link to the given directory")
 @click.option('--force', '-f', default=False, is_flag=True,
-              help="Overwrite files existing in checkout directory")
+              help="The workspace will be created even if the directory in which it will be created is not empty " +
+              "or if a workspace for that element already exists")
 @click.option('--track', 'track_', default=False, is_flag=True,
               help="Track and fetch new source references before checking out the workspace")
-@click.argument('element',
-                type=click.Path(readable=False))
-@click.argument('directory', type=click.Path(file_okay=False))
+@click.option('--directory', type=click.Path(file_okay=False), default=None,
+              help="If only one element is given then the workspace will be created in the path given rather than " +
+              "being automatically generated")
+@click.argument('elements', nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
-def workspace_open(app, no_checkout, force, track_, element, directory):
+def workspace_open(app, no_checkout, force, track_, directory, elements):
     """Open a workspace for manual source modification"""
-
-    if os.path.exists(directory):
-
-        if not os.path.isdir(directory):
-            click.echo("Checkout directory is not a directory: {}".format(directory), err=True)
+    directories = []
+    if directory is not None:
+        if len(elements) > 1:
+            click.echo("Directory option can only be used if a single element is given", err=True)
             sys.exit(-1)
+        if os.path.exists(directory):
+            if not os.path.isdir(directory):
+                click.echo("Directory path is not a directory: {}".format(directory), err=True)
+                sys.exit(-1)
 
-        if not (no_checkout or force) and os.listdir(directory):
-            click.echo("Checkout directory is not empty: {}".format(directory), err=True)
-            sys.exit(-1)
+            if not (no_checkout or force) and os.listdir(directory):
+                click.echo("Directory path is not empty: {}".format(directory), err=True)
+                sys.exit(-1)
+        directories.append({'dir': directory, 'force': True})
+    else:
+        for element in elements:
+            directories.append({'dir': element.rstrip('.bst'), 'force': False})
 
     with app.initialized():
-        app.stream.workspace_open(element, directory,
+        app.stream.workspace_open(elements, directories,
                                   no_checkout=no_checkout,
                                   track_first=track_,
                                   force=force)
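With the reworked command, bst workspace open accepts any number of elements, and --directory is only valid when exactly one element is given. A hedged usage sketch in the style of the test suite (DATA_DIR, the cli/tmpdir/datafiles fixtures and the element names foo.bst, bar.bst and baz.bst are assumptions, not taken from this diff):

import os
import pytest

@pytest.mark.datafiles(DATA_DIR)
def test_open_elements(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)

    # Open workspaces for two elements at once; each one is created under the
    # configured 'workspacedir', in a directory named after its element.
    result = cli.run(project=project, args=['workspace', 'open', 'foo.bst', 'bar.bst'])
    result.assert_success()

    # A single element can still be opened into an explicit location.
    result = cli.run(project=project,
                     args=['workspace', 'open', '--directory',
                           os.path.join(str(tmpdir), 'baz-ws'), 'baz.bst'])
    result.assert_success()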
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -423,9 +423,16 @@ class Stream():
         else:
             if location == '-':
                 with target.timed_activity("Creating tarball"):
-                    with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
-                        with tarfile.open(fileobj=fo, mode="w|") as tf:
-                            sandbox_vroot.export_to_tar(tf, '.')
+                    # Save the stdout FD to restore later
+                    saved_fd = os.dup(sys.stdout.fileno())
+                    try:
+                        with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
+                            with tarfile.open(fileobj=fo, mode="w|") as tf:
+                                sandbox_vroot.export_to_tar(tf, '.')
+                    finally:
+                        # No matter what, restore stdout for further use
+                        os.dup2(saved_fd, sys.stdout.fileno())
+                        os.close(saved_fd)
             else:
                 with target.timed_activity("Creating tarball '{}'"
                                            .format(location)):
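The change above exists because os.fdopen() takes ownership of the stdout file descriptor, so closing the binary wrapper would close stdout for the rest of the process. A minimal standalone sketch of the same save/duplicate/restore pattern (the helper name and example payload are illustrative):

import os
import sys
import tarfile

def stream_tar_to_stdout(populate):
    # Duplicate the stdout FD first; os.fdopen() will close the original
    # descriptor when the 'with' block exits.
    saved_fd = os.dup(sys.stdout.fileno())
    try:
        with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
            with tarfile.open(fileobj=fo, mode="w|") as tf:
                populate(tf)
    finally:
        # Restore stdout so the process can keep printing afterwards.
        os.dup2(saved_fd, sys.stdout.fileno())
        os.close(saved_fd)

# Example: stream a single file to stdout as a tar stream.
# stream_tar_to_stdout(lambda tf: tf.add("/etc/hostname", arcname="hostname"))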
@@ -447,70 +454,77 @@ class Stream():
     #    track_first (bool): Whether to track and fetch first
     #    force (bool): Whether to ignore contents in an existing directory
     #
-    def workspace_open(self, target, directory, *,
+    def workspace_open(self, targets, directories, *,
                        no_checkout,
                        track_first,
                        force):
 
         if track_first:
-            track_targets = (target,)
+            track_targets = targets
         else:
             track_targets = ()
 
-        elements, track_elements = self._load((target,), track_targets,
+        elements, track_elements = self._load(targets, track_targets,
                                               selection=PipelineSelection.REDIRECT,
                                               track_selection=PipelineSelection.REDIRECT)
-        target = elements[0]
-        directory = os.path.abspath(directory)
-
-        if not list(target.sources()):
-            build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
-            if not build_depends:
-                raise StreamError("The given element has no sources")
-            detail = "Try opening a workspace on one of its dependencies instead:\n"
-            detail += " \n".join(build_depends)
-            raise StreamError("The given element has no sources", detail=detail)
 
         workspaces = self._context.get_workspaces()
 
-        # Check for workspace config
-        workspace = workspaces.get_workspace(target._get_full_name())
-        if workspace and not force:
-            raise StreamError("Workspace '{}' is already defined at: {}"
-                              .format(target.name, workspace.get_absolute_path()))
-
-        # If we're going to checkout, we need at least a fetch,
-        # if we were asked to track first, we're going to fetch anyway.
-        #
-        if not no_checkout or track_first:
-            track_elements = []
-            if track_first:
-                track_elements = elements
-            self._fetch(elements, track_elements=track_elements)
-
-        if not no_checkout and target._get_consistency() != Consistency.CACHED:
-            raise StreamError("Could not stage uncached source. " +
-                              "Use `--track` to track and " +
-                              "fetch the latest version of the " +
-                              "source.")
+        for target, directory_dict in zip(elements, directories):
+            if directory_dict['force']:
+                directory = directory_dict['dir']
+            else:
+                directory = os.path.abspath(os.path.join(self._context.workspacedir, directory_dict['dir']))
+
+            if not list(target.sources()):
+                build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
+                if not build_depends:
+                    raise StreamError("The given element has no sources")
+                detail = "Try opening a workspace on one of its dependencies instead:\n"
+                detail += " \n".join(build_depends)
+                raise StreamError("The given element has no sources", detail=detail)
+
+            # Check for workspace config
+            workspace = workspaces.get_workspace(target._get_full_name())
+            if workspace and not force:
+                raise StreamError("Workspace '{}' is already defined at: {}"
+                                  .format(target.name, workspace.get_absolute_path()))
+
+            # If we're going to checkout, we need at least a fetch,
+            # if we were asked to track first, we're going to fetch anyway.
+            #
+            if not no_checkout or track_first:
+                track_elements = []
+                if track_first:
+                    track_elements = elements
+                self._fetch(elements, track_elements=track_elements)
+
+            if not no_checkout and target._get_consistency() != Consistency.CACHED:
+                raise StreamError("Could not stage uncached source. " +
+                                  "Use `--track` to track and " +
+                                  "fetch the latest version of the " +
+                                  "source.")
 
-        if workspace:
-            workspaces.delete_workspace(target._get_full_name())
-            workspaces.save_config()
-            shutil.rmtree(directory)
-        try:
-            os.makedirs(directory, exist_ok=True)
-        except OSError as e:
-            raise StreamError("Failed to create workspace directory: {}".format(e)) from e
+            if workspace:
+                workspaces.delete_workspace(target._get_full_name())
+                workspaces.save_config()
+                shutil.rmtree(directory)
+            try:
+                os.makedirs(directory, exist_ok=True)
+            except OSError as e:
+                raise StreamError("Failed to create workspace directory: {}".format(e)) from e
 
-        workspaces.create_workspace(target._get_full_name(), directory)
+            workspaces.create_workspace(target._get_full_name(), directory)
 
-        if not no_checkout:
-            with target.timed_activity("Staging sources to {}".format(directory)):
-                target._open_workspace()
+            if not no_checkout:
+                with target.timed_activity("Staging sources to {}".format(directory)):
+                    target._open_workspace()
 
-        workspaces.save_config()
-        self._message(MessageType.INFO, "Saved workspace configuration")
+            # Saving the workspace once it is set up means that if the next one fails before
+            # the configuration gets saved, we don't end up with the good workspace not being saved
+            workspaces.save_config()
+            self._message(MessageType.INFO, "Added element {} to the workspace configuration"
+                          .format(target._get_full_name()))
 
     # workspace_close
     #
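When no explicit directory was requested, the loop above derives each workspace path by joining the element name (with the '.bst' characters stripped by the CLI) onto the configured workspacedir. A small worked illustration of that derivation, with made-up element names:

import os

workspacedir = "."                       # the new default from userconfig.yaml
names = ["hello", "gnu-toolchain/gcc"]   # as produced by the CLI from *.bst names

for name in names:
    # The stream joins the name onto workspacedir and makes it absolute.
    directory = os.path.abspath(os.path.join(workspacedir, name))
    print(name, "->", directory)

# Run from /src/project this prints /src/project/hello and
# /src/project/gnu-toolchain/gcc.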
--- a/buildstream/data/userconfig.yaml
+++ b/buildstream/data/userconfig.yaml
@@ -22,6 +22,9 @@ artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
 # Location to store build logs
 logdir: ${XDG_CACHE_HOME}/buildstream/logs
 
+# Default root location for workspaces, blank for no default set.
+workspacedir: .
+
 #
 # Cache
 #
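The same default can be pointed somewhere else through user configuration; the test harness exposes this via cli.configure(), which test_open_defaultlocation below relies on. A hedged sketch of such an override (DATA_DIR, the fixtures and the hello.bst element are assumptions, not part of this diff):

import os
import pytest

@pytest.mark.datafiles(DATA_DIR)
def test_workspacedir_override(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    workspace_root = os.path.join(str(tmpdir), 'my-workspaces')

    # Point the default workspace root at a per-test directory, then open a
    # workspace without --directory; it should be created under that root.
    cli.configure({'workspacedir': workspace_root})
    result = cli.run(project=project, args=['workspace', 'open', 'hello.bst'])
    result.assert_success()
    assert os.path.isdir(os.path.join(workspace_root, 'hello'))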
--- a/tests/examples/developing.py
+++ b/tests/examples/developing.py
@@ -55,7 +55,7 @@ def test_open_workspace(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
     workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
 
-    result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
+    result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst', ])
     result.assert_success()
 
     result = cli.run(project=project, args=['workspace', 'list'])
@@ -72,7 +72,7 @@ def test_make_change_in_workspace(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
     workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
 
-    result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
+    result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst'])
     result.assert_success()
 
     result = cli.run(project=project, args=['workspace', 'list'])
--- a/tests/examples/junctions.py
+++ b/tests/examples/junctions.py
@@ -44,7 +44,7 @@ def test_open_cross_junction_workspace(cli, tmpdir, datafiles):
     workspace_dir = os.path.join(str(tmpdir), "workspace_hello_junction")
 
     result = cli.run(project=project,
-                     args=['workspace', 'open', 'hello-junction.bst:hello.bst', workspace_dir])
+                     args=['workspace', 'open', '--directory', workspace_dir, 'hello-junction.bst:hello.bst'])
     result.assert_success()
 
     result = cli.run(project=project,
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -128,7 +128,6 @@ def test_build_checkout_tarball(datafiles, cli):
     assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
 
 
-@pytest.mark.skip(reason="Capturing the binary output is causing a stacktrace")
 @pytest.mark.datafiles(DATA_DIR)
 def test_build_checkout_tarball_stdout(datafiles, cli):
     project = os.path.join(datafiles.dirname, datafiles.basename)
@@ -143,7 +142,7 @@ def test_build_checkout_tarball_stdout(datafiles, cli):
 
     checkout_args = ['checkout', '--tar', 'target.bst', '-']
 
-    result = cli.run(project=project, args=checkout_args)
+    result = cli.run(project=project, args=checkout_args, binary_capture=True)
     result.assert_success()
 
     with open(tarball, 'wb') as f:
@@ -510,7 +509,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
 
     # Now open a workspace on the junction
     #
-    result = cli.run(project=project, args=['workspace', 'open', 'junction.bst', workspace])
+    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, 'junction.bst'])
     result.assert_success()
     filename = os.path.join(workspace, 'files', 'etc-files', 'etc', 'animal.conf')
 
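test_build_checkout_tarball_stdout can be re-enabled because the new binary capture keeps the tar stream intact as bytes rather than decoded text. A hedged sketch of inspecting such a captured tar stream (the helper name and the asserted member are made up):

import io
import os
import tarfile

def tar_member_names(tar_bytes):
    # The captured stdout is raw bytes; wrapping it in BytesIO lets tarfile
    # read the stream without touching the filesystem.
    with tarfile.open(fileobj=io.BytesIO(tar_bytes), mode='r') as tar:
        return tar.getnames()

# e.g. names = tar_member_names(result.output)
#      assert os.path.join('.', 'usr', 'bin', 'hello') in names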
--- a/tests/frontend/cross_junction_workspace.py
+++ b/tests/frontend/cross_junction_workspace.py
@@ -47,7 +47,7 @@ def open_cross_junction(cli, tmpdir):
     workspace = tmpdir.join("workspace")
 
     element = 'sub.bst:data.bst'
-    args = ['workspace', 'open', element, str(workspace)]
+    args = ['workspace', 'open', '--directory', str(workspace), element]
     result = cli.run(project=project, args=args)
     result.assert_success()
 
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -21,6 +21,7 @@
 #           Phillip Smyth <phillip.smyth@codethink.co.uk>
 #           Jonathan Maw <jonathan.maw@codethink.co.uk>
 #           Richard Maw <richard.maw@codethink.co.uk>
+#           William Salmon <will.salmon@codethink.co.uk>
 #
 
 import os
@@ -43,72 +44,195 @@ DATA_DIR = os.path.join(
 )
 
 
+class WorkspaceCreater():
+    def __init__(self, cli, tmpdir, datafiles, project_path=None):
+        self.cli = cli
+        self.tmpdir = tmpdir
+        self.datafiles = datafiles
+
+        if not project_path:
+            project_path = os.path.join(datafiles.dirname, datafiles.basename)
+        else:
+            shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
+
+        self.project_path = project_path
+        self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
+
+        self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
+
+    def create_workspace_element(self, kind, track, suffix='', workspace_dir=None,
+                                 element_attrs=None):
+        element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
+        element_path = os.path.join(self.project_path, 'elements')
+        if not workspace_dir:
+            workspace_dir = os.path.join(self.workspace_cmd, element_name.rstrip('.bst'))
+
+        # Create our repo object of the given source type with
+        # the bin files, and then collect the initial ref.
+        repo = create_repo(kind, str(self.tmpdir))
+        ref = repo.create(self.bin_files_path)
+        if track:
+            ref = None
+
+        # Write out our test target
+        element = {
+            'kind': 'import',
+            'sources': [
+                repo.source_config(ref=ref)
+            ]
+        }
+        if element_attrs:
+            element = {**element, **element_attrs}
+        _yaml.dump(element,
+                   os.path.join(element_path,
+                                element_name))
+        return element_name, element_path, workspace_dir
+
+    def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
+                                  element_attrs=None):
+
+        results = []
+
+        if suffixs is None:
+            suffixs = ['', ] * len(kinds)
+        else:
+            if len(suffixs) != len(kinds):
+                raise "terable error"
+
+        for suffix, kind in zip(suffixs, kinds):
+            element_name, element_path, workspace_dir = \
+                self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
+                                              element_attrs)
+
+            # Assert that there is no reference, a track & fetch is needed
+            state = self.cli.get_element_state(self.project_path, element_name)
+            if track:
+                assert state == 'no reference'
+            else:
+                assert state == 'fetch needed'
+            results.append((element_name, workspace_dir))
+
+        return results
+
+    def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
+                        element_attrs=None):
+
+        results = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
+                                                 element_attrs)
+        os.makedirs(self.workspace_cmd, exist_ok=True)
+
+        # Now open the workspace, this should have the effect of automatically
+        # tracking & fetching the source from the repo.
+        args = ['workspace', 'open']
+        if track:
+            args.append('--track')
+        if workspace_dir is not None:
+            assert len(results) == 1, "test logic error"
+            _, workspace_dir = results[0]
+            args.extend(['--directory', workspace_dir])
+
+        args.extend([element_name for element_name, workspace_dir_suffix in results])
+        result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
+
+        result.assert_success()
+
+        for element_name, workspace_dir in results:
+            # Assert that we are now buildable because the source is
+            # now cached.
+            assert self.cli.get_element_state(self.project_path, element_name) == 'buildable'
+
+            # Check that the executable hello file is found in the workspace
+            filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
+            assert os.path.exists(filename)
+
+        return results
+
+
 def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
                    project_path=None, element_attrs=None):
-    if not workspace_dir:
-        workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
-    if not project_path:
-        project_path = os.path.join(datafiles.dirname, datafiles.basename)
-    else:
-        shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
-    bin_files_path = os.path.join(project_path, 'files', 'bin-files')
-    element_path = os.path.join(project_path, 'elements')
-    element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
+    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles, project_path)
+    workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
+                                                  element_attrs)
+    assert len(workspaces) == 1
+    element_name, workspace = workspaces[0]
+    return element_name, workspace_object.project_path, workspace
 
-    # Create our repo object of the given source type with
-    # the bin files, and then collect the initial ref.
-    #
-    repo = create_repo(kind, str(tmpdir))
-    ref = repo.create(bin_files_path)
-    if track:
-        ref = None
 
-    # Write out our test target
-    element = {
-        'kind': 'import',
-        'sources': [
-            repo.source_config(ref=ref)
-        ]
-    }
-    if element_attrs:
-        element = {**element, **element_attrs}
-    _yaml.dump(element,
-               os.path.join(element_path,
-                            element_name))
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", repo_kinds)
+def test_open(cli, tmpdir, datafiles, kind):
+    open_workspace(cli, tmpdir, datafiles, kind, False)
 
-    # Assert that there is no reference, a track & fetch is needed
-    state = cli.get_element_state(project_path, element_name)
-    if track:
-        assert state == 'no reference'
-    else:
-        assert state == 'fetch needed'
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_open_multi(cli, tmpdir, datafiles):
+
+    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
+    workspaces = workspace_object.open_workspaces(repo_kinds, False)
+
+    for (elname, workspace), kind in zip(workspaces, repo_kinds):
+        assert kind in elname
+        workspace_lsdir = os.listdir(workspace)
+        if kind == 'git':
+            assert('.git' in workspace_lsdir)
+        elif kind == 'bzr':
+            assert('.bzr' in workspace_lsdir)
+        else:
+            assert not ('.git' in workspace_lsdir)
+            assert not ('.bzr' in workspace_lsdir)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_open_multi_with_directory(cli, tmpdir, datafiles):
+    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
+
+    results = workspace_object.create_workspace_elements(repo_kinds, False, repo_kinds)
+    os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
+
+    # Now open the workspace, this should have the effect of automatically
+    # tracking & fetching the source from the repo.
+    args = ['workspace', 'open']
+    args.extend(['--directory', 'any/dir/should/fail'])
+
+    args.extend([element_name for element_name, workspace_dir_suffix in results])
+    result = workspace_object.cli.run(cwd=workspace_object.workspace_cmd, project=workspace_object.project_path,
+                                      args=args)
+
+    result.assert_main_error(ErrorDomain.ARTIFACT, None)
+    assert ("Directory option can only be used if a single element is given" in result.stderr)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_open_defaultlocation(cli, tmpdir, datafiles):
+    workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
+
+    ((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], False, ['git'])
+    os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
 
     # Now open the workspace, this should have the effect of automatically
     # tracking & fetching the source from the repo.
     args = ['workspace', 'open']
-    if track:
-        args.append('--track')
-    args.extend([element_name, workspace_dir])
-    result = cli.run(project=project_path, args=args)
+    args.append(element_name)
+
+    # In the other tests we set the cmd to workspace_object.workspace_cmd with the optional
+    # argument, cwd, for the workspace_object.cli.run function. But here we set the default
+    # workspace location to workspace_object.workspace_cmd and run the cli.run function with
+    # no cwd option so that it runs in the project directory.
+    cli.configure({'workspacedir': workspace_object.workspace_cmd})
+    result = workspace_object.cli.run(project=workspace_object.project_path,
+                                      args=args)
 
     result.assert_success()
 
-    # Assert that we are now buildable because the source is
-    # now cached.
-    assert cli.get_element_state(project_path, element_name) == 'buildable'
+    assert cli.get_element_state(workspace_object.project_path, element_name) == 'buildable'
 
     # Check that the executable hello file is found in the workspace
+    # even though the cli.run function was not run with cwd = workspace_object.workspace_cmd;
+    # the workspace should be created in there as we used the 'workspacedir' configuration
+    # option.
     filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
     assert os.path.exists(filename)
 
-    return (element_name, project_path, workspace_dir)
-
-
-@pytest.mark.datafiles(DATA_DIR)
-@pytest.mark.parametrize("kind", repo_kinds)
-def test_open(cli, tmpdir, datafiles, kind):
-    open_workspace(cli, tmpdir, datafiles, kind, False)
-
 
 @pytest.mark.datafiles(DATA_DIR)
 def test_open_bzr_customize(cli, tmpdir, datafiles):
@@ -150,7 +274,7 @@ def test_open_force(cli, tmpdir, datafiles, kind):
 
     # Now open the workspace again with --force, this should happily succeed
     result = cli.run(project=project, args=[
-        'workspace', 'open', '--force', element_name, workspace
+        'workspace', 'open', '--force', '--directory', workspace, element_name
     ])
     result.assert_success()
 
@@ -165,7 +289,7 @@ def test_open_force_open(cli, tmpdir, datafiles, kind):
 
     # Now open the workspace again with --force, this should happily succeed
     result = cli.run(project=project, args=[
-        'workspace', 'open', '--force', element_name, workspace
+        'workspace', 'open', '--force', '--directory', workspace, element_name
     ])
     result.assert_success()
 
@@ -196,7 +320,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
 
     # Now open the workspace again with --force, this should happily succeed
     result = cli.run(project=project, args=[
-        'workspace', 'open', '--force', element_name2, workspace
+        'workspace', 'open', '--force', '--directory', workspace, element_name2
     ])
 
     # Assert that the file in workspace 1 has been replaced
@@ -504,7 +628,7 @@ def test_buildable_no_ref(cli, tmpdir, datafiles):
     # Now open the workspace. We don't need to checkout the source though.
     workspace = os.path.join(str(tmpdir), 'workspace-no-ref')
     os.makedirs(workspace)
-    args = ['workspace', 'open', '--no-checkout', element_name, workspace]
+    args = ['workspace', 'open', '--no-checkout', '--directory', workspace, element_name]
     result = cli.run(project=project, args=args)
     result.assert_success()
 
@@ -766,7 +890,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
                            element_name))
 
     # Make a change to the workspaces file
-    result = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     result.assert_success()
     result = cli.run(project=project, args=['workspace', 'close', '--remove-dir', element_name])
     result.assert_success()
--- a/tests/frontend/yamlcache.py
+++ b/tests/frontend/yamlcache.py
@@ -103,7 +103,7 @@ def test_yamlcache_used(cli, tmpdir, ref_storage, with_junction, move_project):
         yc.put_from_key(prj, element_path, key, contents)
 
     # Show that a variable has been added
-    result = cli.run(project=project, args=['show', '--format', '%{vars}', 'test.bst'])
+    result = cli.run(project=project, args=['show', '--deps', 'none', '--format', '%{vars}', 'test.bst'])
     result.assert_success()
     data = yaml.safe_load(result.output)
     assert 'modified' in data
@@ -135,7 +135,7 @@ def test_yamlcache_changed_file(cli, tmpdir, ref_storage, with_junction):
     _yaml.load(element_path, copy_tree=False, project=prj, yaml_cache=yc)
 
     # Show that a variable has been added
-    result = cli.run(project=project, args=['show', '--format', '%{vars}', 'test.bst'])
+    result = cli.run(project=project, args=['show', '--deps', 'none', '--format', '%{vars}', 'test.bst'])
     result.assert_success()
     data = yaml.safe_load(result.output)
     assert 'modified' in data
--- a/tests/integration/shell.py
+++ b/tests/integration/shell.py
@@ -278,7 +278,7 @@ def test_workspace_visible(cli, tmpdir, datafiles):
 
     # Open a workspace on our build failing element
     #
-    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     assert res.exit_code == 0
 
     # Ensure the dependencies of our build failing element are built
@@ -312,7 +312,7 @@ def test_sysroot_workspace_visible(cli, tmpdir, datafiles):
 
     # Open a workspace on our build failing element
    #
-    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     assert res.exit_code == 0
 
     # Ensure the dependencies of our build failing element are built
--- a/tests/integration/workspace.py
+++ b/tests/integration/workspace.py
@@ -23,7 +23,7 @@ def test_workspace_mount(cli, tmpdir, datafiles):
     workspace = os.path.join(cli.directory, 'workspace')
     element_name = 'workspace/workspace-mount.bst'
 
-    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     assert res.exit_code == 0
 
     res = cli.run(project=project, args=['build', element_name])
@@ -39,7 +39,7 @@ def test_workspace_commanddir(cli, tmpdir, datafiles):
     workspace = os.path.join(cli.directory, 'workspace')
     element_name = 'workspace/workspace-commanddir.bst'
 
-    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     assert res.exit_code == 0
 
     res = cli.run(project=project, args=['build', element_name])
@@ -75,7 +75,7 @@ def test_workspace_updated_dependency(cli, tmpdir, datafiles):
     _yaml.dump(dependency, os.path.join(element_path, dep_name))
 
     # First open the workspace
-    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     assert res.exit_code == 0
 
     # We build the workspaced element, so that we have an artifact
@@ -130,7 +130,7 @@ def test_workspace_update_dependency_failed(cli, tmpdir, datafiles):
     _yaml.dump(dependency, os.path.join(element_path, dep_name))
 
     # First open the workspace
-    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     assert res.exit_code == 0
 
     # We build the workspaced element, so that we have an artifact
@@ -205,7 +205,7 @@ def test_updated_dependency_nested(cli, tmpdir, datafiles):
     _yaml.dump(dependency, os.path.join(element_path, dep_name))
 
     # First open the workspace
-    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     assert res.exit_code == 0
 
     # We build the workspaced element, so that we have an artifact
@@ -258,7 +258,7 @@ def test_incremental_configure_commands_run_only_once(cli, tmpdir, datafiles):
     _yaml.dump(element, os.path.join(element_path, element_name))
 
     # We open a workspace on the above element
-    res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     res.assert_success()
 
     # Then we build, and check whether the configure step succeeded
--- a/tests/plugins/filter.py
+++ b/tests/plugins/filter.py
@@ -108,7 +108,7 @@ def test_filter_forbid_also_rdep(datafiles, cli):
 def test_filter_workspace_open(datafiles, cli, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
     workspace_dir = os.path.join(tmpdir.dirname, tmpdir.basename, "workspace")
-    result = cli.run(project=project, args=['workspace', 'open', 'deps-permitted.bst', workspace_dir])
+    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'deps-permitted.bst'])
     result.assert_success()
     assert os.path.exists(os.path.join(workspace_dir, "foo"))
     assert os.path.exists(os.path.join(workspace_dir, "bar"))
@@ -120,7 +120,7 @@ def test_filter_workspace_build(datafiles, cli, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
     tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
     workspace_dir = os.path.join(tempdir, "workspace")
-    result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
+    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
     result.assert_success()
     src = os.path.join(workspace_dir, "foo")
     dst = os.path.join(workspace_dir, "quux")
@@ -138,7 +138,7 @@ def test_filter_workspace_close(datafiles, cli, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
     tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
     workspace_dir = os.path.join(tempdir, "workspace")
-    result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
+    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
     result.assert_success()
     src = os.path.join(workspace_dir, "foo")
     dst = os.path.join(workspace_dir, "quux")
@@ -158,7 +158,7 @@ def test_filter_workspace_reset(datafiles, cli, tmpdir):
     project = os.path.join(datafiles.dirname, datafiles.basename)
     tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
     workspace_dir = os.path.join(tempdir, "workspace")
-    result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
+    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
     result.assert_success()
     src = os.path.join(workspace_dir, "foo")
     dst = os.path.join(workspace_dir, "quux")
--- a/tests/testutils/runcli.py
+++ b/tests/testutils/runcli.py
@@ -17,7 +17,7 @@ import pytest
 # CliRunner convenience API (click.testing module) does not support
 # separation of stdout/stderr.
 #
-from _pytest.capture import MultiCapture, FDCapture
+from _pytest.capture import MultiCapture, FDCapture, FDCaptureBinary
 
 # Import the main cli entrypoint
 from buildstream._frontend import cli as bst_cli
@@ -234,9 +234,10 @@ class Cli():
     #    silent (bool): Whether to pass --no-verbose
     #    env (dict): Environment variables to temporarily set during the test
     #    args (list): A list of arguments to pass buildstream
+    #    binary_capture (bool): Whether to capture the stdout/stderr as binary
     #
     def run(self, configure=True, project=None, silent=False, env=None,
-            cwd=None, options=None, args=None):
+            cwd=None, options=None, args=None, binary_capture=False):
         if args is None:
             args = []
         if options is None:
@@ -278,7 +279,7 @@ class Cli():
         except ValueError:
             sys.__stdout__ = open('/dev/stdout', 'w')
 
-        result = self.invoke(bst_cli, bst_args)
+        result = self.invoke(bst_cli, bst_args, binary_capture=binary_capture)
 
         # Some informative stdout we can observe when anything fails
         if self.verbose:
@@ -295,7 +296,7 @@ class Cli():
 
         return result
 
-    def invoke(self, cli, args=None, color=False, **extra):
+    def invoke(self, cli, args=None, color=False, binary_capture=False, **extra):
         exc_info = None
         exception = None
         exit_code = 0
@@ -305,8 +306,8 @@ class Cli():
         old_stdin = sys.stdin
         with open(os.devnull) as devnull:
             sys.stdin = devnull
-
-            capture = MultiCapture(out=True, err=True, in_=False, Capture=FDCapture)
+            capture_kind = FDCaptureBinary if binary_capture else FDCapture
+            capture = MultiCapture(out=True, err=True, in_=False, Capture=capture_kind)
            capture.start_capturing()
 
             try:
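The switch from FDCapture to FDCaptureBinary matters because the checkout-to-stdout test emits a tarball, and tar data is generally not valid text. A small self-contained illustration of why a text capture cannot round-trip such output (independent of pytest; names are made up):

import io
import tarfile

# Build a tiny tar archive in memory containing an arbitrary binary payload.
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode='w') as tf:
    data = bytes(range(256))
    info = tarfile.TarInfo(name='payload.bin')
    info.size = len(data)
    tf.addfile(info, io.BytesIO(data))
raw = buf.getvalue()

# Kept as bytes, the archive reads back cleanly.
with tarfile.open(fileobj=io.BytesIO(raw), mode='r') as tf:
    assert tf.getnames() == ['payload.bin']

# Decoded as text (what a non-binary capture would effectively require), it breaks.
try:
    raw.decode('utf-8')
except UnicodeDecodeError:
    print("tar output is not valid UTF-8 - hence FDCaptureBinary")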