Will Salmon pushed to branch willsalmon/defaultWorkspaces at BuildStream / buildstream
Commits:
- 12719f0d by Jürg Billeter at 2018-10-22T17:05:41Z
- a7a28d14 by Jürg Billeter at 2018-10-22T17:05:41Z
- be29e0f5 by Jürg Billeter at 2018-10-22T17:53:26Z
- b74aca1a by Jürg Billeter at 2018-10-23T09:22:19Z
- c7dda150 by Jürg Billeter at 2018-10-23T09:48:00Z
- 74c115b9 by Angelos Evripiotis at 2018-10-23T10:07:31Z
- ecb58b42 by Phil Dawson at 2018-10-23T10:33:47Z
- aa0cbf5d by Martin Blanchard at 2018-10-23T10:54:40Z
- 552f5fc6 by Jim MacArthur at 2018-10-23T11:19:48Z
- ab2f79af by William Salmon at 2018-10-23T13:49:34Z
10 changed files:
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_context.py
- buildstream/_frontend/cli.py
- buildstream/data/userconfig.yaml
- buildstream/plugins/sources/deb.py
- buildstream/plugins/sources/tar.py
- buildstream/sandbox/_sandboxremote.py
- tests/frontend/workspace.py
- tests/testutils/artifactshare.py
Changes:
buildstream/_artifactcache/artifactcache.py

@@ -228,7 +228,7 @@ class ArtifactCache():
         self._required_elements.update(elements)
 
         # For the cache keys which were resolved so far, we bump
-        # the atime of them.
+        # the mtime of them.
         #
         # This is just in case we have concurrent instances of
         # BuildStream running with the same artifact cache, it will
@@ -240,7 +240,7 @@ class ArtifactCache():
             for key in (strong_key, weak_key):
                 if key:
                     try:
-                        self.update_atime(key)
+                        self.update_mtime(element, key)
                     except ArtifactError:
                         pass
 
@@ -391,15 +391,16 @@ class ArtifactCache():
     def preflight(self):
         pass
 
-    # update_atime()
+    # update_mtime()
     #
-    # Update the atime of an artifact.
+    # Update the mtime of an artifact.
     #
     # Args:
+    #     element (Element): The Element to update
     #     key (str): The key of the artifact.
     #
-    def update_atime(self, key):
-        raise ImplError("Cache '{kind}' does not implement contains()"
+    def update_mtime(self, element, key):
+        raise ImplError("Cache '{kind}' does not implement update_mtime()"
                         .format(kind=type(self).__name__))
 
     # initialize_remotes():
buildstream/_artifactcache/cascache.py

@@ -538,8 +538,9 @@ class CASCache(ArtifactCache):
         except FileNotFoundError as e:
             raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
 
-    def update_atime(self, ref):
+    def update_mtime(self, element, key):
         try:
+            ref = self.get_artifact_fullname(element, key)
             os.utime(self._refpath(ref))
         except FileNotFoundError as e:
             raise ArtifactError("Attempt to access unavailable artifact: {}".format(e)) from e
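Note: the atime-to-mtime switch matters because artifact expiry orders refs by timestamp; bumping the mtime of every resolved key protects artifacts that a concurrent BuildStream instance still needs. A minimal sketch of mtime-ordered expiry over a refs directory, assuming a plain file-per-ref layout (the helper name is illustrative, not BuildStream API):

    import os

    def refs_by_age(refdir):
        # List ref files least-recently-used first, using the mtime
        # that update_mtime() bumps above.
        refs = []
        for root, _, files in os.walk(refdir):
            for name in files:
                path = os.path.join(root, name)
                refs.append((os.path.getmtime(path), path))
        return [path for _, path in sorted(refs)]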
buildstream/_context.py

@@ -60,6 +60,9 @@ class Context():
         # The directory where build sandboxes will be created
         self.builddir = None
 
+        # Default root location for workspaces
+        self.workspacedir = None
+
         # The local binary artifact cache directory
         self.artifactdir = None
 
@@ -161,10 +164,10 @@ class Context():
         _yaml.node_validate(defaults, [
             'sourcedir', 'builddir', 'artifactdir', 'logdir',
             'scheduler', 'artifacts', 'logging', 'projects',
-            'cache'
+            'cache', 'workspacedir',
         ])
 
-        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
+        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
             # Allow the ~ tilde expansion and any environment variables in
             # path specification in the config files.
             #
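Note: adding 'workspacedir' to this loop means it gets the same tilde and environment-variable expansion as the other configured directories. A two-line sketch of that expansion using the standard os.path helpers (the real code resolves paths through its own node helpers, and the value here is an example only):

    import os

    config_value = "${XDG_CACHE_HOME}/buildstream/workspaces"  # example value
    expanded = os.path.expandvars(os.path.expanduser(config_value))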
buildstream/_frontend/cli.py

@@ -678,31 +678,38 @@ def workspace():
 @click.option('--no-checkout', default=False, is_flag=True,
               help="Do not checkout the source, only link to the given directory")
 @click.option('--force', '-f', default=False, is_flag=True,
-              help="Overwrite files existing in checkout directory")
+              help="Overwrite files existing in the checkout directory and allow the workspace to be created even if one already exists")
 @click.option('--track', 'track_', default=False, is_flag=True,
               help="Track and fetch new source references before checking out the workspace")
-@click.argument('element',
+@click.option('--directory', type=click.Path(file_okay=False), default=None)
+@click.argument('elements', nargs=-1,
                 type=click.Path(readable=False))
-@click.argument('directory', type=click.Path(file_okay=False))
 @click.pass_obj
-def workspace_open(app, no_checkout, force, track_, element, directory):
+def workspace_open(app, no_checkout, force, track_, directory, elements):
     """Open a workspace for manual source modification"""
+    directories = []
+    if directory is not None:
+        if len(elements) > 1:
+            raise click.UsageError("--directory can only be used when opening a single workspace")
+        if os.path.exists(directory):
+            if not os.path.isdir(directory):
+                click.echo("Checkout directory is not a directory: {}".format(directory), err=True)
+                sys.exit(-1)
 
-    if os.path.exists(directory):
-
-        if not os.path.isdir(directory):
-            click.echo("Checkout directory is not a directory: {}".format(directory), err=True)
-            sys.exit(-1)
-
-        if not (no_checkout or force) and os.listdir(directory):
-            click.echo("Checkout directory is not empty: {}".format(directory), err=True)
-            sys.exit(-1)
+            if not (no_checkout or force) and os.listdir(directory):
+                click.echo("Checkout directory is not empty: {}".format(directory), err=True)
+                sys.exit(-1)
+        directories.append(directory)
+    else:
+        for element in elements:
+            directories.append(os.path.splitext(element)[0])
 
     with app.initialized():
-        app.stream.workspace_open(element, directory,
-                                  no_checkout=no_checkout,
-                                  track_first=track_,
-                                  force=force)
+        for element, directory in zip(elements, directories):
+            app.stream.workspace_open(element, directory,
+                                      no_checkout=no_checkout,
+                                      track_first=track_,
+                                      force=force)
 
 
 ##################################################################
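Note: with this change `bst workspace open` takes several elements at once and, when --directory is not given, derives one workspace directory per element from its name. A small standalone sketch of that derivation, assuming element names end in '.bst' (the helper name is illustrative, not part of the CLI):

    import os

    def derive_workspace_dirs(elements, directory=None):
        # An explicit --directory is only valid for a single element;
        # otherwise each element gets a directory named after itself,
        # with the .bst suffix dropped.
        if directory is not None:
            if len(elements) > 1:
                raise ValueError("--directory can only be used with a single element")
            return [directory]
        return [os.path.splitext(element)[0] for element in elements]

    # e.g. derive_workspace_dirs(['foo.bst', 'bar.bst']) -> ['foo', 'bar']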
buildstream/data/userconfig.yaml

@@ -22,6 +22,9 @@ artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
 # Location to store build logs
 logdir: ${XDG_CACHE_HOME}/buildstream/logs
 
+# Default root location for workspaces, blank for no default set.
+workspacedir: .
+
 #
 # Cache
 #
buildstream/plugins/sources/deb.py

@@ -50,7 +50,7 @@ deb - stage files from .deb packages
 """
 
 import tarfile
-from contextlib import contextmanager, ExitStack
+from contextlib import contextmanager
 import arpy  # pylint: disable=import-error
 
 from .tar import TarSource
@@ -69,8 +69,7 @@ class DebSource(TarSource):
 
     @contextmanager
     def _get_tar(self):
-        with ExitStack() as context:
-            deb_file = context.enter_context(open(self._get_mirror_file(), 'rb'))
+        with open(self._get_mirror_file(), 'rb') as deb_file:
             arpy_archive = arpy.Archive(fileobj=deb_file)
             arpy_archive.read_all_headers()
             data_tar_arpy = [v for k, v in arpy_archive.archived_files.items() if b"data.tar" in k][0]
buildstream/plugins/sources/tar.py

@@ -57,7 +57,7 @@ tar - stage files from tar archives
 
 import os
 import tarfile
-from contextlib import contextmanager, ExitStack
+from contextlib import contextmanager
 from tempfile import TemporaryFile
 
 from buildstream import SourceError
@@ -88,8 +88,7 @@ class TarSource(DownloadableFileSource):
     def _run_lzip(self):
         assert self.host_lzip
         with TemporaryFile() as lzip_stdout:
-            with ExitStack() as context:
-                lzip_file = context.enter_context(open(self._get_mirror_file(), 'r'))
+            with open(self._get_mirror_file(), 'r') as lzip_file:
                 self.call([self.host_lzip, '-d'],
                           stdin=lzip_file,
                           stdout=lzip_stdout)
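Note: both the deb.py and tar.py cleanups rely on the fact that an ExitStack managing a single resource is equivalent to a plain with statement. A minimal illustration (the file name is hypothetical):

    from contextlib import ExitStack

    path = "archive.tar"  # hypothetical file

    # Before: one resource routed through an ExitStack ...
    with ExitStack() as stack:
        f = stack.enter_context(open(path, 'rb'))
        header = f.read(4)

    # ... after: the equivalent, simpler form.
    with open(path, 'rb') as f:
        header = f.read(4)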
buildstream/sandbox/_sandboxremote.py

@@ -76,8 +76,7 @@ class SandboxRemote(Sandbox):
         # Upload the Command message to the remote CAS server
         command_digest = cascache.push_message(self._get_project(), remote_command)
         if not command_digest or not cascache.verify_digest_pushed(self._get_project(), command_digest):
-            # Command push failed
-            return None
+            raise SandboxError("Failed pushing build command to remote CAS.")
 
         # Create and send the action.
         action = remote_execution_pb2.Action(command_digest=command_digest,
@@ -88,27 +87,57 @@ class SandboxRemote(Sandbox):
         # Upload the Action message to the remote CAS server
         action_digest = cascache.push_message(self._get_project(), action)
         if not action_digest or not cascache.verify_digest_pushed(self._get_project(), action_digest):
-            # Action push failed
-            return None
+            raise SandboxError("Failed pushing build action to remote CAS.")
 
         # Next, try to create a communication channel to the BuildGrid server.
         channel = grpc.insecure_channel(self.server_url)
         stub = remote_execution_pb2_grpc.ExecutionStub(channel)
         request = remote_execution_pb2.ExecuteRequest(action_digest=action_digest,
                                                       skip_cache_lookup=False)
-        try:
-            operation_iterator = stub.Execute(request)
-        except grpc.RpcError:
-            return None
+
+        def __run_remote_command(stub, execute_request=None, running_operation=None):
+            try:
+                last_operation = None
+                if execute_request is not None:
+                    operation_iterator = stub.Execute(execute_request)
+                else:
+                    request = remote_execution_pb2.WaitExecutionRequest(name=running_operation.name)
+                    operation_iterator = stub.WaitExecution(request)
+
+                for operation in operation_iterator:
+                    if operation.done:
+                        return operation
+                    else:
+                        last_operation = operation
+            except grpc.RpcError as e:
+                status_code = e.code()
+                if status_code == grpc.StatusCode.UNAVAILABLE:
+                    raise SandboxError("Failed contacting remote execution server at {}."
+                                       .format(self.server_url))
+
+                elif status_code in (grpc.StatusCode.INVALID_ARGUMENT,
+                                     grpc.StatusCode.FAILED_PRECONDITION,
+                                     grpc.StatusCode.RESOURCE_EXHAUSTED,
+                                     grpc.StatusCode.INTERNAL,
+                                     grpc.StatusCode.DEADLINE_EXCEEDED):
+                    raise SandboxError("{} ({}).".format(e.details(), status_code.name))
+
+                elif running_operation and status_code == grpc.StatusCode.UNIMPLEMENTED:
+                    raise SandboxError("Failed trying to recover from connection loss: "
+                                       "server does not support operation status polling recovery.")
+
+            return last_operation
 
         operation = None
         with self._get_context().timed_activity("Waiting for the remote build to complete"):
-            # It is advantageous to check operation_iterator.code() is grpc.StatusCode.OK here,
-            # which will check the server is actually contactable. However, calling it when the
-            # server is available seems to cause .code() to hang forever.
-            for operation in operation_iterator:
-                if operation.done:
-                    break
+            operation = __run_remote_command(stub, execute_request=request)
+            if operation is None:
+                return None
+            elif operation.done:
+                return operation
+
+            while operation is not None and not operation.done:
+                operation = __run_remote_command(stub, running_operation=operation)
 
         return operation
 
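Note: the new error handling distinguishes gRPC status codes: UNAVAILABLE means the server could not be reached, a fixed set of codes is treated as fatal, and UNIMPLEMENTED while recovering means the server cannot resume operations via WaitExecution; anything else falls through to polling. A condensed, standalone sketch of that classification, assuming grpcio is installed (the diff raises SandboxError; labels are returned here only for illustration):

    import grpc

    FATAL_CODES = (grpc.StatusCode.INVALID_ARGUMENT,
                   grpc.StatusCode.FAILED_PRECONDITION,
                   grpc.StatusCode.RESOURCE_EXHAUSTED,
                   grpc.StatusCode.INTERNAL,
                   grpc.StatusCode.DEADLINE_EXCEEDED)

    def classify(status_code, recovering=False):
        # Mirrors the branches in __run_remote_command above.
        if status_code == grpc.StatusCode.UNAVAILABLE:
            return "server unreachable"
        if status_code in FATAL_CODES:
            return "fatal server-side failure"
        if recovering and status_code == grpc.StatusCode.UNIMPLEMENTED:
            return "server cannot resume operations via WaitExecution"
        return "retry by polling the running operation"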
@@ -192,7 +221,6 @@ class SandboxRemote(Sandbox):
 
         if operation is None:
             # Failure of remote execution, usually due to an error in BuildStream
-            # NB This error could be raised in __run_remote_command
             raise SandboxError("No response returned from server")
 
         assert not operation.HasField('error') and operation.HasField('response')
tests/frontend/workspace.py

@@ -21,6 +21,7 @@
 #  Phillip Smyth <phillip.smyth@codethink.co.uk>
 #  Jonathan Maw <jonathan.maw@codethink.co.uk>
 #  Richard Maw <richard.maw@codethink.co.uk>
+#  William Salmon <will.salmon@codethink.co.uk>
 #
 
 import os
@@ -43,17 +44,17 @@ DATA_DIR = os.path.join(
 )
 
 
-def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
-                   project_path=None, element_attrs=None):
+
+
+def create_workspace_element(tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
+                             project_path=None, element_attrs=None):
+
+    element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
+    element_path = os.path.join(project_path, 'elements')
+    workspace_cmd = os.path.join(project_path, 'workspace_cmd')
     if not workspace_dir:
-        workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
-    if not project_path:
-        project_path = os.path.join(datafiles.dirname, datafiles.basename)
-    else:
-        shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
+        workspace_dir = os.path.join(workspace_cmd, os.path.splitext(element_name)[0])
     bin_files_path = os.path.join(project_path, 'files', 'bin-files')
-    element_path = os.path.join(project_path, 'elements')
-    element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
 
     # Create our repo object of the given source type with
     # the bin files, and then collect the initial ref.
@@ -75,40 +76,87 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir
     _yaml.dump(element,
                os.path.join(element_path,
                             element_name))
+    return element_name, element_path, workspace_dir
 
-    # Assert that there is no reference, a track & fetch is needed
-    state = cli.get_element_state(project_path, element_name)
-    if track:
-        assert state == 'no reference'
-    else:
-        assert state == 'fetch needed'
-
-    # Now open the workspace, this should have the effect of automatically
-    # tracking & fetching the source from the repo.
-    args = ['workspace', 'open']
-    if track:
-        args.append('--track')
-    args.extend([element_name, workspace_dir])
-    result = cli.run(project=project_path, args=args)
-
-    result.assert_success()
-
-    # Assert that we are now buildable because the source is
-    # now cached.
-    assert cli.get_element_state(project_path, element_name) == 'buildable'
+
+def open_workspaces(cli, tmpdir, datafiles, kinds, track, suffixs=None, workspace_dir=None,
+                    project_path=None, element_attrs=None):
 
-    # Check that the executable hello file is found in the workspace
-    filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
-    assert os.path.exists(filename)
+    if not project_path:
+        project_path = os.path.join(datafiles.dirname, datafiles.basename)
+    else:
+        shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
+
+    workspace_cmd = os.path.join(project_path, 'workspace_cmd')
+    os.makedirs(workspace_cmd, exist_ok=True)
+
+    results = []
+
+    if suffixs is None:
+        suffixs = [''] * len(kinds)
+    else:
+        if len(suffixs) != len(kinds):
+            raise ValueError("suffixs must be the same length as kinds")
+
+    for suffix, kind in zip(suffixs, kinds):
+        element_name, element_path, workspace_dir_suffix \
+            = create_workspace_element(tmpdir, datafiles, kind, track, suffix, workspace_dir,
+                                       project_path, element_attrs)
+
+        # Assert that there is no reference, a track & fetch is needed
+        state = cli.get_element_state(project_path, element_name)
+        if track:
+            assert state == 'no reference'
+        else:
+            assert state == 'fetch needed'
+
+        # Now open the workspace, this should have the effect of automatically
+        # tracking & fetching the source from the repo.
+        args = ['workspace', 'open']
+        if track:
+            args.append('--track')
+        if workspace_dir is not None:
+            args.extend(['--directory', workspace_dir_suffix])
+        args.extend([element_name])
+        result = cli.run(cwd=workspace_cmd, project=project_path, args=args)
+
+        result.assert_success()
+
+        # Assert that we are now buildable because the source is
+        # now cached.
+        assert cli.get_element_state(project_path, element_name) == 'buildable'
+
+        # Check that the executable hello file is found in the workspace
+        filename = os.path.join(workspace_dir_suffix, 'usr', 'bin', 'hello')
+        assert os.path.exists(filename)
+        results.append((element_name, project_path, workspace_dir_suffix))
+
+    return results
 
-    return (element_name, project_path, workspace_dir)
 
+def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
+                   project_path=None, element_attrs=None):
+    return open_workspaces(cli, tmpdir, datafiles, (kind,), track, suffixs=(suffix,), workspace_dir=workspace_dir,
+                           project_path=project_path, element_attrs=element_attrs)[0]
 
 @pytest.mark.datafiles(DATA_DIR)
 @pytest.mark.parametrize("kind", repo_kinds)
 def test_open(cli, tmpdir, datafiles, kind):
     open_workspace(cli, tmpdir, datafiles, kind, False)
 
+@pytest.mark.datafiles(DATA_DIR)
+def test_open_multi(cli, tmpdir, datafiles):
+    workspaces = open_workspaces(cli, tmpdir, datafiles, repo_kinds, False, repo_kinds)
+
+    for (element_name, project, workspace), kind in zip(workspaces, repo_kinds):
+        workspace_lsdir = os.listdir(workspace)
+        if kind == 'git':
+            assert '.git' in workspace_lsdir
+        elif kind == 'bzr':
+            assert '.bzr' in workspace_lsdir
+        else:
+            assert '.git' not in workspace_lsdir
+            assert '.bzr' not in workspace_lsdir
 
 @pytest.mark.datafiles(DATA_DIR)
 def test_open_bzr_customize(cli, tmpdir, datafiles):
@@ -150,7 +198,7 @@ def test_open_force(cli, tmpdir, datafiles, kind):
 
     # Now open the workspace again with --force, this should happily succeed
     result = cli.run(project=project, args=[
-        'workspace', 'open', '--force', element_name, workspace
+        'workspace', 'open', '--force', '--directory', workspace, element_name
     ])
     result.assert_success()
 
@@ -165,7 +213,7 @@ def test_open_force_open(cli, tmpdir, datafiles, kind):
 
     # Now open the workspace again with --force, this should happily succeed
     result = cli.run(project=project, args=[
-        'workspace', 'open', '--force', element_name, workspace
+        'workspace', 'open', '--force', '--directory', workspace, element_name
    ])
     result.assert_success()
 
@@ -196,7 +244,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
 
     # Now open the workspace again with --force, this should happily succeed
     result = cli.run(project=project, args=[
-        'workspace', 'open', '--force', element_name2, workspace
+        'workspace', 'open', '--force', '--directory', workspace, element_name2
     ])
 
     # Assert that the file in workspace 1 has been replaced
@@ -504,7 +552,7 @@ def test_buildable_no_ref(cli, tmpdir, datafiles):
     # Now open the workspace. We don't need to checkout the source though.
     workspace = os.path.join(str(tmpdir), 'workspace-no-ref')
     os.makedirs(workspace)
-    args = ['workspace', 'open', '--no-checkout', element_name, workspace]
+    args = ['workspace', 'open', '--no-checkout', '--directory', workspace, element_name]
     result = cli.run(project=project, args=args)
     result.assert_success()
 
@@ -766,7 +814,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expected
                             element_name))
 
     # Make a change to the workspaces file
-    result = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
+    result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
     result.assert_success()
     result = cli.run(project=project, args=['workspace', 'close', '--remove-dir', element_name])
     result.assert_success()
tests/testutils/artifactshare.py

@@ -122,9 +122,8 @@ class ArtifactShare():
         # same algo for creating an artifact reference
         #
 
-        # Chop off the .bst suffix first
-        assert element_name.endswith('.bst')
-        element_name = element_name[:-4]
+        # Replace path separator and chop off the .bst suffix
+        element_name = os.path.splitext(element_name.replace(os.sep, '-'))[0]
 
         valid_chars = string.digits + string.ascii_letters + '-._'
         element_name = ''.join([