Phillip Smyth pushed to branch relative_workspaces at BuildStream / buildstream
Commits:
-
f5c8ff61
by Josh Smith at 2018-07-27T14:10:45Z
-
32ddb544
by Qinusty at 2018-07-27T14:57:30Z
-
30279ff8
by Phillip Smyth at 2018-07-27T15:02:37Z
8 changed files:
- buildstream/_context.py
- buildstream/_frontend/widget.py
- buildstream/_stream.py
- buildstream/_workspaces.py
- buildstream/element.py
- buildstream/utils.py
- tests/frontend/workspace.py
- tests/integration/shell.py
Changes:
| ... | ... | @@ -197,29 +197,55 @@ class Context(): |
| 197 | 197 |
"\nValid values are, for example: 800M 10G 1T 50%\n"
|
| 198 | 198 |
.format(str(e))) from e
|
| 199 | 199 |
|
| 200 |
- # If we are asked not to set a quota, we set it to the maximum
|
|
| 201 |
- # disk space available minus a headroom of 2GB, such that we
|
|
| 202 |
- # at least try to avoid raising Exceptions.
|
|
| 200 |
+ # Headroom intended to give BuildStream a bit of leeway.
|
|
| 201 |
+ # This acts as the minimum size of cache_quota and also
|
|
| 202 |
+ # is taken from the user requested cache_quota.
|
|
| 203 | 203 |
#
|
| 204 |
- # Of course, we might still end up running out during a build
|
|
| 205 |
- # if we end up writing more than 2G, but hey, this stuff is
|
|
| 206 |
- # already really fuzzy.
|
|
| 207 |
- #
|
|
| 208 |
- if cache_quota is None:
|
|
| 209 |
- stat = os.statvfs(artifactdir_volume)
|
|
| 210 |
- # Again, the artifact directory may not yet have been
|
|
| 211 |
- # created
|
|
| 212 |
- if not os.path.exists(self.artifactdir):
|
|
| 213 |
- cache_size = 0
|
|
| 214 |
- else:
|
|
| 215 |
- cache_size = utils._get_dir_size(self.artifactdir)
|
|
| 216 |
- cache_quota = cache_size + stat.f_bsize * stat.f_bavail
|
|
| 217 |
- |
|
| 218 | 204 |
if 'BST_TEST_SUITE' in os.environ:
|
| 219 | 205 |
headroom = 0
|
| 220 | 206 |
else:
|
| 221 | 207 |
headroom = 2e9
|
| 222 | 208 |
|
| 209 |
+ stat = os.statvfs(artifactdir_volume)
|
|
| 210 |
+ available_space = (stat.f_bsize * stat.f_bavail)
|
|
| 211 |
+ |
|
| 212 |
+ # Again, the artifact directory may not have been created yet
|
|
| 213 |
+ #
|
|
| 214 |
+ if not os.path.exists(self.artifactdir):
|
|
| 215 |
+ cache_size = 0
|
|
| 216 |
+ else:
|
|
| 217 |
+ cache_size = utils._get_dir_size(self.artifactdir)
|
|
| 218 |
+ |
|
| 219 |
+ # Ensure system has enough storage for the cache_quota
|
|
| 220 |
+ #
|
|
| 221 |
+ # If cache_quota is None, set it to the maximum it could possibly be.
|
|
| 222 |
+ #
|
|
| 223 |
+ # Also check that cache_quota is at least as large as our headroom.
|
|
| 224 |
+ #
|
|
| 225 |
+ if cache_quota is None: # Infinity, set to max system storage
|
|
| 226 |
+ cache_quota = cache_size + available_space
|
|
| 227 |
+ if cache_quota < headroom: # Check minimum
|
|
| 228 |
+ raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
| 229 |
+ "Invalid cache quota ({}): ".format(utils._pretty_size(cache_quota)) +
|
|
| 230 |
+ "BuildStream requires a minimum cache quota of 2G.")
|
|
| 231 |
+ elif cache_quota > cache_size + available_space: # Check maximum
|
|
| 232 |
+ raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
| 233 |
+ ("Your system does not have enough available " +
|
|
| 234 |
+ "space to support the cache quota specified.\n" +
|
|
| 235 |
+ "You currently have:\n" +
|
|
| 236 |
+ "- {used} of cache in use at {local_cache_path}\n" +
|
|
| 237 |
+ "- {available} of available system storage").format(
|
|
| 238 |
+ used=utils._pretty_size(cache_size),
|
|
| 239 |
+ local_cache_path=self.artifactdir,
|
|
| 240 |
+ available=utils._pretty_size(available_space)))
|
|
| 241 |
+ |
|
| 242 |
+ # Place a slight headroom (2e9 (2GB) on the cache_quota) into
|
|
| 243 |
+ # cache_quota to try and avoid exceptions.
|
|
| 244 |
+ #
|
|
| 245 |
+ # Of course, we might still end up running out during a build
|
|
| 246 |
+ # if we end up writing more than 2G, but hey, this stuff is
|
|
| 247 |
+ # already really fuzzy.
|
|
| 248 |
+ #
|
|
| 223 | 249 |
self.cache_quota = cache_quota - headroom
|
| 224 | 250 |
self.cache_lower_threshold = self.cache_quota / 2
|
| 225 | 251 |
|
| ... | ... | @@ -415,7 +415,7 @@ class LogLine(Widget): |
| 415 | 415 |
if "%{workspace-dirs" in format_:
|
| 416 | 416 |
workspace = element._get_workspace()
|
| 417 | 417 |
if workspace is not None:
|
| 418 |
- path = workspace.path.replace(os.getenv('HOME', '/root'), '~')
|
|
| 418 |
+ path = workspace.get_absolute_path().replace(os.getenv('HOME', '/root'), '~')
|
|
| 419 | 419 |
line = p.fmt_subst(line, 'workspace-dirs', "Workspace: {}".format(path))
|
| 420 | 420 |
else:
|
| 421 | 421 |
line = p.fmt_subst(
|
| ... | ... | @@ -478,7 +478,7 @@ class Stream(): |
| 478 | 478 |
workspace = workspaces.get_workspace(target._get_full_name())
|
| 479 | 479 |
if workspace and not force:
|
| 480 | 480 |
raise StreamError("Workspace '{}' is already defined at: {}"
|
| 481 |
- .format(target.name, workspace.path))
|
|
| 481 |
+ .format(target.name, workspace.get_absolute_path()))
|
|
| 482 | 482 |
|
| 483 | 483 |
# If we're going to checkout, we need at least a fetch,
|
| 484 | 484 |
# if we were asked to track first, we're going to fetch anyway.
|
| ... | ... | @@ -500,14 +500,14 @@ class Stream(): |
| 500 | 500 |
workspaces.save_config()
|
| 501 | 501 |
shutil.rmtree(directory)
|
| 502 | 502 |
try:
|
| 503 |
- os.makedirs(directory, exist_ok=True)
|
|
| 503 |
+ os.makedirs(workdir, exist_ok=True)
|
|
| 504 | 504 |
except OSError as e:
|
| 505 | 505 |
raise StreamError("Failed to create workspace directory: {}".format(e)) from e
|
| 506 | 506 |
|
| 507 | 507 |
workspaces.create_workspace(target._get_full_name(), workdir)
|
| 508 | 508 |
|
| 509 | 509 |
if not no_checkout:
|
| 510 |
- with target.timed_activity("Staging sources to {}".format(directory)):
|
|
| 510 |
+ with target.timed_activity("Staging sources to {}".format(workdir)):
|
|
| 511 | 511 |
target._open_workspace()
|
| 512 | 512 |
|
| 513 | 513 |
workspaces.save_config()
|
| ... | ... | @@ -528,12 +528,12 @@ class Stream(): |
| 528 | 528 |
# Remove workspace directory if prompted
|
| 529 | 529 |
if remove_dir:
|
| 530 | 530 |
with self._context.timed_activity("Removing workspace directory {}"
|
| 531 |
- .format(workspace.path)):
|
|
| 531 |
+ .format(workspace.get_absolute_path())):
|
|
| 532 | 532 |
try:
|
| 533 |
- shutil.rmtree(workspace.path)
|
|
| 533 |
+ shutil.rmtree(workspace.get_absolute_path())
|
|
| 534 | 534 |
except OSError as e:
|
| 535 | 535 |
raise StreamError("Could not remove '{}': {}"
|
| 536 |
- .format(workspace.path, e)) from e
|
|
| 536 |
+ .format(workspace.get_absolute_path(), e)) from e
|
|
| 537 | 537 |
|
| 538 | 538 |
# Delete the workspace and save the configuration
|
| 539 | 539 |
workspaces.delete_workspace(element_name)
|
| ... | ... | @@ -576,28 +576,30 @@ class Stream(): |
| 576 | 576 |
|
| 577 | 577 |
for element in elements:
|
| 578 | 578 |
workspace = workspaces.get_workspace(element._get_full_name())
|
| 579 |
- |
|
| 579 |
+ workspace_path = workspace.get_absolute_path()
|
|
| 580 | 580 |
if soft:
|
| 581 | 581 |
workspace.prepared = False
|
| 582 | 582 |
self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
|
| 583 |
- .format(element.name, workspace.path))
|
|
| 583 |
+ .format(element.name, workspace_path))
|
|
| 584 | 584 |
continue
|
| 585 | 585 |
|
| 586 | 586 |
with element.timed_activity("Removing workspace directory {}"
|
| 587 |
- .format(workspace.path)):
|
|
| 587 |
+ .format(workspace_path)):
|
|
| 588 | 588 |
try:
|
| 589 |
- shutil.rmtree(workspace.path)
|
|
| 589 |
+ shutil.rmtree(workspace_path)
|
|
| 590 | 590 |
except OSError as e:
|
| 591 | 591 |
raise StreamError("Could not remove '{}': {}"
|
| 592 |
- .format(workspace.path, e)) from e
|
|
| 592 |
+ .format(workspace_path, e)) from e
|
|
| 593 | 593 |
|
| 594 | 594 |
workspaces.delete_workspace(element._get_full_name())
|
| 595 |
- workspaces.create_workspace(element._get_full_name(), workspace.path)
|
|
| 595 |
+ workspaces.create_workspace(element._get_full_name(), workspace_path)
|
|
| 596 | 596 |
|
| 597 |
- with element.timed_activity("Staging sources to {}".format(workspace.path)):
|
|
| 597 |
+ with element.timed_activity("Staging sources to {}".format(workspace_path)):
|
|
| 598 | 598 |
element._open_workspace()
|
| 599 | 599 |
|
| 600 |
- self._message(MessageType.INFO, "Reset workspace for {} at: {}".format(element.name, workspace.path))
|
|
| 600 |
+ self._message(MessageType.INFO,
|
|
| 601 |
+ "Reset workspace for {} at: {}".format(element.name,
|
|
| 602 |
+ workspace_path))
|
|
| 601 | 603 |
|
| 602 | 604 |
workspaces.save_config()
|
| 603 | 605 |
|
| ... | ... | @@ -634,7 +636,7 @@ class Stream(): |
| 634 | 636 |
for element_name, workspace_ in self._context.get_workspaces().list():
|
| 635 | 637 |
workspace_detail = {
|
| 636 | 638 |
'element': element_name,
|
| 637 |
- 'directory': workspace_.path,
|
|
| 639 |
+ 'directory': workspace_.get_absolute_path(),
|
|
| 638 | 640 |
}
|
| 639 | 641 |
workspaces.append(workspace_detail)
|
| 640 | 642 |
|
| ... | ... | @@ -29,7 +29,7 @@ BST_WORKSPACE_FORMAT_VERSION = 3 |
| 29 | 29 |
# Hold on to a list of members which get serialized
|
| 30 | 30 |
_WORKSPACE_MEMBERS = [
|
| 31 | 31 |
'prepared',
|
| 32 |
- 'path',
|
|
| 32 |
+ '_path',
|
|
| 33 | 33 |
'last_successful',
|
| 34 | 34 |
'running_files'
|
| 35 | 35 |
]
|
| ... | ... | @@ -56,7 +56,7 @@ class Workspace(): |
| 56 | 56 |
def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
|
| 57 | 57 |
self.prepared = prepared
|
| 58 | 58 |
self.last_successful = last_successful
|
| 59 |
- self.path = path
|
|
| 59 |
+ self._path = path
|
|
| 60 | 60 |
self.running_files = running_files if running_files is not None else {}
|
| 61 | 61 |
|
| 62 | 62 |
self._toplevel_project = toplevel_project
|
| ... | ... | @@ -70,6 +70,7 @@ class Workspace(): |
| 70 | 70 |
# (dict) A dict representation of the workspace
|
| 71 | 71 |
#
|
| 72 | 72 |
def to_dict(self):
|
| 73 |
+ # Strip the leading underscore from _path
|
|
| 73 | 74 |
return {key: val for key, val in self.__dict__.items()
|
| 74 | 75 |
if key in _WORKSPACE_MEMBERS and val is not None}
|
| 75 | 76 |
|
| ... | ... | @@ -133,7 +134,7 @@ class Workspace(): |
| 133 | 134 |
if os.path.isdir(fullpath):
|
| 134 | 135 |
utils.copy_files(fullpath, directory)
|
| 135 | 136 |
else:
|
| 136 |
- destfile = os.path.join(directory, os.path.basename(self.path))
|
|
| 137 |
+ destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
|
|
| 137 | 138 |
utils.safe_copy(fullpath, destfile)
|
| 138 | 139 |
|
| 139 | 140 |
# add_running_files()
|
| ... | ... | @@ -189,7 +190,7 @@ class Workspace(): |
| 189 | 190 |
filelist = utils.list_relative_paths(fullpath)
|
| 190 | 191 |
filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
|
| 191 | 192 |
else:
|
| 192 |
- filelist = [(self.path, fullpath)]
|
|
| 193 |
+ filelist = [(self.get_absolute_path(), fullpath)]
|
|
| 193 | 194 |
|
| 194 | 195 |
self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]
|
| 195 | 196 |
|
| ... | ... | @@ -200,7 +201,7 @@ class Workspace(): |
| 200 | 201 |
# Returns: The absolute path of the element's workspace.
|
| 201 | 202 |
#
|
| 202 | 203 |
def get_absolute_path(self):
|
| 203 |
- return os.path.join(self._toplevel_project.directory, self.path)
|
|
| 204 |
+ return os.path.join(self._toplevel_project.directory, self._path)
|
|
| 204 | 205 |
|
| 205 | 206 |
|
| 206 | 207 |
# Workspaces()
|
| ... | ... | @@ -236,6 +237,9 @@ class Workspaces(): |
| 236 | 237 |
# path (str) - The path in which the workspace should be kept
|
| 237 | 238 |
#
|
| 238 | 239 |
def create_workspace(self, element_name, path):
|
| 240 |
+ if path.startswith(self._toplevel_project.directory):
|
|
| 241 |
+ path = os.path.relpath(path, self._toplevel_project.directory)
|
|
| 242 |
+ |
|
| 239 | 243 |
self._workspaces[element_name] = Workspace(self._toplevel_project, path=path)
|
| 240 | 244 |
|
| 241 | 245 |
return self._workspaces[element_name]
|
| ... | ... | @@ -1324,7 +1324,7 @@ class Element(Plugin): |
| 1324 | 1324 |
# If mount_workspaces is set and we're doing incremental builds,
|
| 1325 | 1325 |
# the workspace is already mounted into the sandbox.
|
| 1326 | 1326 |
if not (mount_workspaces and self.__can_build_incrementally()):
|
| 1327 |
- with self.timed_activity("Staging local files at {}".format(workspace.path)):
|
|
| 1327 |
+ with self.timed_activity("Staging local files at {}".format(workspace.get_absolute_path())):
|
|
| 1328 | 1328 |
workspace.stage(directory)
|
| 1329 | 1329 |
else:
|
| 1330 | 1330 |
# No workspace, stage directly
|
| ... | ... | @@ -1484,7 +1484,7 @@ class Element(Plugin): |
| 1484 | 1484 |
sandbox_path = os.path.join(sandbox_root,
|
| 1485 | 1485 |
self.__staged_sources_directory.lstrip(os.sep))
|
| 1486 | 1486 |
try:
|
| 1487 |
- utils.copy_files(workspace.path, sandbox_path)
|
|
| 1487 |
+ utils.copy_files(workspace.get_absolute_path(), sandbox_path)
|
|
| 1488 | 1488 |
except UtilError as e:
|
| 1489 | 1489 |
self.warn("Failed to preserve workspace state for failed build sysroot: {}"
|
| 1490 | 1490 |
.format(e))
|
| ... | ... | @@ -1786,7 +1786,7 @@ class Element(Plugin): |
| 1786 | 1786 |
source._init_workspace(temp)
|
| 1787 | 1787 |
|
| 1788 | 1788 |
# Now hardlink the files into the workspace target.
|
| 1789 |
- utils.link_files(temp, workspace.path)
|
|
| 1789 |
+ utils.link_files(temp, workspace.get_absolute_path())
|
|
| 1790 | 1790 |
|
| 1791 | 1791 |
# _get_workspace():
|
| 1792 | 1792 |
#
|
| ... | ... | @@ -612,6 +612,27 @@ def _parse_size(size, volume): |
| 612 | 612 |
return int(num) * 1024**units.index(unit)
|
| 613 | 613 |
|
| 614 | 614 |
|
| 615 |
+# _pretty_size()
|
|
| 616 |
+#
|
|
| 617 |
+# Converts a number of bytes into a string representation in KB, MB, GB, TB
|
|
| 618 |
+# represented as K, M, G, T etc.
|
|
| 619 |
+#
|
|
| 620 |
+# Args:
|
|
| 621 |
+# size (int): The size to convert in bytes.
|
|
| 622 |
+# dec_places (int): The number of decimal places to output to.
|
|
| 623 |
+#
|
|
| 624 |
+# Returns:
|
|
| 625 |
+# (str): The string representation of the number of bytes in the largest suitable unit.
|
|
| 626 |
+def _pretty_size(size, dec_places=0):
|
|
| 627 |
+ psize = size
|
|
| 628 |
+ unit = 'B'
|
|
| 629 |
+ for unit in ('B', 'K', 'M', 'G', 'T'):
|
|
| 630 |
+ if psize < 1024:
|
|
| 631 |
+ break
|
|
| 632 |
+ else:
|
|
| 633 |
+ psize /= 1024
|
|
| 634 |
+ return "{size:g}{unit}".format(size=round(psize, dec_places), unit=unit)
|
|
| 635 |
+ |
|
| 615 | 636 |
# A sentinel to be used as a default argument for functions that need
|
| 616 | 637 |
# to distinguish between a kwarg set to None and an unset kwarg.
|
| 617 | 638 |
_sentinel = object()
|
| ... | ... | @@ -18,7 +18,10 @@ DATA_DIR = os.path.join( |
| 18 | 18 |
)
|
| 19 | 19 |
|
| 20 | 20 |
|
| 21 |
-def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
|
|
| 21 |
+def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', rerun=False, new_dir=None):
|
|
| 22 |
+ if os.path.isdir(os.path.join(str(tmpdir), 'repo')):
|
|
| 23 |
+ shutil.rmtree(os.path.join(str(tmpdir), 'repo'))
|
|
| 24 |
+ |
|
| 22 | 25 |
project = os.path.join(datafiles.dirname, datafiles.basename)
|
| 23 | 26 |
bin_files_path = os.path.join(project, 'files', 'bin-files')
|
| 24 | 27 |
element_path = os.path.join(project, 'elements')
|
| ... | ... | @@ -45,11 +48,12 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''): |
| 45 | 48 |
element_name))
|
| 46 | 49 |
|
| 47 | 50 |
# Assert that there is no reference, a track & fetch is needed
|
| 48 |
- state = cli.get_element_state(project, element_name)
|
|
| 49 |
- if track:
|
|
| 50 |
- assert state == 'no reference'
|
|
| 51 |
- else:
|
|
| 52 |
- assert state == 'fetch needed'
|
|
| 51 |
+ if not rerun:
|
|
| 52 |
+ state = cli.get_element_state(project, element_name)
|
|
| 53 |
+ if track:
|
|
| 54 |
+ assert state == 'no reference'
|
|
| 55 |
+ else:
|
|
| 56 |
+ assert state == 'fetch needed'
|
|
| 53 | 57 |
|
| 54 | 58 |
# Now open the workspace, this should have the effect of automatically
|
| 55 | 59 |
# tracking & fetching the source from the repo.
|
| ... | ... | @@ -59,15 +63,19 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''): |
| 59 | 63 |
args.extend([element_name, workspace])
|
| 60 | 64 |
|
| 61 | 65 |
result = cli.run(project=project, args=args)
|
| 62 |
- result.assert_success()
|
|
| 63 | 66 |
|
| 64 |
- # Assert that we are now buildable because the source is
|
|
| 65 |
- # now cached.
|
|
| 66 |
- assert cli.get_element_state(project, element_name) == 'buildable'
|
|
| 67 |
+ if not rerun:
|
|
| 68 |
+ result.assert_success()
|
|
| 67 | 69 |
|
| 68 |
- # Check that the executable hello file is found in the workspace
|
|
| 69 |
- filename = os.path.join(workspace, 'usr', 'bin', 'hello')
|
|
| 70 |
- assert os.path.exists(filename)
|
|
| 70 |
+ # Assert that we are now buildable because the source is
|
|
| 71 |
+ # now cached.
|
|
| 72 |
+ assert cli.get_element_state(project, element_name) == 'buildable'
|
|
| 73 |
+ |
|
| 74 |
+ # Check that the executable hello file is found in the workspace
|
|
| 75 |
+ filename = os.path.join(workspace, 'usr', 'bin', 'hello')
|
|
| 76 |
+ assert os.path.exists(filename)
|
|
| 77 |
+ else:
|
|
| 78 |
+ result.assert_main_error(ErrorDomain.STREAM, None)
|
|
| 71 | 79 |
|
| 72 | 80 |
return (element_name, project, workspace)
|
| 73 | 81 |
|
| ... | ... | @@ -78,6 +86,43 @@ def test_open(cli, tmpdir, datafiles, kind): |
| 78 | 86 |
open_workspace(cli, tmpdir, datafiles, kind, False)
|
| 79 | 87 |
|
| 80 | 88 |
|
| 89 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 90 |
+@pytest.mark.parametrize("kind", repo_kinds)
|
|
| 91 |
+def test_open_after_move_interally(cli, tmpdir, datafiles, kind):
|
|
| 92 |
+ open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 93 |
+ workspace = os.path.join(str(tmpdir), 'workspace')
|
|
| 94 |
+ shutil.move(workspace, os.path.join(str(tmpdir), 'workspace2'))
|
|
| 95 |
+ open_workspace(cli, tmpdir, datafiles, kind, False, '', True)
|
|
| 96 |
+ filename = os.path.join(workspace + '2', 'usr', 'bin', 'hello')
|
|
| 97 |
+ assert os.path.exists(filename)
|
|
| 98 |
+ |
|
| 99 |
+ |
|
| 100 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 101 |
+@pytest.mark.parametrize("kind", repo_kinds)
|
|
| 102 |
+def test_open_after_move_externally(cli, tmpdir, datafiles, kind):
|
|
| 103 |
+ open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 104 |
+ workspace = os.path.join(str(tmpdir), 'workspace')
|
|
| 105 |
+ tmp_dir = os.path.join(str(tmpdir), '2')
|
|
| 106 |
+ shutil.move(workspace, os.path.join(tmp_dir, 'workspace'))
|
|
| 107 |
+ open_workspace(cli, tmpdir, datafiles, kind, False, '', True, tmp_dir)
|
|
| 108 |
+ filename = os.path.join(os.path.join(tmp_dir, 'workspace'), 'usr', 'bin', 'hello')
|
|
| 109 |
+ assert os.path.exists(filename)
|
|
| 110 |
+ shutil.rmtree(tmp_dir)
|
|
| 111 |
+ |
|
| 112 |
+ |
|
| 113 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
| 114 |
+@pytest.mark.parametrize("kind", repo_kinds)
|
|
| 115 |
+def test_open_after_move_project(cli, tmpdir, datafiles, kind):
|
|
| 116 |
+ open_workspace(cli, tmpdir, datafiles, kind, False)
|
|
| 117 |
+ workspace = os.path.join(str(tmpdir), 'workspace')
|
|
| 118 |
+ tmp_dir = os.path.join(str(tmpdir), '..', 'new_dir')
|
|
| 119 |
+ shutil.move(workspace, os.path.join(tmp_dir, 'workspace'))
|
|
| 120 |
+ open_workspace(cli, tmpdir, datafiles, kind, False, '', True, tmp_dir)
|
|
| 121 |
+ filename = os.path.join(os.path.join(tmp_dir, 'workspace'), 'usr', 'bin', 'hello')
|
|
| 122 |
+ assert os.path.exists(filename)
|
|
| 123 |
+ shutil.rmtree(tmp_dir)
|
|
| 124 |
+ |
|
| 125 |
+ |
|
| 81 | 126 |
@pytest.mark.datafiles(DATA_DIR)
|
| 82 | 127 |
def test_open_bzr_customize(cli, tmpdir, datafiles):
|
| 83 | 128 |
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, "bzr", False)
|
| ... | ... | @@ -330,6 +330,7 @@ def test_sysroot_workspace_visible(cli, tmpdir, datafiles): |
| 330 | 330 |
# Obtain a copy of the hello.c content from the workspace
|
| 331 | 331 |
#
|
| 332 | 332 |
workspace_hello_path = os.path.join(cli.directory, 'workspace', 'hello.c')
|
| 333 |
+ |
|
| 333 | 334 |
assert os.path.exists(workspace_hello_path)
|
| 334 | 335 |
with open(workspace_hello_path, 'r') as f:
|
| 335 | 336 |
workspace_hello = f.read()
|
