Valentin David pushed to branch valentindavid/fix_included_junction_track at BuildStream / buildstream
Commits:

- ed653fbc by Chandan Singh at 2018-08-08T00:06:28Z
- 65f382f1 by Chandan Singh at 2018-08-08T10:52:32Z
- c68dcab8 by Tiago Gomes at 2018-08-09T09:18:43Z
- 35ab0335 by Tiago Gomes at 2018-08-09T10:16:24Z
- 8aa33e23 by Valentin David at 2018-08-09T12:49:17Z
- ef7810f3 by Valentin David at 2018-08-09T14:06:50Z
- 2d061173 by Javier Jardón at 2018-08-09T16:31:15Z
- 4e1488ee by Javier Jardón at 2018-08-09T16:31:15Z
- de955834 by Javier Jardón at 2018-08-09T17:22:24Z
- 97595585 by Jim MacArthur at 2018-08-09T17:35:19Z
- a602365c by Tristan Van Berkom at 2018-08-09T20:03:52Z
- db0478ab by Phillip Smyth at 2018-08-10T10:17:47Z
- 04cee9a9 by Phillip Smyth at 2018-08-10T11:12:31Z
- 5dcecbad by Valentin David at 2018-08-10T12:18:55Z
- 2e8db54e by Valentin David at 2018-08-10T12:53:19Z
- 7faef88e by Valentin David at 2018-08-10T14:44:49Z

16 changed files:
- .gitlab-ci.yml
- NEWS
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_artifactcache/casserver.py
- buildstream/_frontend/widget.py
- buildstream/_fuse/fuse.py
- buildstream/_fuse/hardlinks.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/_workspaces.py
- buildstream/element.py
- buildstream/sandbox/_sandboxbwrap.py
- setup.py
- tests/frontend/track.py
- tests/frontend/workspace.py
Changes:
=====================================
.gitlab-ci.yml
=====================================
@@ -26,15 +26,6 @@ source_dist:
   - tar -ztf dist/*
   - tarball=$(cd dist && echo $(ls *))
 
-  # Create an installer script
-  - |
-    cat > dist/install.sh << EOF
-    #!/bin/sh
-    tar -zxf ${tarball}
-    cd ${tarball%.tar.gz}
-    pip3 install --no-index .
-    EOF
-
   # unpack tarball as `dist/buildstream` directory
   - |
     cat > dist/unpack.sh << EOF
@@ -44,7 +35,6 @@ source_dist:
     EOF
 
   # Make our helpers executable
-  - chmod +x dist/install.sh
   - chmod +x dist/unpack.sh
   artifacts:
     paths:
=====================================
NEWS
=====================================
@@ -17,6 +17,10 @@ buildstream 1.1.5
     to not be scheduled and fail during artifact assembly,
     and display the retry prompt during an interactive session.
 
+  o Due to enabling the use of relative workspaces, "Legacy" workspaces
+    may need to be closed and remade before the changes will affect them.
+    Downgrading after using this feature may result in workspaces
+    not functioning correctly
 
 =================
 buildstream 1.1.4
=====================================
buildstream/_artifactcache/artifactcache.py
=====================================
@@ -80,6 +80,8 @@ class ArtifactCache():
         self.context = context
         self.required_artifacts = set()
         self.extractdir = os.path.join(context.artifactdir, 'extract')
+        self.tmpdir = os.path.join(context.artifactdir, 'tmp')
+
         self.max_size = context.cache_quota
         self.estimated_size = None
 
@@ -89,7 +91,8 @@ class ArtifactCache():
         self._local = False
         self.cache_size = None
 
-        os.makedirs(context.artifactdir, exist_ok=True)
+        os.makedirs(self.extractdir, exist_ok=True)
+        os.makedirs(self.tmpdir, exist_ok=True)
 
     ################################################
     #  Methods implemented on the abstract class   #
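Note on the hunk above: keeping `tmpdir` under `context.artifactdir` means temporary files live on the same filesystem as the cache contents, so a finished file can be hard-linked or renamed into place atomically. A minimal sketch of that pattern, illustrative only and not BuildStream code:

import hashlib
import os
import tempfile

def add_blob(objects_dir, tmp_dir, data):
    # Stage the blob in a tmp dir on the same filesystem as objects_dir,
    # so the final os.link() cannot fail with EXDEV (cross-device link).
    os.makedirs(objects_dir, exist_ok=True)
    os.makedirs(tmp_dir, exist_ok=True)
    with tempfile.NamedTemporaryFile(dir=tmp_dir) as out:
        out.write(data)
        out.flush()
        digest = hashlib.sha256(data).hexdigest()
        target = os.path.join(objects_dir, digest)
        if not os.path.exists(target):
            os.link(out.name, target)  # atomic insertion into the store
        return digest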
=====================================
buildstream/_artifactcache/cascache.py
=====================================
@@ -56,7 +56,8 @@ class CASCache(ArtifactCache):
         super().__init__(context)
 
         self.casdir = os.path.join(context.artifactdir, 'cas')
-        os.makedirs(os.path.join(self.casdir, 'tmp'), exist_ok=True)
+        os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
+        os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
 
         self._enable_push = enable_push
 
@@ -85,8 +86,6 @@ class CASCache(ArtifactCache):
             # artifact has already been extracted
             return dest
 
-        os.makedirs(self.extractdir, exist_ok=True)
-
         with tempfile.TemporaryDirectory(prefix='tmp', dir=self.extractdir) as tmpdir:
             checkoutdir = os.path.join(tmpdir, ref)
             self._checkout(checkoutdir, tree)
@@ -394,7 +393,7 @@ class CASCache(ArtifactCache):
         try:
             h = hashlib.sha256()
             # Always write out new file to avoid corruption if input file is modified
-            with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
+            with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
                 # Set mode bits to 0644
                 os.chmod(out.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
 
@@ -764,7 +763,7 @@ class CASCache(ArtifactCache):
             # already in local cache
             return
 
-        with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
+        with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
             self._fetch_blob(remote, tree, out)
 
             directory = remote_execution_pb2.Directory()
@@ -778,7 +777,7 @@ class CASCache(ArtifactCache):
                 # already in local cache
                 continue
 
-            with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as f:
+            with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
                 self._fetch_blob(remote, filenode.digest, f)
 
                 digest = self.add_object(path=f.name)
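The constructor now pre-creates the `refs/heads` and `objects` directories that make up the CAS layout, while all temporary files move to the shared `self.tmpdir`. A hedged sketch of how such a layout is typically addressed; the helper names are illustrative, not the actual CASCache API:

import os

def ref_path(casdir, ref):
    # Named refs live under refs/heads/, mirroring a git-like layout.
    return os.path.join(casdir, 'refs', 'heads', ref)

def object_path(casdir, digest_hex):
    # Objects are content-addressed by their SHA-256 digest.
    return os.path.join(casdir, 'objects', digest_hex[:2], digest_hex[2:])

print(ref_path('/cache/cas', 'project/element/key'))
print(object_path('/cache/cas', 'ab' + 'cd' * 31))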
=====================================
buildstream/_artifactcache/casserver.py
=====================================
@@ -161,7 +161,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
         offset = 0
         finished = False
         resource_name = None
-        with tempfile.NamedTemporaryFile(dir=os.path.join(self.cas.casdir, 'tmp')) as out:
+        with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
             for request in request_iterator:
                 assert not finished
                 assert request.write_offset == offset
=====================================
buildstream/_frontend/widget.py
=====================================
@@ -418,7 +418,9 @@ class LogLine(Widget):
         if "%{workspace-dirs" in format_:
             workspace = element._get_workspace()
             if workspace is not None:
-                path = workspace.path.replace(os.getenv('HOME', '/root'), '~')
+                path = workspace.get_absolute_path()
+                if path.startswith("~/"):
+                    path = os.path.join(os.getenv('HOME', '/root'), path[2:])
                 line = p.fmt_subst(line, 'workspace-dirs', "Workspace: {}".format(path))
             else:
                 line = p.fmt_subst(
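The replacement logic expands a leading `~/` in the stored workspace path instead of abbreviating `$HOME` for display. For the `~/` case this is roughly what `os.path.expanduser()` does; a small sketch of the equivalence, with the fallback to `/root` taken from the hunk above:

import os

def expand_home(path):
    # Same shape as the widget logic: expand a leading "~/" using $HOME,
    # falling back to /root when HOME is unset.
    if path.startswith("~/"):
        return os.path.join(os.getenv('HOME', '/root'), path[2:])
    return path

assert expand_home("~/ws") == os.path.join(os.getenv('HOME', '/root'), "ws")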
=====================================
buildstream/_fuse/fuse.py
=====================================
@@ -757,7 +757,11 @@ class FUSE(object):
         if self.raw_fi:
             return self.operations('create', path, mode, fi)
         else:
-            fi.fh = self.operations('create', path, mode)
+            # This line is different from upstream to fix issues
+            # reading file opened with O_CREAT|O_RDWR.
+            # See issue #143.
+            fi.fh = self.operations('create', path, mode, fi.flags)
+            # END OF MODIFICATION
             return 0
 
     def ftruncate(self, path, length, fip):
=====================================
buildstream/_fuse/hardlinks.py
=====================================
@@ -185,12 +185,12 @@ class SafeHardlinkOps(Operations):
 
         return os.open(full_path, flags)
 
-    def create(self, path, mode, fi=None):
+    def create(self, path, mode, flags):
         full_path = self._full_path(path)
 
         # If it already exists, ensure it's a copy first
         self._ensure_copy(full_path)
-        return os.open(full_path, os.O_WRONLY | os.O_CREAT, mode)
+        return os.open(full_path, flags, mode)
 
     def read(self, path, length, offset, fh):
         os.lseek(fh, offset, os.SEEK_SET)
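These two hunks are a pair: the vendored FUSE glue now forwards the caller's open flags into `create()`, and `SafeHardlinkOps.create()` honours them instead of forcing `O_WRONLY`. A standalone demonstration of the failure mode being fixed, using plain `os` calls rather than BuildStream code:

import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'f')

fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o644)  # old behaviour
os.write(fd, b'hello')
try:
    os.pread(fd, 5, 0)          # read back through the same fd
except OSError as e:
    print('read failed:', e)    # EBADF: the fd is write-only
os.close(fd)

fd = os.open(path, os.O_RDWR | os.O_CREAT, 0o644)    # honouring caller flags
os.write(fd, b'hello')
print(os.pread(fd, 5, 0))       # b'hello'
os.close(fd)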
=====================================
buildstream/_project.py
=====================================
@@ -19,7 +19,6 @@
 #        Tiago Gomes <tiago.gomes@codethink.co.uk>
 
 import os
-import multiprocessing   # for cpu_count()
 from collections import Mapping, OrderedDict
 from pluginbase import PluginBase
 from . import utils
@@ -572,7 +571,10 @@ class Project():
 
         # Extend variables with automatic variables and option exports
         # Initialize it as a string as all variables are processed as strings.
-        output.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
+        # Based on some testing (mainly on AWS), maximum effective
+        # max-jobs value seems to be around 8-10 if we have enough cores
+        # users should set values based on workload and build infrastructure
+        output.base_variables['max-jobs'] = str(min(len(os.sched_getaffinity(0)), 8))
 
         # Export options into variables, if that was requested
         output.options.export_variables(output.base_variables)
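Unlike `multiprocessing.cpu_count()`, `os.sched_getaffinity(0)` (Linux-only) reports the CPUs this process may actually run on, so container or `taskset` restrictions are respected before the cap is applied. A sketch of the new default computation:

import os

available_cpus = len(os.sched_getaffinity(0))  # honours CPU affinity masks
max_jobs = min(available_cpus, 8)              # capped at 8 per the testing noted above
print(max_jobs)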
=====================================
buildstream/_stream.py
=====================================
@@ -267,8 +267,11 @@ class Stream():
               except_targets=None,
               cross_junctions=False):
 
+        # We pass no target to build. Only to track. Passing build targets
+        # would fully load project configuration which might not be
+        # possible before tracking is done.
         _, elements = \
-            self._load(targets, targets,
+            self._load([], targets,
                        selection=selection, track_selection=selection,
                        except_targets=except_targets,
                        track_except_targets=except_targets,
@@ -460,7 +463,7 @@ class Stream():
                            selection=PipelineSelection.REDIRECT,
                            track_selection=PipelineSelection.REDIRECT)
         target = elements[0]
-        workdir = os.path.abspath(directory)
+        directory = os.path.abspath(directory)
 
         if not list(target.sources()):
             build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
@@ -476,7 +479,7 @@ class Stream():
         workspace = workspaces.get_workspace(target._get_full_name())
         if workspace and not force:
             raise StreamError("Workspace '{}' is already defined at: {}"
-                              .format(target.name, workspace.path))
+                              .format(target.name, workspace.get_absolute_path()))
 
         # If we're going to checkout, we need at least a fetch,
         # if we were asked to track first, we're going to fetch anyway.
@@ -502,7 +505,7 @@ class Stream():
             except OSError as e:
                 raise StreamError("Failed to create workspace directory: {}".format(e)) from e
 
-        workspaces.create_workspace(target._get_full_name(), workdir)
+        workspaces.create_workspace(target._get_full_name(), directory)
 
         if not no_checkout:
             with target.timed_activity("Staging sources to {}".format(directory)):
@@ -526,12 +529,12 @@ class Stream():
         # Remove workspace directory if prompted
         if remove_dir:
             with self._context.timed_activity("Removing workspace directory {}"
-                                              .format(workspace.path)):
+                                              .format(workspace.get_absolute_path())):
                 try:
-                    shutil.rmtree(workspace.path)
+                    shutil.rmtree(workspace.get_absolute_path())
                 except OSError as e:
                     raise StreamError("Could not remove '{}': {}"
-                                      .format(workspace.path, e)) from e
+                                      .format(workspace.get_absolute_path(), e)) from e
 
         # Delete the workspace and save the configuration
         workspaces.delete_workspace(element_name)
@@ -574,28 +577,30 @@ class Stream():
 
         for element in elements:
             workspace = workspaces.get_workspace(element._get_full_name())
-
+            workspace_path = workspace.get_absolute_path()
             if soft:
                 workspace.prepared = False
                 self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
-                              .format(element.name, workspace.path))
+                              .format(element.name, workspace_path))
                 continue
 
             with element.timed_activity("Removing workspace directory {}"
-                                        .format(workspace.path)):
+                                        .format(workspace_path)):
                 try:
-                    shutil.rmtree(workspace.path)
+                    shutil.rmtree(workspace_path)
                 except OSError as e:
                     raise StreamError("Could not remove '{}': {}"
-                                      .format(workspace.path, e)) from e
+                                      .format(workspace_path, e)) from e
 
             workspaces.delete_workspace(element._get_full_name())
-            workspaces.create_workspace(element._get_full_name(), workspace.path)
+            workspaces.create_workspace(element._get_full_name(), workspace_path)
 
-            with element.timed_activity("Staging sources to {}".format(workspace.path)):
+            with element.timed_activity("Staging sources to {}".format(workspace_path)):
                 element._open_workspace()
 
-            self._message(MessageType.INFO, "Reset workspace for {} at: {}".format(element.name, workspace.path))
+            self._message(MessageType.INFO,
+                          "Reset workspace for {} at: {}".format(element.name,
                                                                  workspace_path))
 
         workspaces.save_config()
 
@@ -632,7 +637,7 @@ class Stream():
         for element_name, workspace_ in self._context.get_workspaces().list():
             workspace_detail = {
                 'element': element_name,
-                'directory': workspace_.path,
+                'directory': workspace_.get_absolute_path(),
             }
             workspaces.append(workspace_detail)
 
@@ -822,6 +827,12 @@ class Stream():
     #
     # A convenience method for loading element lists
     #
+    # If `targets` is not empty, the used project configuration will be
+    # fully loaded. If `targets` is empty, tracking will still be
+    # resolved for elements in `track_targets`, but no build pipeline
+    # will be resolved. This behavior is important for track() to
+    # not trigger full loading of project configuration.
+    #
     # Args:
     #    targets (list of str): Main targets to load
     #    track_targets (list of str): Tracking targets
@@ -869,7 +880,7 @@ class Stream():
         #
         # This can happen with `bst build --track`
         #
-        if not self._pipeline.targets_include(elements, track_elements):
+        if targets and not self._pipeline.targets_include(elements, track_elements):
             raise StreamError("Specified tracking targets that are not "
                               "within the scope of primary targets")
 
@@ -905,6 +916,10 @@ class Stream():
         for element in track_selected:
             element._schedule_tracking()
 
+        if not targets:
+            self._pipeline.resolve_elements(track_selected)
+            return [], track_selected
+
         # ArtifactCache.setup_remotes expects all projects to be fully loaded
         for project in self._context.get_projects():
             project.ensure_fully_loaded()
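The thread running through these hunks: `track()` now passes an empty build-target list, `_load()` skips the inclusion check when there are no build targets, and it returns early once tracking elements are resolved. A toy model of that control flow; names here are illustrative, not the real Stream/Pipeline API:

# With no build targets, loading stops as soon as tracking elements are
# resolved, before the full project configuration is required.
def load(build_targets, track_targets):
    tracked = ["resolved:{}".format(t) for t in track_targets]
    if not build_targets:
        # Track-only invocation (bst track): early return.
        return [], tracked
    # Build invocation (bst build --track): resolve the full pipeline,
    # which requires the complete project configuration.
    pipeline = ["built:{}".format(t) for t in build_targets]
    return pipeline, tracked

print(load([], ["junction.bst"]))
print(load(["app.bst"], ["junction.bst"]))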
=====================================
buildstream/_workspaces.py
=====================================
@@ -26,14 +26,6 @@ from ._exceptions import LoadError, LoadErrorReason
 
 BST_WORKSPACE_FORMAT_VERSION = 3
 
-# Hold on to a list of members which get serialized
-_WORKSPACE_MEMBERS = [
-    'prepared',
-    'path',
-    'last_successful',
-    'running_files'
-]
-
 
 # Workspace()
 #
@@ -56,7 +48,7 @@ class Workspace():
     def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
         self.prepared = prepared
         self.last_successful = last_successful
-        self.path = path
+        self._path = path
         self.running_files = running_files if running_files is not None else {}
 
         self._toplevel_project = toplevel_project
@@ -64,14 +56,20 @@ class Workspace():
 
     # to_dict()
     #
-    # Convert this object to a dict for serialization purposes
+    # Convert a list of members which get serialized to a dict for serialization purposes
     #
    # Returns:
     #     (dict) A dict representation of the workspace
     #
     def to_dict(self):
-        return {key: val for key, val in self.__dict__.items()
-                if key in _WORKSPACE_MEMBERS and val is not None}
+        ret = {
+            'prepared': self.prepared,
+            'path': self._path,
+            'running_files': self.running_files
+        }
+        if self.last_successful is not None:
+            ret["last_successful"] = self.last_successful
+        return ret
 
     # from_dict():
     #
@@ -103,15 +101,7 @@ class Workspace():
     #     True if the workspace differs from 'other', otherwise False
     #
     def differs(self, other):
-
-        for member in _WORKSPACE_MEMBERS:
-            member_a = getattr(self, member)
-            member_b = getattr(other, member)
-
-            if member_a != member_b:
-                return True
-
-        return False
+        return self.to_dict() != other.to_dict()
 
     # invalidate_key()
     #
@@ -133,7 +123,7 @@ class Workspace():
         if os.path.isdir(fullpath):
             utils.copy_files(fullpath, directory)
         else:
-            destfile = os.path.join(directory, os.path.basename(self.path))
+            destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
             utils.safe_copy(fullpath, destfile)
 
     # add_running_files()
@@ -189,7 +179,7 @@ class Workspace():
             filelist = utils.list_relative_paths(fullpath)
             filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
         else:
-            filelist = [(self.path, fullpath)]
+            filelist = [(self.get_absolute_path(), fullpath)]
 
         self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]
 
@@ -200,7 +190,7 @@ class Workspace():
     # Returns: The absolute path of the element's workspace.
     #
     def get_absolute_path(self):
-        return os.path.join(self._toplevel_project.directory, self.path)
+        return os.path.join(self._toplevel_project.directory, self._path)
 
 
 # Workspaces()
@@ -236,6 +226,9 @@ class Workspaces():
     #    path (str) - The path in which the workspace should be kept
     #
     def create_workspace(self, element_name, path):
+        if path.startswith(self._toplevel_project.directory):
+            path = os.path.relpath(path, self._toplevel_project.directory)
+
         self._workspaces[element_name] = Workspace(self._toplevel_project, path=path)
 
         return self._workspaces[element_name]
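With `create_workspace()` storing project-internal paths relative to the project directory and `get_absolute_path()` re-joining them, a project tree can be moved without breaking its workspaces, while external workspaces keep their absolute paths. A sketch of the round-trip; the helper names are illustrative:

import os

def store_path(project_dir, path):
    # Paths under the project are stored relative to it.
    if path.startswith(project_dir):
        path = os.path.relpath(path, project_dir)
    return path

def absolute_path(project_dir, stored):
    # os.path.join leaves already-absolute (external) paths untouched.
    return os.path.join(project_dir, stored)

stored = store_path('/home/user/project', '/home/user/project/workspace')
assert stored == 'workspace'
assert absolute_path('/home/user/project', stored) == '/home/user/project/workspace'
assert absolute_path('/new/location', stored) == '/new/location/workspace'
assert absolute_path('/home/user/project', '/elsewhere/ws') == '/elsewhere/ws'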
=====================================
buildstream/element.py
=====================================
@@ -1403,7 +1403,8 @@ class Element(Plugin):
             # If mount_workspaces is set and we're doing incremental builds,
             # the workspace is already mounted into the sandbox.
             if not (mount_workspaces and self.__can_build_incrementally()):
-                with self.timed_activity("Staging local files at {}".format(workspace.path)):
+                with self.timed_activity("Staging local files at {}"
+                                         .format(workspace.get_absolute_path())):
                     workspace.stage(temp_staging_directory)
         else:
             # No workspace, stage directly
@@ -1566,7 +1567,7 @@ class Element(Plugin):
             path_components = self.__staged_sources_directory.lstrip(os.sep).split(os.sep)
             sandbox_vpath = sandbox_vroot.descend(path_components)
             try:
-                sandbox_vpath.import_files(workspace.path)
+                sandbox_vpath.import_files(workspace.get_absolute_path())
             except UtilError as e:
                 self.warn("Failed to preserve workspace state for failed build sysroot: {}"
                           .format(e))
@@ -1893,7 +1894,7 @@ class Element(Plugin):
                 source._init_workspace(temp)
 
             # Now hardlink the files into the workspace target.
-            utils.link_files(temp, workspace.path)
+            utils.link_files(temp, workspace.get_absolute_path())
 
     # _get_workspace():
     #
=====================================
buildstream/sandbox/_sandboxbwrap.py
=====================================
@@ -89,6 +89,11 @@ class SandboxBwrap(Sandbox):
         # Grab the full path of the bwrap binary
         bwrap_command = [utils.get_host_tool('bwrap')]
 
+        for k, v in env.items():
+            bwrap_command += ['--setenv', k, v]
+        for k in os.environ.keys() - env.keys():
+            bwrap_command += ['--unsetenv', k]
+
         # Create a new pid namespace, this also ensures that any subprocesses
         # are cleaned up when the bwrap process exits.
         bwrap_command += ['--unshare-pid']
@@ -194,7 +199,7 @@ class SandboxBwrap(Sandbox):
         stdin = stack.enter_context(open(os.devnull, "r"))
 
         # Run bubblewrap !
-        exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr, env,
+        exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr,
                                    (flags & SandboxFlags.INTERACTIVE))
 
         # Cleanup things which bwrap might have left behind, while
@@ -245,7 +250,7 @@ class SandboxBwrap(Sandbox):
 
         return exit_code
 
-    def run_bwrap(self, argv, stdin, stdout, stderr, env, interactive):
+    def run_bwrap(self, argv, stdin, stdout, stderr, interactive):
         # Wrapper around subprocess.Popen() with common settings.
         #
         # This function blocks until the subprocess has terminated.
@@ -321,7 +326,6 @@ class SandboxBwrap(Sandbox):
                 # The default is to share file descriptors from the parent process
                 # to the subprocess, which is rarely good for sandboxing.
                 close_fds=True,
-                env=env,
                 stdin=stdin,
                 stdout=stdout,
                 stderr=stderr,
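Rather than handing `env=` to `subprocess.Popen()`, the sandbox environment is now encoded into the bwrap command line with `--setenv`/`--unsetenv`, so bwrap itself establishes exactly the intended variables inside the sandbox regardless of what the parent process inherits. A self-contained sketch of building those arguments:

import os

def bwrap_env_args(env):
    # Mirror the hunk above: set every sandbox variable explicitly, and
    # clear any host variable that is not part of the sandbox environment.
    args = []
    for k, v in env.items():
        args += ['--setenv', k, v]
    for k in os.environ.keys() - env.keys():
        args += ['--unsetenv', k]
    return args

print(bwrap_env_args({'PATH': '/usr/bin', 'HOME': '/buildstream'})[:4])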
=====================================
setup.py
=====================================
@@ -251,7 +251,7 @@ setup(name='BuildStream',
       install_requires=[
           'setuptools',
           'psutil',
-          'ruamel.yaml',
+          'ruamel.yaml <= 0.15',
           'pluginbase',
           'Click',
           'blessings',
=====================================
tests/frontend/track.py
=====================================
@@ -612,3 +612,25 @@ def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
     # Assert that we are now buildable because the source is
     # now cached.
     assert cli.get_element_state(project, element_name) == 'buildable'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_junction_included(cli, tmpdir, datafiles, ref_storage, kind):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    element_path = os.path.join(project, 'elements')
+    subproject_path = os.path.join(project, 'files', 'sub-project')
+    sub_element_path = os.path.join(subproject_path, 'elements')
+    junction_path = os.path.join(element_path, 'junction.bst')
+
+    configure_project(project, {
+        'ref-storage': ref_storage,
+        '(@)': ['junction.bst:test.yml']
+    })
+
+    generate_junction(str(tmpdir.join('junction_repo')),
+                      subproject_path, junction_path, store_ref=False)
+
+    result = cli.run(project=project, args=['track', 'junction.bst'])
+    result.assert_success()
=====================================
tests/frontend/workspace.py
=====================================
@@ -18,12 +18,13 @@ DATA_DIR = os.path.join(
 )
 
 
-def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
-    project = os.path.join(datafiles.dirname, datafiles.basename)
-    bin_files_path = os.path.join(project, 'files', 'bin-files')
-    element_path = os.path.join(project, 'elements')
+def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None):
+    if not workspace_dir:
+        workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
+    project_path = os.path.join(datafiles.dirname, datafiles.basename)
+    bin_files_path = os.path.join(project_path, 'files', 'bin-files')
+    element_path = os.path.join(project_path, 'elements')
     element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
-    workspace = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
 
     # Create our repo object of the given source type with
     # the bin files, and then collect the initial ref.
@@ -45,7 +46,7 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
                                                 element_name))
 
     # Assert that there is no reference, a track & fetch is needed
-    state = cli.get_element_state(project, element_name)
+    state = cli.get_element_state(project_path, element_name)
     if track:
         assert state == 'no reference'
     else:
@@ -56,20 +57,20 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
     args = ['workspace', 'open']
     if track:
         args.append('--track')
-    args.extend([element_name, workspace])
+    args.extend([element_name, workspace_dir])
+    result = cli.run(project=project_path, args=args)
 
-    result = cli.run(project=project, args=args)
     result.assert_success()
 
     # Assert that we are now buildable because the source is
     # now cached.
-    assert cli.get_element_state(project, element_name) == 'buildable'
+    assert cli.get_element_state(project_path, element_name) == 'buildable'
 
     # Check that the executable hello file is found in the workspace
-    filename = os.path.join(workspace, 'usr', 'bin', 'hello')
+    filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
     assert os.path.exists(filename)
 
-    return (element_name, project, workspace)
+    return (element_name, project_path, workspace_dir)
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -190,6 +191,46 @@ def test_close(cli, tmpdir, datafiles, kind):
     assert not os.path.exists(workspace)
 
 
+@pytest.mark.datafiles(DATA_DIR)
+def test_close_external_after_move_project(cli, tmpdir, datafiles):
+    tmp_parent = os.path.dirname(str(tmpdir))
+    workspace_dir = os.path.join(tmp_parent, "workspace")
+    element_name, project_path, _ = open_workspace(cli, tmpdir, datafiles, 'git', False, "", workspace_dir)
+    assert os.path.exists(workspace_dir)
+    tmp_dir = os.path.join(tmp_parent, 'external_project')
+    shutil.move(project_path, tmp_dir)
+    assert os.path.exists(tmp_dir)
+
+    # Close the workspace
+    result = cli.run(configure=False, project=tmp_dir, args=[
+        'workspace', 'close', '--remove-dir', element_name
+    ])
+    result.assert_success()
+
+    # Assert the workspace dir has been deleted
+    assert not os.path.exists(workspace_dir)
+    # Move directory back inside tmp directory so it can be recognised
+    shutil.move(tmp_dir, project_path)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_close_internal_after_move_project(cli, tmpdir, datafiles):
+    element_name, project, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
+    tmp_dir = os.path.join(os.path.dirname(str(tmpdir)), 'external_project')
+    shutil.move(str(tmpdir), tmp_dir)
+    assert os.path.exists(tmp_dir)
+
+    # Close the workspace
+    result = cli.run(configure=False, project=tmp_dir, args=[
+        'workspace', 'close', '--remove-dir', element_name
+    ])
+    result.assert_success()
+
+    # Assert the workspace dir has been deleted
+    workspace = os.path.join(tmp_dir, 'workspace')
+    assert not os.path.exists(workspace)
+
+
 @pytest.mark.datafiles(DATA_DIR)
 def test_close_removed(cli, tmpdir, datafiles):
     element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)