Phillip Smyth pushed to branch relative_workspaces at BuildStream / buildstream
Commits:
- 4d6dbabe by Tiago Gomes at 2018-08-06T15:03:25Z
- a8073264 by Tiago Gomes at 2018-08-06T16:55:03Z
- 3d26ff6e by Ed Baunton at 2018-08-07T05:04:50Z
- 60a552a5 by Tristan Van Berkom at 2018-08-07T06:13:53Z
- 01ff0177 by Javier Jardón at 2018-08-07T07:29:30Z
- 9b469fe6 by Tristan Van Berkom at 2018-08-07T09:12:46Z
- 289de2ef by Adam Jones at 2018-08-07T09:24:58Z
- e42325b9 by Javier Jardón at 2018-08-07T10:29:46Z
- 6d349610 by Javier Jardón at 2018-08-07T10:53:23Z
- 493d19d2 by James Ennis at 2018-08-07T11:08:36Z
- fc9869e2 by James Ennis at 2018-08-07T11:56:21Z
- 42aa3999 by William Salmon at 2018-08-07T13:41:02Z
- 2ceb5dec by Will Salmon at 2018-08-07T14:46:36Z
- eee4b674 by Jürg Billeter at 2018-08-07T15:36:35Z
- ea27e389 by Jürg Billeter at 2018-08-07T15:36:35Z
- fa5a59f0 by Jürg Billeter at 2018-08-07T16:48:21Z
- ed653fbc by Chandan Singh at 2018-08-08T00:06:28Z
- 65f382f1 by Chandan Singh at 2018-08-08T10:52:32Z
- c68dcab8 by Tiago Gomes at 2018-08-09T09:18:43Z
- 35ab0335 by Tiago Gomes at 2018-08-09T10:16:24Z
- 8aa33e23 by Valentin David at 2018-08-09T12:49:17Z
- ef7810f3 by Valentin David at 2018-08-09T14:06:50Z
- 2d061173 by Javier Jardón at 2018-08-09T16:31:15Z
- 4e1488ee by Javier Jardón at 2018-08-09T16:31:15Z
- de955834 by Javier Jardón at 2018-08-09T17:22:24Z
- 97595585 by Jim MacArthur at 2018-08-09T17:35:19Z
- a602365c by Tristan Van Berkom at 2018-08-09T20:03:52Z
- 78bce629 by Phillip Smyth at 2018-08-10T09:33:29Z
25 changed files:
- .gitlab-ci.yml
- NEWS
- buildstream/_artifactcache/artifactcache.py
- buildstream/_artifactcache/cascache.py
- buildstream/_artifactcache/casserver.py
- buildstream/_frontend/widget.py
- buildstream/_fuse/fuse.py
- buildstream/_fuse/hardlinks.py
- buildstream/_pipeline.py
- buildstream/_project.py
- buildstream/_scheduler/queues/pullqueue.py
- buildstream/_stream.py
- buildstream/_versions.py
- buildstream/_workspaces.py
- buildstream/data/userconfig.yaml
- buildstream/element.py
- buildstream/plugins/sources/git.py
- buildstream/plugins/sources/remote.py
- doc/source/install_artifacts.rst
- doc/source/install_linux_distro.rst
- setup.py
- tests/frontend/workspace.py
- tests/sources/git.py
- tests/sources/remote.py
- + tests/sources/remote/unique-keys/target-custom-executable.bst
Changes:

.gitlab-ci.yml

@@ -26,15 +26,6 @@ source_dist:
   - tar -ztf dist/*
   - tarball=$(cd dist && echo $(ls *))
 
-  # Create an installer script
-  - |
-    cat > dist/install.sh << EOF
-    #!/bin/sh
-    tar -zxf ${tarball}
-    cd ${tarball%.tar.gz}
-    pip3 install --no-index .
-    EOF
-
   # unpack tarball as `dist/buildstream` directory
   - |
     cat > dist/unpack.sh << EOF
@@ -44,7 +35,6 @@ source_dist:
     EOF
 
   # Make our helpers executable
-  - chmod +x dist/install.sh
   - chmod +x dist/unpack.sh
   artifacts:
     paths:

NEWS

@@ -17,6 +17,10 @@ buildstream 1.1.5
     to not be scheduled and fail during artifact assembly,
     and display the retry prompt during an interactive session.
 
+  o Due to enabling the use of relative workspaces, "Legacy" workspaces
+    may need to be closed and remade before the changes will affect them.
+    Downgrading after using this feature may result in workspaces
+    not functioning correctly
 
 =================
 buildstream 1.1.4

buildstream/_artifactcache/artifactcache.py

@@ -80,6 +80,8 @@ class ArtifactCache():
         self.context = context
         self.required_artifacts = set()
         self.extractdir = os.path.join(context.artifactdir, 'extract')
+        self.tmpdir = os.path.join(context.artifactdir, 'tmp')
+
         self.max_size = context.cache_quota
         self.estimated_size = None
 
@@ -89,7 +91,8 @@ class ArtifactCache():
         self._local = False
         self.cache_size = None
 
-        os.makedirs(context.artifactdir, exist_ok=True)
+        os.makedirs(self.extractdir, exist_ok=True)
+        os.makedirs(self.tmpdir, exist_ok=True)
 
     ################################################
     #  Methods implemented on the abstract class   #
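
Note: the new shared tmp directory sits under artifactdir, next to extract, and is created up front. A minimal sketch of why a cache-local tmpdir is typically preferred over the system /tmp (paths and names here are illustrative, not BuildStream's actual layout):

    import os
    import tempfile

    artifactdir = tempfile.mkdtemp()          # stand-in for context.artifactdir
    tmpdir = os.path.join(artifactdir, 'tmp')
    os.makedirs(tmpdir, exist_ok=True)

    # Staging into a tmpdir on the same filesystem as the final location
    # means moving the finished file into place is an atomic rename,
    # not a cross-device copy.
    with tempfile.NamedTemporaryFile(dir=tmpdir, delete=False) as out:
        out.write(b'blob contents')
    os.replace(out.name, os.path.join(artifactdir, 'some-object'))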

buildstream/_artifactcache/cascache.py

@@ -56,7 +56,8 @@ class CASCache(ArtifactCache):
         super().__init__(context)
 
         self.casdir = os.path.join(context.artifactdir, 'cas')
-        os.makedirs(os.path.join(self.casdir, 'tmp'), exist_ok=True)
+        os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
+        os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
 
         self._enable_push = enable_push
 
@@ -85,8 +86,6 @@ class CASCache(ArtifactCache):
             # artifact has already been extracted
             return dest
 
-        os.makedirs(self.extractdir, exist_ok=True)
-
        with tempfile.TemporaryDirectory(prefix='tmp', dir=self.extractdir) as tmpdir:
             checkoutdir = os.path.join(tmpdir, ref)
             self._checkout(checkoutdir, tree)
@@ -394,7 +393,7 @@ class CASCache(ArtifactCache):
         try:
             h = hashlib.sha256()
             # Always write out new file to avoid corruption if input file is modified
-            with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
+            with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
                 # Set mode bits to 0644
                 os.chmod(out.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
 
@@ -764,7 +763,7 @@ class CASCache(ArtifactCache):
             # already in local cache
             return
 
-        with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as out:
+        with tempfile.NamedTemporaryFile(dir=self.tmpdir) as out:
             self._fetch_blob(remote, tree, out)
 
             directory = remote_execution_pb2.Directory()
@@ -778,7 +777,7 @@ class CASCache(ArtifactCache):
                 # already in local cache
                 continue
 
-            with tempfile.NamedTemporaryFile(dir=os.path.join(self.casdir, 'tmp')) as f:
+            with tempfile.NamedTemporaryFile(dir=self.tmpdir) as f:
                 self._fetch_blob(remote, filenode.digest, f)
 
                 digest = self.add_object(path=f.name)
@@ -846,6 +845,9 @@ class _CASRemote():
 
 
 def _grouper(iterable, n):
-    # pylint: disable=stop-iteration-return
     while True:
-        yield itertools.chain([next(iterable)], itertools.islice(iterable, n - 1))
+        try:
+            current = next(iterable)
+        except StopIteration:
+            return
+        yield itertools.chain([current], itertools.islice(iterable, n - 1))
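
Note: the _grouper change is a PEP 479 fix. From Python 3.7 onwards, a StopIteration that escapes a generator body is converted into RuntimeError, so the old trick of letting next(iterable) terminate the `while True` loop no longer works. A runnable sketch of the fixed pattern:

    import itertools

    def grouper(iterable, n):
        # Yield chunks of n items; return cleanly when exhausted instead of
        # letting StopIteration leak out of the generator (PEP 479).
        while True:
            try:
                current = next(iterable)
            except StopIteration:
                return
            yield itertools.chain([current], itertools.islice(iterable, n - 1))

    # Each chunk must be consumed before advancing to the next one.
    print([list(chunk) for chunk in grouper(iter(range(7)), 3)])
    # -> [[0, 1, 2], [3, 4, 5], [6]]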

buildstream/_artifactcache/casserver.py

@@ -161,7 +161,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
         offset = 0
         finished = False
         resource_name = None
-        with tempfile.NamedTemporaryFile(dir=os.path.join(self.cas.casdir, 'tmp')) as out:
+        with tempfile.NamedTemporaryFile(dir=self.cas.tmpdir) as out:
             for request in request_iterator:
                 assert not finished
                 assert request.write_offset == offset

buildstream/_frontend/widget.py

@@ -418,7 +418,9 @@ class LogLine(Widget):
         if "%{workspace-dirs" in format_:
             workspace = element._get_workspace()
             if workspace is not None:
-                path = workspace.path.replace(os.getenv('HOME', '/root'), '~')
+                path = workspace.get_absolute_path()
+                if path.startswith("~/"):
+                    path = os.path.join(os.getenv('HOME', '/root'), path[2:])
                 line = p.fmt_subst(line, 'workspace-dirs', "Workspace: {}".format(path))
             else:
                 line = p.fmt_subst(
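
Note: the direction of the conversion flips here. The old code collapsed $HOME to `~` for display; the new code takes the stored path from get_absolute_path() and expands a leading `~/`. For that case, and assuming $HOME is set, the hand-rolled expansion behaves like os.path.expanduser:

    import os

    path = '~/myworkspace'                 # illustrative stored path
    if path.startswith('~/'):
        path = os.path.join(os.getenv('HOME', '/root'), path[2:])
    assert path == os.path.expanduser('~/myworkspace')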

buildstream/_fuse/fuse.py

@@ -757,7 +757,11 @@ class FUSE(object):
         if self.raw_fi:
             return self.operations('create', path, mode, fi)
         else:
-            fi.fh = self.operations('create', path, mode)
+            # This line is different from upstream to fix issues
+            # reading file opened with O_CREAT|O_RDWR.
+            # See issue #143.
+            fi.fh = self.operations('create', path, mode, fi.flags)
+            # END OF MODIFICATION
             return 0
 
     def ftruncate(self, path, length, fip):

buildstream/_fuse/hardlinks.py

@@ -185,12 +185,12 @@ class SafeHardlinkOps(Operations):
 
         return os.open(full_path, flags)
 
-    def create(self, path, mode, fi=None):
+    def create(self, path, mode, flags):
         full_path = self._full_path(path)
 
         # If it already exists, ensure it's a copy first
         self._ensure_copy(full_path)
-        return os.open(full_path, os.O_WRONLY | os.O_CREAT, mode)
+        return os.open(full_path, flags, mode)
 
     def read(self, path, length, offset, fh):
         os.lseek(fh, offset, os.SEEK_SET)
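
Note: the two FUSE hunks belong together: FUSE.create() now forwards the caller's open flags, and SafeHardlinkOps.create() honours them instead of hard-coding O_WRONLY | O_CREAT. The failure mode being fixed, sketched with plain file descriptors:

    import os
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), 'testfile')

    # Old behaviour: flags dropped, fd is write-only even for O_CREAT|O_RDWR opens.
    fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o644)
    try:
        os.read(fd, 1)
    except OSError as e:
        print('read on a write-only fd fails:', e)
    finally:
        os.close(fd)

    # Fixed behaviour: honour the requested flags, so the fd is readable.
    fd = os.open(path, os.O_RDWR | os.O_CREAT, 0o644)
    os.write(fd, b'x')
    os.lseek(fd, 0, os.SEEK_SET)
    assert os.read(fd, 1) == b'x'
    os.close(fd)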

buildstream/_pipeline.py

@@ -358,10 +358,24 @@ class Pipeline():
                 inconsistent.append(element)
 
         if inconsistent:
-            detail = "Exact versions are missing for the following elements\n" + \
-                     "Try tracking these elements first with `bst track`\n\n"
+            detail = "Exact versions are missing for the following elements:\n\n"
+
+            missingTrack = 0
             for element in inconsistent:
-                detail += "  " + element._get_full_name() + "\n"
+                detail += "  " + element._get_full_name()
+                for source in element.sources():
+                    if not source._get_consistency() and not source.get_ref():
+                        if hasattr(source, 'tracking') and source.tracking is None:
+                            detail += ": Source {} is missing ref and track. ".format(source._get_full_name()) + \
+                                      "Please specify a ref or branch/tag to track."
+                            missingTrack = 1
+
+                detail += "\n"
+
+            if missingTrack:
+                detail += "\nThen track these elements with `bst track`\n"
+            else:
+                detail += "\nTry tracking these elements first with `bst track`\n"
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
 
     #############################################################

buildstream/_project.py

@@ -19,7 +19,6 @@
 #        Tiago Gomes <tiago.gomes@codethink.co.uk>
 
 import os
-import multiprocessing   # for cpu_count()
 from collections import Mapping, OrderedDict
 from pluginbase import PluginBase
 from . import utils
@@ -572,7 +571,10 @@ class Project():
 
         # Extend variables with automatic variables and option exports
         # Initialize it as a string as all variables are processed as strings.
-        output.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
+        # Based on some testing (mainly on AWS), maximum effective
+        # max-jobs value seems to be around 8-10 if we have enough cores
+        # users should set values based on workload and build infrastructure
+        output.base_variables['max-jobs'] = str(min(len(os.sched_getaffinity(0)), 8))
 
         # Export options into variables, if that was requested
         output.options.export_variables(output.base_variables)
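
Note: unlike multiprocessing.cpu_count(), os.sched_getaffinity(0) (Linux-only) counts only the CPUs this process is actually allowed to run on, so the new default respects taskset and cgroup CPU restrictions; the result is then capped at 8 per the comment in the diff. Sketch:

    import os

    # CPUs available to *this* process, not the whole machine.
    available = len(os.sched_getaffinity(0))
    default_max_jobs = min(available, 8)
    print(default_max_jobs)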

buildstream/_scheduler/queues/pullqueue.py

@@ -29,7 +29,7 @@ class PullQueue(Queue):
 
     action_name = "Pull"
     complete_name = "Pulled"
-    resources = [ResourceType.UPLOAD]
+    resources = [ResourceType.DOWNLOAD]
 
     def process(self, element):
         # returns whether an artifact was downloaded or not

buildstream/_stream.py

@@ -460,7 +460,7 @@ class Stream():
                                              selection=PipelineSelection.REDIRECT,
                                              track_selection=PipelineSelection.REDIRECT)
         target = elements[0]
-        workdir = os.path.abspath(directory)
+        directory = os.path.abspath(directory)
 
         if not list(target.sources()):
             build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
@@ -476,7 +476,7 @@ class Stream():
         workspace = workspaces.get_workspace(target._get_full_name())
         if workspace and not force:
             raise StreamError("Workspace '{}' is already defined at: {}"
-                              .format(target.name, workspace.path))
+                              .format(target.name, workspace.get_absolute_path()))
 
         # If we're going to checkout, we need at least a fetch,
         # if we were asked to track first, we're going to fetch anyway.
@@ -502,7 +502,7 @@ class Stream():
         except OSError as e:
             raise StreamError("Failed to create workspace directory: {}".format(e)) from e
 
-        workspaces.create_workspace(target._get_full_name(), workdir)
+        workspaces.create_workspace(target._get_full_name(), directory)
 
         if not no_checkout:
             with target.timed_activity("Staging sources to {}".format(directory)):
@@ -526,12 +526,12 @@ class Stream():
         # Remove workspace directory if prompted
         if remove_dir:
             with self._context.timed_activity("Removing workspace directory {}"
-                                              .format(workspace.path)):
+                                              .format(workspace.get_absolute_path())):
                 try:
-                    shutil.rmtree(workspace.path)
+                    shutil.rmtree(workspace.get_absolute_path())
                 except OSError as e:
                     raise StreamError("Could not remove '{}': {}"
-                                      .format(workspace.path, e)) from e
+                                      .format(workspace.get_absolute_path(), e)) from e
 
         # Delete the workspace and save the configuration
         workspaces.delete_workspace(element_name)
@@ -574,28 +574,30 @@ class Stream():
 
         for element in elements:
             workspace = workspaces.get_workspace(element._get_full_name())
-
+            workspace_path = workspace.get_absolute_path()
             if soft:
                 workspace.prepared = False
                 self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
-                              .format(element.name, workspace.path))
+                              .format(element.name, workspace_path))
                 continue
 
             with element.timed_activity("Removing workspace directory {}"
-                                        .format(workspace.path)):
+                                        .format(workspace_path)):
                 try:
-                    shutil.rmtree(workspace.path)
+                    shutil.rmtree(workspace_path)
                 except OSError as e:
                     raise StreamError("Could not remove '{}': {}"
-                                      .format(workspace.path, e)) from e
+                                      .format(workspace_path, e)) from e
 
             workspaces.delete_workspace(element._get_full_name())
-            workspaces.create_workspace(element._get_full_name(), workspace.path)
+            workspaces.create_workspace(element._get_full_name(), workspace_path)
 
-            with element.timed_activity("Staging sources to {}".format(workspace.path)):
+            with element.timed_activity("Staging sources to {}".format(workspace_path)):
                 element._open_workspace()
 
-            self._message(MessageType.INFO, "Reset workspace for {} at: {}".format(element.name, workspace.path))
+            self._message(MessageType.INFO,
+                          "Reset workspace for {} at: {}".format(element.name,
+                                                                 workspace_path))
 
         workspaces.save_config()
 
@@ -632,7 +634,7 @@ class Stream():
         for element_name, workspace_ in self._context.get_workspaces().list():
             workspace_detail = {
                 'element': element_name,
-                'directory': workspace_.path,
+                'directory': workspace_.get_absolute_path(),
             }
             workspaces.append(workspace_detail)

buildstream/_versions.py

@@ -23,7 +23,7 @@
 # This version is bumped whenever enhancements are made
 # to the `project.conf` format or the core element format.
 #
-BST_FORMAT_VERSION = 12
+BST_FORMAT_VERSION = 13
 
 
 # The base BuildStream artifact version

buildstream/_workspaces.py

@@ -26,14 +26,6 @@ from ._exceptions import LoadError, LoadErrorReason
 
 BST_WORKSPACE_FORMAT_VERSION = 3
 
-# Hold on to a list of members which get serialized
-_WORKSPACE_MEMBERS = [
-    'prepared',
-    'path',
-    'last_successful',
-    'running_files'
-]
-
 
 # Workspace()
 #
@@ -56,7 +48,7 @@ class Workspace():
     def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
         self.prepared = prepared
         self.last_successful = last_successful
-        self.path = path
+        self._path = path
         self.running_files = running_files if running_files is not None else {}
 
         self._toplevel_project = toplevel_project
@@ -64,14 +56,20 @@ class Workspace():
 
     # to_dict()
     #
-    # Convert this object to a dict for serialization purposes
+    # Convert a list of members which get serialized to a dict for serialization purposes
     #
     # Returns:
     #     (dict) A dict representation of the workspace
     #
     def to_dict(self):
-        return {key: val for key, val in self.__dict__.items()
-                if key in _WORKSPACE_MEMBERS and val is not None}
+        ret = {
+            'prepared': self.prepared,
+            'path': self._path,
+            'running_files': self.running_files
+        }
+        if self.last_successful is not None:
+            ret["last_successful"] = self.last_successful
+        return ret
 
     # from_dict():
     #
@@ -103,15 +101,7 @@ class Workspace():
     #     True if the workspace differs from 'other', otherwise False
     #
     def differs(self, other):
-
-        for member in _WORKSPACE_MEMBERS:
-            member_a = getattr(self, member)
-            member_b = getattr(other, member)
-
-            if member_a != member_b:
-                return True
-
-        return False
+        return self.to_dict() != other.to_dict()
 
     # invalidate_key()
     #
@@ -133,7 +123,7 @@ class Workspace():
         if os.path.isdir(fullpath):
             utils.copy_files(fullpath, directory)
         else:
-            destfile = os.path.join(directory, os.path.basename(self.path))
+            destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
             utils.safe_copy(fullpath, destfile)
 
     # add_running_files()
@@ -189,7 +179,7 @@ class Workspace():
             filelist = utils.list_relative_paths(fullpath)
             filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
         else:
-            filelist = [(self.path, fullpath)]
+            filelist = [(self.get_absolute_path(), fullpath)]
 
         self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]
 
@@ -200,7 +190,7 @@ class Workspace():
     # Returns: The absolute path of the element's workspace.
     #
     def get_absolute_path(self):
-        return os.path.join(self._toplevel_project.directory, self.path)
+        return os.path.join(self._toplevel_project.directory, self._path)
 
 
 # Workspaces()
@@ -236,6 +226,9 @@ class Workspaces():
     #    path (str) - The path in which the workspace should be kept
     #
     def create_workspace(self, element_name, path):
+        if path.startswith(self._toplevel_project.directory):
+            path = os.path.relpath(path, self._toplevel_project.directory)
+
         self._workspaces[element_name] = Workspace(self._toplevel_project, path=path)
 
         return self._workspaces[element_name]
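
Note: create_workspace() now stores a project-relative path whenever the workspace lives inside the project, and get_absolute_path() reverses that. Workspaces outside the project keep their absolute path, which os.path.join() passes through unchanged. A sketch of the round trip (paths illustrative):

    import os

    toplevel = '/home/user/project'        # stand-in for project.directory

    def stored_path(path):
        # What create_workspace() now records.
        if path.startswith(toplevel):
            path = os.path.relpath(path, toplevel)
        return path

    def absolute_path(stored):
        # What get_absolute_path() returns; join() discards its first
        # argument when 'stored' is already absolute.
        return os.path.join(toplevel, stored)

    assert stored_path('/home/user/project/ws') == 'ws'
    assert absolute_path('ws') == '/home/user/project/ws'
    assert absolute_path('/mnt/elsewhere/ws') == '/mnt/elsewhere/ws'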

buildstream/data/userconfig.yaml

@@ -35,13 +35,13 @@ cache:
 #
 scheduler:
 
-  # Maximum number of simultaneous source downloading tasks.
+  # Maximum number of simultaneous downloading tasks.
   fetchers: 10
 
   # Maximum number of simultaneous build tasks.
   builders: 4
 
-  # Maximum number of simultaneous artifact uploading tasks.
+  # Maximum number of simultaneous uploading tasks.
   pushers: 4
 
   # Maximum number of retries for network tasks.

buildstream/element.py

@@ -1403,7 +1403,8 @@ class Element(Plugin):
                 # If mount_workspaces is set and we're doing incremental builds,
                 # the workspace is already mounted into the sandbox.
                 if not (mount_workspaces and self.__can_build_incrementally()):
-                    with self.timed_activity("Staging local files at {}".format(workspace.path)):
+                    with self.timed_activity("Staging local files at {}"
+                                             .format(workspace.get_absolute_path())):
                         workspace.stage(temp_staging_directory)
                 else:
                     # No workspace, stage directly
@@ -1411,6 +1412,7 @@ class Element(Plugin):
                         source._stage(temp_staging_directory)
 
             vdirectory.import_files(temp_staging_directory)
+
         # Ensure deterministic mtime of sources at build time
         vdirectory.set_deterministic_mtime()
         # Ensure deterministic owners of sources at build time
@@ -1566,7 +1568,7 @@ class Element(Plugin):
                 path_components = self.__staged_sources_directory.lstrip(os.sep).split(os.sep)
                 sandbox_vpath = sandbox_vroot.descend(path_components)
                 try:
-                    sandbox_vpath.import_files(workspace.path)
+                    sandbox_vpath.import_files(workspace.get_absolute_path())
                 except UtilError as e:
                     self.warn("Failed to preserve workspace state for failed build sysroot: {}"
                               .format(e))
@@ -1893,7 +1895,7 @@ class Element(Plugin):
                     source._init_workspace(temp)
 
                 # Now hardlink the files into the workspace target.
-                utils.link_files(temp, workspace.path)
+                utils.link_files(temp, workspace.get_absolute_path())
 
     # _get_workspace():
     #

buildstream/plugins/sources/git.py

@@ -363,6 +363,12 @@ class GitSource(Source):
 
         # If self.tracking is not specified it's not an error, just silently return
         if not self.tracking:
+            # Is there a better way to check if a ref is given.
+            if self.mirror.ref is None:
+                detail = 'Without a tracking branch ref can not be updated. Please ' + \
+                         'provide a ref or a track.'
+                raise SourceError("{}: No track or ref".format(self),
+                                  detail=detail, reason="track-attempt-no-track")
             return None
 
         with self.timed_activity("Tracking {} from {}"
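
Note: to avoid the new track-attempt-no-track error, a git source needs either a `track` branch/tag for `bst track` to follow, or a pinned `ref`. A sketch of either option in element YAML (URL and ref values are illustrative):

    sources:
    - kind: git
      url: upstream:repo.git
      track: master        # let `bst track` resolve and update the ref
      # or pin an exact commit instead:
      # ref: 6622e824f256de73022e50c4bae7dcf4f3d768ea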

buildstream/plugins/sources/remote.py

@@ -35,6 +35,10 @@ remote - stage files from remote urls
    # If not specified, the basename of the url will be used.
    # filename: customfilename
 
+   # Optionally specify whether the downloaded file should be
+   # marked executable.
+   # executable: true
+
    # Specify the url. Using an alias defined in your project
    # configuration is encouraged. 'bst track' will update the
    # sha256sum in 'ref' to the downloaded file's sha256sum.
@@ -43,6 +47,8 @@ remote - stage files from remote urls
    # Specify the ref. It's a sha256sum of the file you download.
    ref: 6c9f6f68a131ec6381da82f2bff978083ed7f4f7991d931bfa767b7965ebc94b
 
+
+
 .. note::
 
    The ``remote`` plugin is available since :ref:`format version 10 <project_format_version>`
@@ -60,22 +66,31 @@ class RemoteSource(DownloadableFileSource):
         super().configure(node)
 
         self.filename = self.node_get_member(node, str, 'filename', os.path.basename(self.url))
+        self.executable = self.node_get_member(node, bool, 'executable', False)
 
         if os.sep in self.filename:
             raise SourceError('{}: filename parameter cannot contain directories'.format(self),
                               reason="filename-contains-directory")
-        self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename'])
+        self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename', 'executable'])
 
     def get_unique_key(self):
-        return super().get_unique_key() + [self.filename]
+        return super().get_unique_key() + [self.filename, self.executable]
 
     def stage(self, directory):
         # Same as in local plugin, don't use hardlinks to stage sources, they
         # are not write protected in the sandbox.
         dest = os.path.join(directory, self.filename)
         with self.timed_activity("Staging remote file to {}".format(dest)):
+
             utils.safe_copy(self._get_mirror_file(), dest)
 
+            # To prevent user's umask introducing variability here, explicitly set
+            # file modes.
+            if self.executable:
+                os.chmod(dest, 0o755)
+            else:
+                os.chmod(dest, 0o644)
+
 
 def setup():
     return RemoteSource
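
Note: staging now sets file modes explicitly so the user's umask cannot leak into staged output (and hence into cache keys): `executable: true` maps to 0755 (rwxr-xr-x), the default to 0644 (rw-r--r--). The normalization on its own, as a sketch:

    import os

    def normalize_mode(dest, executable=False):
        # Fixed modes regardless of umask keep staged sources deterministic.
        os.chmod(dest, 0o755 if executable else 0o644)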

doc/source/install_artifacts.rst

@@ -143,6 +143,50 @@ Instance with push and requiring client authentication:
 
     bst-artifact-server --port 11002 --server-key server.key --server-cert server.crt --client-certs authorized.crt --enable-push /home/artifacts/artifacts
 
+Managing the cache with systemd
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+It is better to run the cache as a systemd service, especially if it is running
+on a dedicated server, as this allows systemd to manage the cache in case the
+server ever encounters any issues.
+
+Below are two examples of how to run the cache server as a systemd service;
+one is for pull only and the other is configured for push & pull.
+
+.. code:: ini
+
+   #
+   # Pull
+   #
+   [Unit]
+   Description=Buildstream Artifact pull server
+   After=remote-fs.target network-online.target
+
+   [Service]
+   Environment="LC_ALL=C.UTF-8"
+   ExecStart=/usr/local/bin/bst-artifact-server --port 11001 --server-key {{certs_path}}/privkey.pem --server-cert {{certs_path}}/fullchain.pem {{artifacts_path}}
+   User=artifacts
+
+   [Install]
+   WantedBy=multi-user.target
+
+
+   #
+   # Pull/Push
+   #
+   [Unit]
+   Description=Buildstream Artifact pull/push server
+   After=remote-fs.target network-online.target
+
+   [Service]
+   Environment="LC_ALL=C.UTF-8"
+   ExecStart=/usr/local/bin/bst-artifact-server --port 11002 --server-key {{certs_path}}/privkey.pem --server-cert {{certs_path}}/fullchain.pem --client-certs /home/artifacts/authorized.crt --enable-push /{{artifacts_path}}
+   User=artifacts
+
+   [Install]
+   WantedBy=multi-user.target
+
+Here we define when systemd should start the service (after the networking
+stack has been started); we then define how to run the cache with the desired
+configuration, under the artifacts user. The ``{{ }}`` placeholders denote
+where you should change these files to point to your desired locations.
 
 User configuration
 ~~~~~~~~~~~~~~~~~~
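
Note: assuming the unit file is installed as, say, /etc/systemd/system/bst-artifact-server.service (the file name is illustrative, not prescribed by the docs), it would be activated in the usual systemd way:

    systemctl daemon-reload
    systemctl enable --now bst-artifact-server.service
    systemctl status bst-artifact-server.service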

doc/source/install_linux_distro.rst

@@ -57,9 +57,20 @@ Install the dependencies with::
 For the default plugins::
 
   sudo pacman -S \
-  bzr git lzip ostree patch python-arpy python-gobject
+  bzr git lzip ostree patch python-gobject
 
 
+The package *python-arpy* is required by the deb source plugin. This is not
+obtainable via `pacman`, you must get *python-arpy* from AUR:
+https://aur.archlinux.org/packages/python-arpy/
+
+To install::
+
+  wget https://aur.archlinux.org/cgit/aur.git/snapshot/python-arpy.tar.gz
+  tar -xvf python-arpy.tar.gz
+  cd python-arpy
+  makepkg -si
+
 Debian
 ++++++
 Install the dependencies with::
@@ -216,17 +227,18 @@ Installing from distro packages
 
 Arch Linux
 ~~~~~~~~~~
-Install `buildstream <https://aur.archlinux.org/packages/buildstream>`_
-from `AUR <https://wiki.archlinux.org/index.php/Arch_User_Repository#Installing_packages>`_.
-Alternatively, use
-`buildstream-git <https://aur.archlinux.org/packages/buildstream-git>`_
-for the lastest version of the development branch.
+Packages for Arch exist in `AUR <https://wiki.archlinux.org/index.php/Arch_User_Repository#Installing_packages>`_.
+Two different package versions are available:
+
+* Latest release: `buildstream <https://aur.archlinux.org/packages/buildstream>`_
+* Latest development snapshot: `buildstream-git <https://aur.archlinux.org/packages/buildstream-git>`_
+
 
 Fedora
 ~~~~~~
 
 BuildStream is not yet in the official Fedora repositories, but you can
-install it from a Copr:
+install it from a Copr::
 
     sudo dnf copr enable bochecha/buildstream
     sudo dnf install buildstream

setup.py

@@ -251,7 +251,7 @@ setup(name='BuildStream',
       install_requires=[
           'setuptools',
           'psutil',
-          'ruamel.yaml',
+          'ruamel.yaml <= 0.15',
          'pluginbase',
           'Click',
           'blessings',
@@ -272,6 +272,5 @@ setup(name='BuildStream',
           'pytest-cov >= 2.5.0',
           # Provide option to run tests in parallel, less reliable
           'pytest-xdist',
-          'pytest >= 3.1.0',
-          'pylint >= 1.8 , < 2'],
+          'pytest >= 3.1.0'],
       zip_safe=False)

tests/frontend/workspace.py

@@ -18,12 +18,13 @@ DATA_DIR = os.path.join(
 )
 
 
-def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
-    project = os.path.join(datafiles.dirname, datafiles.basename)
-    bin_files_path = os.path.join(project, 'files', 'bin-files')
-    element_path = os.path.join(project, 'elements')
+def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None):
+    if not workspace_dir:
+        workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
+    project_path = os.path.join(datafiles.dirname, datafiles.basename)
+    bin_files_path = os.path.join(project_path, 'files', 'bin-files')
+    element_path = os.path.join(project_path, 'elements')
     element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
-    workspace = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
 
     # Create our repo object of the given source type with
     # the bin files, and then collect the initial ref.
@@ -45,7 +46,7 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
                                 element_name))
 
     # Assert that there is no reference, a track & fetch is needed
-    state = cli.get_element_state(project, element_name)
+    state = cli.get_element_state(project_path, element_name)
     if track:
         assert state == 'no reference'
     else:
@@ -56,20 +57,20 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
     args = ['workspace', 'open']
     if track:
         args.append('--track')
-    args.extend([element_name, workspace])
+    args.extend([element_name, workspace_dir])
+    result = cli.run(project=project_path, args=args)
 
-    result = cli.run(project=project, args=args)
     result.assert_success()
 
     # Assert that we are now buildable because the source is
     # now cached.
-    assert cli.get_element_state(project, element_name) == 'buildable'
+    assert cli.get_element_state(project_path, element_name) == 'buildable'
 
     # Check that the executable hello file is found in the workspace
-    filename = os.path.join(workspace, 'usr', 'bin', 'hello')
+    filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
     assert os.path.exists(filename)
 
-    return (element_name, project, workspace)
+    return (element_name, project_path, workspace_dir)
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -190,6 +191,45 @@ def test_close(cli, tmpdir, datafiles, kind):
     assert not os.path.exists(workspace)
 
 
+@pytest.mark.datafiles(DATA_DIR)
+def test_close_external_after_move_project(cli, tmpdir, datafiles):
+    tmp_parent = os.path.dirname(str(tmpdir))
+    workspace_dir = os.path.join(tmp_parent, "workspace")
+    element_name, project_path, _ = open_workspace(cli, tmpdir, datafiles, 'git', False, "", workspace_dir)
+    assert os.path.exists(workspace_dir)
+    tmp_dir = os.path.join(tmp_parent, 'external_project')
+    shutil.move(project_path, tmp_dir)
+    assert os.path.exists(tmp_dir)
+
+    # Close the workspace
+    result = cli.run(configure=False, project=tmp_dir, args=[
+        'workspace', 'close', '--remove-dir', element_name
+    ])
+    result.assert_success()
+
+    # Assert the workspace dir has been deleted
+    assert not os.path.exists(workspace_dir)
+    shutil.move(tmp_dir, project_path)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_close_internal_after_move_project(cli, tmpdir, datafiles):
+    element_name, project, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
+    tmp_dir = os.path.join(os.path.dirname(str(tmpdir)), 'external_project')
+    shutil.move(str(tmpdir), tmp_dir)
+    assert os.path.exists(tmp_dir)
+
+    # Close the workspace
+    result = cli.run(configure=False, project=tmp_dir, args=[
+        'workspace', 'close', '--remove-dir', element_name
+    ])
+    result.assert_success()
+
+    # Assert the workspace dir has been deleted
+    workspace = os.path.join(tmp_dir, 'workspace')
+    assert not os.path.exists(workspace)
+
+
 @pytest.mark.datafiles(DATA_DIR)
 def test_close_removed(cli, tmpdir, datafiles):
     element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)

tests/sources/git.py

@@ -359,3 +359,45 @@ def test_submodule_track_ignore_inconsistent(cli, tmpdir, datafiles):
 
     # Assert that we are just fine without it, and emit a warning to the user.
     assert "Ignoring inconsistent submodule" in result.stderr
+
+
+@pytest.mark.skipif(HAVE_GIT is False, reason="git is not available")
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+def test_submodule_track_no_ref_or_track(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+
+    # Create the repo from 'repofiles' subdir
+    repo = create_repo('git', str(tmpdir))
+    ref = repo.create(os.path.join(project, 'repofiles'))
+
+    # Write out our test target
+    gitsource = repo.source_config(ref=None)
+    gitsource.pop('track')
+    element = {
+        'kind': 'import',
+        'sources': [
+            gitsource
+        ]
+    }
+
+    _yaml.dump(element, os.path.join(project, 'target.bst'))
+
+    # Track will encounter an inconsistent submodule without any ref
+    result = cli.run(project=project, args=['track', 'target.bst'])
+    result.assert_main_error(ErrorDomain.STREAM, None)
+    result.assert_task_error(ErrorDomain.SOURCE, 'track-attempt-no-track')
+
+    # Assert that we are just fine without it, and emit a warning to the user.
+    assert "FAILURE git source at" in result.stderr
+    assert "Without a tracking branch ref can not be updated. Please " + \
+           "provide a ref or a track." in result.stderr
+
+    # Track will encounter an inconsistent submodule without any ref
+    result = cli.run(project=project, args=['build', 'target.bst'])
+    result.assert_main_error(ErrorDomain.PIPELINE, 'inconsistent-pipeline')
+    result.assert_task_error(None, None)
+
+    # Assert that we are just fine without it, and emit a warning to the user.
+    assert "Exact versions are missing for the following elements" in result.stderr
+    assert "is missing ref and track." in result.stderr
+    assert "Then track these elements with `bst track`" in result.stderr

tests/sources/remote.py

@@ -1,4 +1,5 @@
 import os
+import stat
 import pytest
 
 from buildstream._exceptions import ErrorDomain
@@ -82,7 +83,14 @@ def test_simple_file_build(cli, tmpdir, datafiles):
     result.assert_success()
     # Note that the url of the file in target.bst is actually /dir/file
     # but this tests confirms we take the basename
-    assert(os.path.exists(os.path.join(checkoutdir, 'file')))
+    checkout_file = os.path.join(checkoutdir, 'file')
+    assert(os.path.exists(checkout_file))
+
+    mode = os.stat(checkout_file).st_mode
+    # Assert not executable by anyone
+    assert(not (mode & (stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)))
+    # Assert not writeable by anyone other than me
+    assert(not (mode & (stat.S_IWGRP | stat.S_IWOTH)))
 
 
 @pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file-custom-name'))
@@ -119,6 +127,7 @@ def test_unique_key(cli, tmpdir, datafiles):
     generate_project(project, tmpdir)
     assert cli.get_element_state(project, 'target.bst') == "fetch needed"
     assert cli.get_element_state(project, 'target-custom.bst') == "fetch needed"
+    assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
     # Try to fetch it
     result = cli.run(project=project, args=[
         'fetch', 'target.bst'
@@ -127,7 +136,31 @@ def test_unique_key(cli, tmpdir, datafiles):
     # We should download the file only once
     assert cli.get_element_state(project, 'target.bst') == 'buildable'
     assert cli.get_element_state(project, 'target-custom.bst') == 'buildable'
+    assert cli.get_element_state(project, 'target-custom-executable.bst') == 'buildable'
 
     # But the cache key is different because the 'filename' is different.
     assert cli.get_element_key(project, 'target.bst') != \
-        cli.get_element_key(project, 'target-custom.bst')
+        cli.get_element_key(project, 'target-custom.bst') != \
+        cli.get_element_key(project, 'target-custom-executable.bst')
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'unique-keys'))
+def test_executable(cli, tmpdir, datafiles):
+    '''This test confirms that the 'executable' parameter is honoured.
+    '''
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    generate_project(project, tmpdir)
+    checkoutdir = os.path.join(str(tmpdir), "checkout")
+    assert cli.get_element_state(project, 'target-custom-executable.bst') == "fetch needed"
+    # Try to fetch it
+    result = cli.run(project=project, args=[
+        'build', 'target-custom-executable.bst'
+    ])
+
+    result = cli.run(project=project, args=[
+        'checkout', 'target-custom-executable.bst', checkoutdir
+    ])
+    mode = os.stat(os.path.join(checkoutdir, 'some-custom-file')).st_mode
+    assert (mode & stat.S_IEXEC)
+    # Assert executable by anyone
+    assert(mode & (stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH))
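
Note on the chained assertion in test_unique_key: Python evaluates `a != b != c` pairwise as `(a != b) and (b != c)`, so it never compares a against c. The three keys here presumably all differ anyway, but the chain is weaker than it looks:

    a, b, c = 1, 2, 1
    assert a != b != c        # passes: (a != b) and (b != c)
    assert a == c             # even though the chain above passed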

tests/sources/remote/unique-keys/target-custom-executable.bst (new file)

@@ -0,0 +1,8 @@
+kind: import
+description: test
+sources:
+- kind: remote
+  url: tmpdir:/dir/file
+  ref: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+  filename: some-custom-file
+  executable: true