Qinusty pushed to branch Qinusty/526-fail-on-warnings at BuildStream / buildstream
Commits:
- 2d061173 by Javier Jardón at 2018-08-09T16:31:15Z
- 4e1488ee by Javier Jardón at 2018-08-09T16:31:15Z
- de955834 by Javier Jardón at 2018-08-09T17:22:24Z
- 97595585 by Jim MacArthur at 2018-08-09T17:35:19Z
- a602365c by Tristan Van Berkom at 2018-08-09T20:03:52Z
- db0478ab by Phillip Smyth at 2018-08-10T10:17:47Z
- 04cee9a9 by Phillip Smyth at 2018-08-10T11:12:31Z
- 5dcecbad by Valentin David at 2018-08-10T12:18:55Z
- 2e8db54e by Valentin David at 2018-08-10T12:53:19Z
- abe49b10 by Josh Smith at 2018-08-10T13:04:13Z
- 37334d1d by Josh Smith at 2018-08-10T13:04:13Z
- e3be4699 by Josh Smith at 2018-08-10T13:04:13Z
- 3eb2bf6f by Josh Smith at 2018-08-10T13:04:13Z
23 changed files:
- NEWS
- buildstream/_frontend/widget.py
- buildstream/_project.py
- buildstream/_stream.py
- buildstream/_versions.py
- buildstream/_workspaces.py
- buildstream/data/projectconfig.yaml
- buildstream/element.py
- buildstream/plugin.py
- buildstream/plugins/sources/git.py
- buildstream/sandbox/_sandboxbwrap.py
- doc/source/format_project.rst
- setup.py
- + tests/frontend/configurable_warnings.py
- + tests/frontend/configuredwarning/elements/corewarn.bst
- + tests/frontend/configuredwarning/elements/warninga.bst
- + tests/frontend/configuredwarning/elements/warningb.bst
- + tests/frontend/configuredwarning/plugins/corewarn.py
- + tests/frontend/configuredwarning/plugins/warninga.py
- + tests/frontend/configuredwarning/plugins/warningb.py
- + tests/frontend/configuredwarning/project.conf
- tests/frontend/overlaps.py
- tests/frontend/workspace.py
Changes:
... | ... | @@ -17,6 +17,10 @@ buildstream 1.1.5 |
17 | 17 |
to not be scheduled and fail during artifact assembly,
|
18 | 18 |
and display the retry prompt during an interactive session.
|
19 | 19 |
|
20 |
+ o Due to enabling the use of relative workspaces, "Legacy" workspaces
|
|
21 |
+ may need to be closed and remade before the changes will affect them.
|
|
22 |
+ Downgrading after using this feature may result in workspaces
|
|
23 |
+ not functioning correctly.
|
|
20 | 24 |
|
21 | 25 |
=================
|
22 | 26 |
buildstream 1.1.4
|
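A minimal sketch, in the style of this branch's frontend tests, of closing and re-opening a workspace so that a "legacy" workspace is re-recorded with the new path handling; the element name and data directory below are hypothetical:

    import os
    import pytest
    from tests.testutils.runcli import cli  # noqa: F401 - pytest fixture

    DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'project')

    @pytest.mark.datafiles(DATA_DIR)
    def test_remake_legacy_workspace(cli, tmpdir, datafiles):
        project = os.path.join(datafiles.dirname, datafiles.basename)
        workspace = os.path.join(str(tmpdir), 'workspace')

        # Close (and remove) the old workspace, then open it again so that
        # the new relative-path bookkeeping takes effect.
        result = cli.run(project=project,
                         args=['workspace', 'close', '--remove-dir', 'hello.bst'])
        result.assert_success()

        result = cli.run(project=project,
                         args=['workspace', 'open', 'hello.bst', workspace])
        result.assert_success()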
... | ... | @@ -418,7 +418,9 @@ class LogLine(Widget): |
418 | 418 |
if "%{workspace-dirs" in format_:
|
419 | 419 |
workspace = element._get_workspace()
|
420 | 420 |
if workspace is not None:
|
421 |
- path = workspace.path.replace(os.getenv('HOME', '/root'), '~')
|
|
421 |
+ path = workspace.get_absolute_path()
|
|
422 |
+ if path.startswith("~/"):
|
|
423 |
+ path = os.path.join(os.getenv('HOME', '/root'), path[2:])
|
|
422 | 424 |
line = p.fmt_subst(line, 'workspace-dirs', "Workspace: {}".format(path))
|
423 | 425 |
else:
|
424 | 426 |
line = p.fmt_subst(
|
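For reference, a minimal sketch of the substituted expansion taken in isolation: a path beginning with "~/" is joined onto the user's home directory (falling back to /root when HOME is unset), much as os.path.expanduser() would do. The path value is illustrative:

    import os

    path = "~/workspaces/hello"    # illustrative value
    if path.startswith("~/"):
        path = os.path.join(os.getenv('HOME', '/root'), path[2:])

    print(path)                                        # e.g. /home/user/workspaces/hello
    print(os.path.expanduser("~/workspaces/hello"))    # same result when HOME is set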
... | ... | @@ -19,7 +19,6 @@ |
19 | 19 |
# Tiago Gomes <tiago gomes codethink co uk>
|
20 | 20 |
|
21 | 21 |
import os
|
22 |
-import multiprocessing # for cpu_count()
|
|
23 | 22 |
from collections import Mapping, OrderedDict
|
24 | 23 |
from pluginbase import PluginBase
|
25 | 24 |
from . import utils
|
... | ... | @@ -32,6 +31,7 @@ from ._options import OptionPool |
32 | 31 |
from ._artifactcache import ArtifactCache
|
33 | 32 |
from ._elementfactory import ElementFactory
|
34 | 33 |
from ._sourcefactory import SourceFactory
|
34 |
+from .plugin import CoreWarnings
|
|
35 | 35 |
from ._projectrefs import ProjectRefs, ProjectRefStorage
|
36 | 36 |
from ._versions import BST_FORMAT_VERSION
|
37 | 37 |
from ._loader import Loader
|
... | ... | @@ -106,7 +106,7 @@ class Project(): |
106 | 106 |
self.first_pass_config = ProjectConfig()
|
107 | 107 |
|
108 | 108 |
self.junction = junction # The junction Element object, if this is a subproject
|
109 |
- self.fail_on_overlap = False # Whether overlaps are treated as errors
|
|
109 |
+ |
|
110 | 110 |
self.ref_storage = None # ProjectRefStorage setting
|
111 | 111 |
self.base_environment = {} # The base set of environment variables
|
112 | 112 |
self.base_env_nocache = None # The base nocache mask (list) for the environment
|
... | ... | @@ -121,6 +121,9 @@ class Project(): |
121 | 121 |
self._cli_options = cli_options
|
122 | 122 |
self._cache_key = None
|
123 | 123 |
|
124 |
+ self._fatal_warnings = [] # A list of warnings which should trigger an error
|
|
125 |
+ self._all_warnings_are_fatal = False # A flag set to true if all warnings should be considered fatal
|
|
126 |
+ |
|
124 | 127 |
self._shell_command = [] # The default interactive shell command
|
125 | 128 |
self._shell_environment = {} # Statically set environment vars
|
126 | 129 |
self._shell_host_files = [] # A list of HostMount objects
|
... | ... | @@ -457,7 +460,7 @@ class Project(): |
457 | 460 |
'split-rules', 'elements', 'plugins',
|
458 | 461 |
'aliases', 'name',
|
459 | 462 |
'artifacts', 'options',
|
460 |
- 'fail-on-overlap', 'shell',
|
|
463 |
+ 'fail-on-overlap', 'shell', 'fatal-warnings',
|
|
461 | 464 |
'ref-storage', 'sandbox', 'mirrors'
|
462 | 465 |
])
|
463 | 466 |
|
... | ... | @@ -479,8 +482,38 @@ class Project(): |
479 | 482 |
# Load project split rules
|
480 | 483 |
self._splits = _yaml.node_get(config, Mapping, 'split-rules')
|
481 | 484 |
|
482 |
- # Fail on overlap
|
|
483 |
- self.fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap')
|
|
485 |
+ # Fatal warnings
|
|
486 |
+ p = _yaml.node_get_provenance(config, 'fatal-warnings')
|
|
487 |
+ try: # Check for bool type
|
|
488 |
+ fatal_warnings = _yaml.node_get(config, bool, 'fatal-warnings', default_value=False)
|
|
489 |
+ except LoadError:
|
|
490 |
+ try: # Check for list type
|
|
491 |
+ fatal_warnings = _yaml.node_get(config, list, 'fatal-warnings', default_value=[])
|
|
492 |
+ except LoadError:
|
|
493 |
+ raise LoadError(LoadErrorReason.INVALID_DATA,
|
|
494 |
+ "{}: Invalid value specified for 'fatal-warnings', ".format(p) +
|
|
495 |
+ "must be list or bool.")
|
|
496 |
+ |
|
497 |
+ # Set fatal warnings
|
|
498 |
+ self._set_fatal_warnings(fatal_warnings)
|
|
499 |
+ |
|
500 |
+ # Support backwards compatibility for fail-on-overlap
|
|
501 |
+ |
|
502 |
+ fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap', default_value=None)
|
|
503 |
+ |
|
504 |
+ if CoreWarnings.OVERLAPS not in self._fatal_warnings and fail_on_overlap:
|
|
505 |
+ self._fatal_warnings.append(CoreWarnings.OVERLAPS)
|
|
506 |
+ |
|
507 |
+ # Deprecation check
|
|
508 |
+ if fail_on_overlap is not None:
|
|
509 |
+ self._context.message(
|
|
510 |
+ Message(
|
|
511 |
+ None,
|
|
512 |
+ MessageType.WARN,
|
|
513 |
+ "Use of fail-on-overlap within project.conf " +
|
|
514 |
+ "is deprecated. Consider using fatal-warnings instead."
|
|
515 |
+ )
|
|
516 |
+ )
|
|
484 | 517 |
|
485 | 518 |
# Load project.refs if it exists, this may be ignored.
|
486 | 519 |
if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
|
... | ... | @@ -572,7 +605,10 @@ class Project(): |
572 | 605 |
|
573 | 606 |
# Extend variables with automatic variables and option exports
|
574 | 607 |
# Initialize it as a string as all variables are processed as strings.
|
575 |
- output.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
|
|
608 |
+ # Based on some testing (mainly on AWS), the maximum effective
|
|
609 |
+ # max-jobs value seems to be around 8-10 if we have enough cores;
|
|
610 |
+ # users should set values based on their workload and build infrastructure.
|
|
611 |
+ output.base_variables['max-jobs'] = str(min(len(os.sched_getaffinity(0)), 8))
|
|
576 | 612 |
|
577 | 613 |
# Export options into variables, if that was requested
|
578 | 614 |
output.options.export_variables(output.base_variables)
|
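A minimal sketch of what the new default computes: unlike multiprocessing.cpu_count(), os.sched_getaffinity(0) (available on Linux) only counts the CPUs this process is actually allowed to run on, for example when confined by taskset or a container CPU set, and the result is capped at 8 per the comment above:

    import os
    import multiprocessing

    total_cpus = multiprocessing.cpu_count()      # every CPU on the machine
    usable_cpus = len(os.sched_getaffinity(0))    # CPUs available to this process (Linux)
    max_jobs = str(min(usable_cpus, 8))           # the new default for the max-jobs variable

    print(total_cpus, usable_cpus, max_jobs)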
... | ... | @@ -710,3 +746,36 @@ class Project(): |
710 | 746 |
# paths are passed in relative to the project, but must be absolute
|
711 | 747 |
origin_dict['path'] = os.path.join(self.directory, path)
|
712 | 748 |
destination.append(origin_dict)
|
749 |
+ |
|
750 |
+ # _warning_is_fatal():
|
|
751 |
+ #
|
|
752 |
+ # Returns true if the warning in question should be considered fatal based on
|
|
753 |
+ # the project configuration.
|
|
754 |
+ #
|
|
755 |
+ # Args:
|
|
756 |
+ # warning_str (str): The warning configuration string to check against
|
|
757 |
+ #
|
|
758 |
+ # Returns:
|
|
759 |
+ # (bool): True if the warning should be considered fatal and cause an error.
|
|
760 |
+ #
|
|
761 |
+ def _warning_is_fatal(self, warning_str):
|
|
762 |
+ return self._all_warnings_are_fatal or (warning_str in self._fatal_warnings)
|
|
763 |
+ |
|
764 |
+ # _set_fatal_warnings():
|
|
765 |
+ #
|
|
766 |
+ # Sets self._fatal_warnings appropriately. If warnings is of type bool, self._all_warnings_are_fatal
|
|
767 |
+ # will be set to its value.
|
|
768 |
+ #
|
|
769 |
+ # Args:
|
|
770 |
+ # warnings (list|bool): The warnings to set self._fatal_warnings to.
|
|
771 |
+ # If bool, self._all_warnings_are_fatal is set to the boolean value.
|
|
772 |
+ #     Passing None leaves the fatal warning configuration unchanged.
|
|
773 |
+ #
|
|
774 |
+ def _set_fatal_warnings(self, warnings):
|
|
775 |
+ if warnings is None:
|
|
776 |
+ return
|
|
777 |
+ elif isinstance(warnings, bool):
|
|
778 |
+ self._fatal_warnings = []
|
|
779 |
+ self._all_warnings_are_fatal = warnings
|
|
780 |
+ else:
|
|
781 |
+ self._fatal_warnings = warnings
|
... | ... | @@ -460,7 +460,7 @@ class Stream(): |
460 | 460 |
selection=PipelineSelection.REDIRECT,
|
461 | 461 |
track_selection=PipelineSelection.REDIRECT)
|
462 | 462 |
target = elements[0]
|
463 |
- workdir = os.path.abspath(directory)
|
|
463 |
+ directory = os.path.abspath(directory)
|
|
464 | 464 |
|
465 | 465 |
if not list(target.sources()):
|
466 | 466 |
build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
|
... | ... | @@ -476,7 +476,7 @@ class Stream(): |
476 | 476 |
workspace = workspaces.get_workspace(target._get_full_name())
|
477 | 477 |
if workspace and not force:
|
478 | 478 |
raise StreamError("Workspace '{}' is already defined at: {}"
|
479 |
- .format(target.name, workspace.path))
|
|
479 |
+ .format(target.name, workspace.get_absolute_path()))
|
|
480 | 480 |
|
481 | 481 |
# If we're going to checkout, we need at least a fetch,
|
482 | 482 |
# if we were asked to track first, we're going to fetch anyway.
|
... | ... | @@ -502,7 +502,7 @@ class Stream(): |
502 | 502 |
except OSError as e:
|
503 | 503 |
raise StreamError("Failed to create workspace directory: {}".format(e)) from e
|
504 | 504 |
|
505 |
- workspaces.create_workspace(target._get_full_name(), workdir)
|
|
505 |
+ workspaces.create_workspace(target._get_full_name(), directory)
|
|
506 | 506 |
|
507 | 507 |
if not no_checkout:
|
508 | 508 |
with target.timed_activity("Staging sources to {}".format(directory)):
|
... | ... | @@ -526,12 +526,12 @@ class Stream(): |
526 | 526 |
# Remove workspace directory if prompted
|
527 | 527 |
if remove_dir:
|
528 | 528 |
with self._context.timed_activity("Removing workspace directory {}"
|
529 |
- .format(workspace.path)):
|
|
529 |
+ .format(workspace.get_absolute_path())):
|
|
530 | 530 |
try:
|
531 |
- shutil.rmtree(workspace.path)
|
|
531 |
+ shutil.rmtree(workspace.get_absolute_path())
|
|
532 | 532 |
except OSError as e:
|
533 | 533 |
raise StreamError("Could not remove '{}': {}"
|
534 |
- .format(workspace.path, e)) from e
|
|
534 |
+ .format(workspace.get_absolute_path(), e)) from e
|
|
535 | 535 |
|
536 | 536 |
# Delete the workspace and save the configuration
|
537 | 537 |
workspaces.delete_workspace(element_name)
|
... | ... | @@ -574,28 +574,30 @@ class Stream(): |
574 | 574 |
|
575 | 575 |
for element in elements:
|
576 | 576 |
workspace = workspaces.get_workspace(element._get_full_name())
|
577 |
- |
|
577 |
+ workspace_path = workspace.get_absolute_path()
|
|
578 | 578 |
if soft:
|
579 | 579 |
workspace.prepared = False
|
580 | 580 |
self._message(MessageType.INFO, "Reset workspace state for {} at: {}"
|
581 |
- .format(element.name, workspace.path))
|
|
581 |
+ .format(element.name, workspace_path))
|
|
582 | 582 |
continue
|
583 | 583 |
|
584 | 584 |
with element.timed_activity("Removing workspace directory {}"
|
585 |
- .format(workspace.path)):
|
|
585 |
+ .format(workspace_path)):
|
|
586 | 586 |
try:
|
587 |
- shutil.rmtree(workspace.path)
|
|
587 |
+ shutil.rmtree(workspace_path)
|
|
588 | 588 |
except OSError as e:
|
589 | 589 |
raise StreamError("Could not remove '{}': {}"
|
590 |
- .format(workspace.path, e)) from e
|
|
590 |
+ .format(workspace_path, e)) from e
|
|
591 | 591 |
|
592 | 592 |
workspaces.delete_workspace(element._get_full_name())
|
593 |
- workspaces.create_workspace(element._get_full_name(), workspace.path)
|
|
593 |
+ workspaces.create_workspace(element._get_full_name(), workspace_path)
|
|
594 | 594 |
|
595 |
- with element.timed_activity("Staging sources to {}".format(workspace.path)):
|
|
595 |
+ with element.timed_activity("Staging sources to {}".format(workspace_path)):
|
|
596 | 596 |
element._open_workspace()
|
597 | 597 |
|
598 |
- self._message(MessageType.INFO, "Reset workspace for {} at: {}".format(element.name, workspace.path))
|
|
598 |
+ self._message(MessageType.INFO,
|
|
599 |
+ "Reset workspace for {} at: {}".format(element.name,
|
|
600 |
+ workspace_path))
|
|
599 | 601 |
|
600 | 602 |
workspaces.save_config()
|
601 | 603 |
|
... | ... | @@ -632,7 +634,7 @@ class Stream(): |
632 | 634 |
for element_name, workspace_ in self._context.get_workspaces().list():
|
633 | 635 |
workspace_detail = {
|
634 | 636 |
'element': element_name,
|
635 |
- 'directory': workspace_.path,
|
|
637 |
+ 'directory': workspace_.get_absolute_path(),
|
|
636 | 638 |
}
|
637 | 639 |
workspaces.append(workspace_detail)
|
638 | 640 |
|
... | ... | @@ -23,7 +23,7 @@ |
23 | 23 |
# This version is bumped whenever enhancements are made
|
24 | 24 |
# to the `project.conf` format or the core element format.
|
25 | 25 |
#
|
26 |
-BST_FORMAT_VERSION = 13
|
|
26 |
+BST_FORMAT_VERSION = 14
|
|
27 | 27 |
|
28 | 28 |
|
29 | 29 |
# The base BuildStream artifact version
|
... | ... | @@ -26,14 +26,6 @@ from ._exceptions import LoadError, LoadErrorReason |
26 | 26 |
|
27 | 27 |
BST_WORKSPACE_FORMAT_VERSION = 3
|
28 | 28 |
|
29 |
-# Hold on to a list of members which get serialized
|
|
30 |
-_WORKSPACE_MEMBERS = [
|
|
31 |
- 'prepared',
|
|
32 |
- 'path',
|
|
33 |
- 'last_successful',
|
|
34 |
- 'running_files'
|
|
35 |
-]
|
|
36 |
- |
|
37 | 29 |
|
38 | 30 |
# Workspace()
|
39 | 31 |
#
|
... | ... | @@ -56,7 +48,7 @@ class Workspace(): |
56 | 48 |
def __init__(self, toplevel_project, *, last_successful=None, path=None, prepared=False, running_files=None):
|
57 | 49 |
self.prepared = prepared
|
58 | 50 |
self.last_successful = last_successful
|
59 |
- self.path = path
|
|
51 |
+ self._path = path
|
|
60 | 52 |
self.running_files = running_files if running_files is not None else {}
|
61 | 53 |
|
62 | 54 |
self._toplevel_project = toplevel_project
|
... | ... | @@ -64,14 +56,20 @@ class Workspace(): |
64 | 56 |
|
65 | 57 |
# to_dict()
|
66 | 58 |
#
|
67 |
- # Convert this object to a dict for serialization purposes
|
|
59 |
+ # Convert this workspace's serializable members to a dict for serialization purposes
|
|
68 | 60 |
#
|
69 | 61 |
# Returns:
|
70 | 62 |
# (dict) A dict representation of the workspace
|
71 | 63 |
#
|
72 | 64 |
def to_dict(self):
|
73 |
- return {key: val for key, val in self.__dict__.items()
|
|
74 |
- if key in _WORKSPACE_MEMBERS and val is not None}
|
|
65 |
+ ret = {
|
|
66 |
+ 'prepared': self.prepared,
|
|
67 |
+ 'path': self._path,
|
|
68 |
+ 'running_files': self.running_files
|
|
69 |
+ }
|
|
70 |
+ if self.last_successful is not None:
|
|
71 |
+ ret["last_successful"] = self.last_successful
|
|
72 |
+ return ret
|
|
75 | 73 |
|
76 | 74 |
# from_dict():
|
77 | 75 |
#
|
... | ... | @@ -103,15 +101,7 @@ class Workspace(): |
103 | 101 |
# True if the workspace differs from 'other', otherwise False
|
104 | 102 |
#
|
105 | 103 |
def differs(self, other):
|
106 |
- |
|
107 |
- for member in _WORKSPACE_MEMBERS:
|
|
108 |
- member_a = getattr(self, member)
|
|
109 |
- member_b = getattr(other, member)
|
|
110 |
- |
|
111 |
- if member_a != member_b:
|
|
112 |
- return True
|
|
113 |
- |
|
114 |
- return False
|
|
104 |
+ return self.to_dict() != other.to_dict()
|
|
115 | 105 |
|
116 | 106 |
# invalidate_key()
|
117 | 107 |
#
|
... | ... | @@ -133,7 +123,7 @@ class Workspace(): |
133 | 123 |
if os.path.isdir(fullpath):
|
134 | 124 |
utils.copy_files(fullpath, directory)
|
135 | 125 |
else:
|
136 |
- destfile = os.path.join(directory, os.path.basename(self.path))
|
|
126 |
+ destfile = os.path.join(directory, os.path.basename(self.get_absolute_path()))
|
|
137 | 127 |
utils.safe_copy(fullpath, destfile)
|
138 | 128 |
|
139 | 129 |
# add_running_files()
|
... | ... | @@ -189,7 +179,7 @@ class Workspace(): |
189 | 179 |
filelist = utils.list_relative_paths(fullpath)
|
190 | 180 |
filelist = [(relpath, os.path.join(fullpath, relpath)) for relpath in filelist]
|
191 | 181 |
else:
|
192 |
- filelist = [(self.path, fullpath)]
|
|
182 |
+ filelist = [(self.get_absolute_path(), fullpath)]
|
|
193 | 183 |
|
194 | 184 |
self._key = [(relpath, unique_key(fullpath)) for relpath, fullpath in filelist]
|
195 | 185 |
|
... | ... | @@ -200,7 +190,7 @@ class Workspace(): |
200 | 190 |
# Returns: The absolute path of the element's workspace.
|
201 | 191 |
#
|
202 | 192 |
def get_absolute_path(self):
|
203 |
- return os.path.join(self._toplevel_project.directory, self.path)
|
|
193 |
+ return os.path.join(self._toplevel_project.directory, self._path)
|
|
204 | 194 |
|
205 | 195 |
|
206 | 196 |
# Workspaces()
|
... | ... | @@ -236,6 +226,9 @@ class Workspaces(): |
236 | 226 |
# path (str) - The path in which the workspace should be kept
|
237 | 227 |
#
|
238 | 228 |
def create_workspace(self, element_name, path):
|
229 |
+ if path.startswith(self._toplevel_project.directory):
|
|
230 |
+ path = os.path.relpath(path, self._toplevel_project.directory)
|
|
231 |
+ |
|
239 | 232 |
self._workspaces[element_name] = Workspace(self._toplevel_project, path=path)
|
240 | 233 |
|
241 | 234 |
return self._workspaces[element_name]
|
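A minimal sketch of the path handling above, assuming a hypothetical toplevel project at /home/user/project: paths inside the project are stored relative to it, external paths are kept as given, and get_absolute_path() re-joins against the project directory either way:

    import os

    project_dir = "/home/user/project"

    def stored_path(path):
        # mirrors the check added to create_workspace() above
        if path.startswith(project_dir):
            return os.path.relpath(path, project_dir)
        return path

    assert stored_path("/home/user/project/workspace") == "workspace"
    assert stored_path("/srv/workspaces/foo") == "/srv/workspaces/foo"

    # os.path.join() leaves an absolute second argument untouched, so
    # get_absolute_path() works for both stored forms:
    assert os.path.join(project_dir, "workspace") == "/home/user/project/workspace"
    assert os.path.join(project_dir, "/srv/workspaces/foo") == "/srv/workspaces/foo"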
... | ... | @@ -14,7 +14,15 @@ element-path: . |
14 | 14 |
ref-storage: inline
|
15 | 15 |
|
16 | 16 |
# Overlaps are just warnings
|
17 |
-fail-on-overlap: False
|
|
17 |
+# This has been DEPRECATED in favour of fatal-warnings
|
|
18 |
+#fail-on-overlap: False
|
|
19 |
+ |
|
20 |
+# Allows a collection of warnings to be configured to be raised as errors.
|
|
21 |
+# Setting this value to true will make all configurable warnings fatal
|
|
22 |
+# fatal-warnings: True
|
|
23 |
+ |
|
24 |
+# fatal-warnings:
|
|
25 |
+# - overlaps
|
|
18 | 26 |
|
19 | 27 |
|
20 | 28 |
# Variable Configuration
|
... | ... | @@ -94,6 +94,7 @@ from . import _cachekey |
94 | 94 |
from . import _signals
|
95 | 95 |
from . import _site
|
96 | 96 |
from ._platform import Platform
|
97 |
+from .plugin import CoreWarnings
|
|
97 | 98 |
from .sandbox._config import SandboxConfig
|
98 | 99 |
|
99 | 100 |
from .storage.directory import Directory
|
... | ... | @@ -746,32 +747,23 @@ class Element(Plugin): |
746 | 747 |
ignored[dep.name] = result.ignored
|
747 | 748 |
|
748 | 749 |
if overlaps:
|
749 |
- overlap_error = overlap_warning = False
|
|
750 |
- error_detail = warning_detail = "Staged files overwrite existing files in staging area:\n"
|
|
750 |
+ overlap_warning = False
|
|
751 |
+ warning_detail = "Staged files overwrite existing files in staging area:\n"
|
|
751 | 752 |
for f, elements in overlaps.items():
|
752 |
- overlap_error_elements = []
|
|
753 | 753 |
overlap_warning_elements = []
|
754 | 754 |
# The bottom item overlaps nothing
|
755 | 755 |
overlapping_elements = elements[1:]
|
756 | 756 |
for elm in overlapping_elements:
|
757 | 757 |
element = self.search(scope, elm)
|
758 |
- element_project = element._get_project()
|
|
759 | 758 |
if not element.__file_is_whitelisted(f):
|
760 |
- if element_project.fail_on_overlap:
|
|
761 |
- overlap_error_elements.append(elm)
|
|
762 |
- overlap_error = True
|
|
763 |
- else:
|
|
764 |
- overlap_warning_elements.append(elm)
|
|
765 |
- overlap_warning = True
|
|
759 |
+ overlap_warning_elements.append(elm)
|
|
760 |
+ overlap_warning = True
|
|
766 | 761 |
|
767 | 762 |
warning_detail += _overlap_error_detail(f, overlap_warning_elements, elements)
|
768 |
- error_detail += _overlap_error_detail(f, overlap_error_elements, elements)
|
|
769 | 763 |
|
770 | 764 |
if overlap_warning:
|
771 |
- self.warn("Non-whitelisted overlaps detected", detail=warning_detail)
|
|
772 |
- if overlap_error:
|
|
773 |
- raise ElementError("Non-whitelisted overlaps detected and fail-on-overlaps is set",
|
|
774 |
- detail=error_detail, reason="overlap-error")
|
|
765 |
+ self.warn("Non-whitelisted overlaps detected", detail=warning_detail,
|
|
766 |
+ warning_token=CoreWarnings.OVERLAPS)
|
|
775 | 767 |
|
776 | 768 |
if ignored:
|
777 | 769 |
detail = "Not staging files which would replace non-empty directories:\n"
|
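With overlaps now reported through the configurable-warnings machinery, the old failing behaviour is restored by marking the overlaps warning fatal. A minimal sketch, mirroring the approach of the new frontend tests in this branch (the project name and output path are hypothetical):

    from buildstream import _yaml
    from buildstream.plugin import CoreWarnings

    project = {
        "name": "example",
        "element-path": "elements",
        "fatal-warnings": [CoreWarnings.OVERLAPS],   # i.e. ["overlaps"]
    }
    _yaml.dump(project, "project.conf")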
... | ... | @@ -1403,7 +1395,8 @@ class Element(Plugin): |
1403 | 1395 |
# If mount_workspaces is set and we're doing incremental builds,
|
1404 | 1396 |
# the workspace is already mounted into the sandbox.
|
1405 | 1397 |
if not (mount_workspaces and self.__can_build_incrementally()):
|
1406 |
- with self.timed_activity("Staging local files at {}".format(workspace.path)):
|
|
1398 |
+ with self.timed_activity("Staging local files at {}"
|
|
1399 |
+ .format(workspace.get_absolute_path())):
|
|
1407 | 1400 |
workspace.stage(temp_staging_directory)
|
1408 | 1401 |
else:
|
1409 | 1402 |
# No workspace, stage directly
|
... | ... | @@ -1566,7 +1559,7 @@ class Element(Plugin): |
1566 | 1559 |
path_components = self.__staged_sources_directory.lstrip(os.sep).split(os.sep)
|
1567 | 1560 |
sandbox_vpath = sandbox_vroot.descend(path_components)
|
1568 | 1561 |
try:
|
1569 |
- sandbox_vpath.import_files(workspace.path)
|
|
1562 |
+ sandbox_vpath.import_files(workspace.get_absolute_path())
|
|
1570 | 1563 |
except UtilError as e:
|
1571 | 1564 |
self.warn("Failed to preserve workspace state for failed build sysroot: {}"
|
1572 | 1565 |
.format(e))
|
... | ... | @@ -1893,7 +1886,7 @@ class Element(Plugin): |
1893 | 1886 |
source._init_workspace(temp)
|
1894 | 1887 |
|
1895 | 1888 |
# Now hardlink the files into the workspace target.
|
1896 |
- utils.link_files(temp, workspace.path)
|
|
1889 |
+ utils.link_files(temp, workspace.get_absolute_path())
|
|
1897 | 1890 |
|
1898 | 1891 |
# _get_workspace():
|
1899 | 1892 |
#
|
... | ... | @@ -2054,7 +2047,7 @@ class Element(Plugin): |
2054 | 2047 |
}
|
2055 | 2048 |
|
2056 | 2049 |
# fail-on-overlap setting cannot affect elements without dependencies
|
2057 |
- if project.fail_on_overlap and dependencies:
|
|
2050 |
+ if project._warning_is_fatal(CoreWarnings.OVERLAPS) and dependencies:
|
|
2058 | 2051 |
self.__cache_key_dict['fail-on-overlap'] = True
|
2059 | 2052 |
|
2060 | 2053 |
cache_key_dict = self.__cache_key_dict.copy()
|
... | ... | @@ -47,6 +47,23 @@ it is mandatory to implement the following abstract methods: |
47 | 47 |
Once all configuration has been loaded and preflight checks have passed,
|
48 | 48 |
this method is used to inform the core of a plugin's unique configuration.
|
49 | 49 |
|
50 |
+Configurable Warnings
|
|
51 |
+---------------------
|
|
52 |
+Warnings raised through calling :func:`Plugin.warn() <buildstream.plugin.Plugin.warn>` can provide an optional
|
|
53 |
+parameter ``warning_token``. If the warning is configured as fatal within
|
|
54 |
+the project configuration, a :class:`PluginError` will be raised.
|
|
55 |
+ |
|
56 |
+Configurable warnings will be prefixed with :func:`Plugin.get_kind() <buildstream.plugin.Plugin.get_kind>`
|
|
57 |
+within BuildStream and must be prefixed as such in project configurations. For more detail on project configuration,
|
|
58 |
+see :ref:`Configurable Warnings <configurable_warnings>`.
|
|
59 |
+ |
|
60 |
+It is important to document these warnings in your plugin documentation to allow users to make full use of them
|
|
61 |
+while configuring their projects.
|
|
62 |
+ |
|
63 |
+Example
|
|
64 |
+~~~~~~~
|
|
65 |
+If the ``git.py`` plugin uses the warning ``"inconsistent-submodule"`` then it could be referenced in project
|
|
66 |
+configuration as ``"git:inconsistent-submodule"``.
|
|
50 | 67 |
|
51 | 68 |
Plugin Structure
|
52 | 69 |
----------------
|
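A minimal sketch of a plugin-defined configurable warning, modelled on the new test plugins in this branch; the element kind "mykind" and its warning token are hypothetical. With the prefixing described above, a project would list it under fatal-warnings as "mykind:questionable-layout":

    from buildstream import Element

    QUESTIONABLE_LAYOUT = "questionable-layout"


    class MyKind(Element):
        def configure(self, node):
            pass

        def preflight(self):
            pass

        def get_unique_key(self):
            pass

        def configure_sandbox(self, sandbox):
            pass

        def stage(self, sandbox):
            pass

        def assemble(self, sandbox):
            # Raised as an error if "mykind:questionable-layout" is fatal,
            # otherwise printed as a normal warning.
            self.warn("Layout looks questionable", detail="More context here",
                      warning_token=QUESTIONABLE_LAYOUT)


    def setup():
        return MyKind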
... | ... | @@ -103,6 +120,18 @@ from ._exceptions import PluginError, ImplError |
103 | 120 |
from ._message import Message, MessageType
|
104 | 121 |
|
105 | 122 |
|
123 |
+class CoreWarnings():
|
|
124 |
+ OVERLAPS = "overlaps"
|
|
125 |
+ REF_NOT_IN_TRACK = "ref-not-in-track"
|
|
126 |
+ |
|
127 |
+ |
|
128 |
+CORE_WARNINGS = [
|
|
129 |
+ value
|
|
130 |
+ for name, value in CoreWarnings.__dict__.items()
|
|
131 |
+ if not name.startswith("__")
|
|
132 |
+]
|
|
133 |
+ |
|
134 |
+ |
|
106 | 135 |
class Plugin():
|
107 | 136 |
"""Plugin()
|
108 | 137 |
|
... | ... | @@ -166,7 +195,6 @@ class Plugin(): |
166 | 195 |
# Infer the kind identifier
|
167 | 196 |
modulename = type(self).__module__
|
168 | 197 |
self.__kind = modulename.split('.')[-1]
|
169 |
- |
|
170 | 198 |
self.debug("Created: {}".format(self))
|
171 | 199 |
|
172 | 200 |
def __del__(self):
|
... | ... | @@ -473,14 +501,27 @@ class Plugin(): |
473 | 501 |
"""
|
474 | 502 |
self.__message(MessageType.INFO, brief, detail=detail)
|
475 | 503 |
|
476 |
- def warn(self, brief, *, detail=None):
|
|
477 |
- """Print a warning message
|
|
504 |
+ def warn(self, brief, *, detail=None, warning_token=None):
|
|
505 |
+ """Print a warning message, checking warning_token against the project configuration
|
|
478 | 506 |
|
479 | 507 |
Args:
|
480 | 508 |
brief (str): The brief message
|
481 | 509 |
detail (str): An optional detailed message, can be multiline output
|
510 |
+ warning_token (str): An optional configurable warning associated with this warning;
|
|
511 |
+ this will cause PluginError to be raised if this warning is configured as fatal.
|
|
512 |
+ (*Since 1.4*)
|
|
513 |
+ |
|
514 |
+ Raises:
|
|
515 |
+ (:class:`.PluginError`): When warning_token is considered fatal by the project configuration
|
|
482 | 516 |
"""
|
483 |
- self.__message(MessageType.WARN, brief, detail=detail)
|
|
517 |
+ if warning_token:
|
|
518 |
+ warning_token = _prefix_warning(self, warning_token)
|
|
519 |
+ brief = "[{}]: {}".format(warning_token, brief)
|
|
520 |
+ |
|
521 |
+ if self.__warning_is_fatal(warning_token):
|
|
522 |
+ raise PluginError(message="{}\n{}".format(brief, detail), reason=warning_token)
|
|
523 |
+ |
|
524 |
+ self.__message(MessageType.WARN, brief=brief, detail=detail)
|
|
484 | 525 |
|
485 | 526 |
def log(self, brief, *, detail=None):
|
486 | 527 |
"""Log a message into the plugin's log file
|
... | ... | @@ -708,6 +749,9 @@ class Plugin(): |
708 | 749 |
else:
|
709 | 750 |
return self.name
|
710 | 751 |
|
752 |
+ def __warning_is_fatal(self, warning):
|
|
753 |
+ return self._get_project()._warning_is_fatal(warning)
|
|
754 |
+ |
|
711 | 755 |
|
712 | 756 |
# Hold on to a lookup table by counter of all instantiated plugins.
|
713 | 757 |
# We use this to send the id back from child processes so we can lookup
|
... | ... | @@ -738,6 +782,22 @@ def _plugin_lookup(unique_id): |
738 | 782 |
assert unique_id in __PLUGINS_TABLE, "Could not find plugin with ID {}".format(unique_id)
|
739 | 783 |
return __PLUGINS_TABLE[unique_id]
|
740 | 784 |
|
785 |
+# _prefix_warning():
|
|
786 |
+#
|
|
787 |
+# Prefix a warning with the plugin kind. CoreWarnings are not prefixed.
|
|
788 |
+#
|
|
789 |
+# Args:
|
|
790 |
+# plugin (Plugin): The plugin which raised the warning
|
|
791 |
+# warning (str): The warning to prefix
|
|
792 |
+#
|
|
793 |
+# Returns:
|
|
794 |
+# (str): A prefixed warning
|
|
795 |
+#
|
|
796 |
+def _prefix_warning(plugin, warning):
|
|
797 |
+ if warning in CORE_WARNINGS:
|
|
798 |
+ return warning
|
|
799 |
+ return "{}:{}".format(plugin.get_kind(), warning)
|
|
800 |
+ |
|
741 | 801 |
|
742 | 802 |
# No need for unregister, WeakValueDictionary() will remove entries
|
743 | 803 |
# in itself when the referenced plugins are garbage collected.
|
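A minimal sketch of the prefixing rule, using a stand-in object that provides only the method _prefix_warning() needs: core warning tokens pass through unchanged, plugin tokens get a "<kind>:" prefix:

    from buildstream.plugin import CoreWarnings, _prefix_warning


    class FakeGitSource:
        # stand-in providing only what _prefix_warning() uses
        def get_kind(self):
            return "git"


    assert _prefix_warning(FakeGitSource(), CoreWarnings.OVERLAPS) == "overlaps"
    assert _prefix_warning(FakeGitSource(), "inconsistent-submodule") == "git:inconsistent-submodule"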
... | ... | @@ -68,6 +68,12 @@ git - stage files from a git repository |
68 | 68 |
url: upstream:baz.git
|
69 | 69 |
checkout: False
|
70 | 70 |
|
71 |
+**Configurable Warnings:**
|
|
72 |
+ |
|
73 |
+This plugin provides the following configurable warnings:
|
|
74 |
+ |
|
75 |
+- 'git:inconsistent-submodule' - A submodule was found to be missing from the underlying git repository.
|
|
76 |
+ |
|
71 | 77 |
"""
|
72 | 78 |
|
73 | 79 |
import os
|
... | ... | @@ -84,6 +90,9 @@ from buildstream import utils |
84 | 90 |
|
85 | 91 |
GIT_MODULES = '.gitmodules'
|
86 | 92 |
|
93 |
+# Warnings
|
|
94 |
+INCONSISTENT_SUBMODULE = "inconsistent-submodule"
|
|
95 |
+ |
|
87 | 96 |
|
88 | 97 |
# Because of handling of submodules, we maintain a GitMirror
|
89 | 98 |
# for the primary git source and also for each submodule it
|
... | ... | @@ -283,7 +292,7 @@ class GitMirror(SourceFetcher): |
283 | 292 |
"underlying git repository with `git submodule add`."
|
284 | 293 |
|
285 | 294 |
self.source.warn("{}: Ignoring inconsistent submodule '{}'"
|
286 |
- .format(self.source, submodule), detail=detail)
|
|
295 |
+ .format(self.source, submodule), detail=detail, warning_token=INCONSISTENT_SUBMODULE)
|
|
287 | 296 |
|
288 | 297 |
return None
|
289 | 298 |
|
... | ... | @@ -350,6 +359,9 @@ class GitSource(Source): |
350 | 359 |
return Consistency.RESOLVED
|
351 | 360 |
return Consistency.INCONSISTENT
|
352 | 361 |
|
362 |
+ def get_warnings(self):
|
|
363 |
+ return [INCONSISTENT_SUBMODULE]
|
|
364 |
+ |
|
353 | 365 |
def load_ref(self, node):
|
354 | 366 |
self.mirror.ref = self.node_get_member(node, str, 'ref', None)
|
355 | 367 |
|
... | ... | @@ -89,6 +89,11 @@ class SandboxBwrap(Sandbox): |
89 | 89 |
# Grab the full path of the bwrap binary
|
90 | 90 |
bwrap_command = [utils.get_host_tool('bwrap')]
|
91 | 91 |
|
92 |
+ for k, v in env.items():
|
|
93 |
+ bwrap_command += ['--setenv', k, v]
|
|
94 |
+ for k in os.environ.keys() - env.keys():
|
|
95 |
+ bwrap_command += ['--unsetenv', k]
|
|
96 |
+ |
|
92 | 97 |
# Create a new pid namespace, this also ensures that any subprocesses
|
93 | 98 |
# are cleaned up when the bwrap process exits.
|
94 | 99 |
bwrap_command += ['--unshare-pid']
|
... | ... | @@ -194,7 +199,7 @@ class SandboxBwrap(Sandbox): |
194 | 199 |
stdin = stack.enter_context(open(os.devnull, "r"))
|
195 | 200 |
|
196 | 201 |
# Run bubblewrap !
|
197 |
- exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr, env,
|
|
202 |
+ exit_code = self.run_bwrap(bwrap_command, stdin, stdout, stderr,
|
|
198 | 203 |
(flags & SandboxFlags.INTERACTIVE))
|
199 | 204 |
|
200 | 205 |
# Cleanup things which bwrap might have left behind, while
|
... | ... | @@ -245,7 +250,7 @@ class SandboxBwrap(Sandbox): |
245 | 250 |
|
246 | 251 |
return exit_code
|
247 | 252 |
|
248 |
- def run_bwrap(self, argv, stdin, stdout, stderr, env, interactive):
|
|
253 |
+ def run_bwrap(self, argv, stdin, stdout, stderr, interactive):
|
|
249 | 254 |
# Wrapper around subprocess.Popen() with common settings.
|
250 | 255 |
#
|
251 | 256 |
# This function blocks until the subprocess has terminated.
|
... | ... | @@ -321,7 +326,6 @@ class SandboxBwrap(Sandbox): |
321 | 326 |
# The default is to share file descriptors from the parent process
|
322 | 327 |
# to the subprocess, which is rarely good for sandboxing.
|
323 | 328 |
close_fds=True,
|
324 |
- env=env,
|
|
325 | 329 |
stdin=stdin,
|
326 | 330 |
stdout=stdout,
|
327 | 331 |
stderr=stderr,
|
... | ... | @@ -126,23 +126,74 @@ following to your ``project.conf``: |
126 | 126 |
The ``ref-storage`` configuration is available since :ref:`format version 8 <project_format_version>`
|
127 | 127 |
|
128 | 128 |
|
129 |
+.. _configurable_warnings:
|
|
130 |
+ |
|
131 |
+Configurable Warnings
|
|
132 |
+~~~~~~~~~~~~~~~~~~~~~
|
|
133 |
+Warnings can be configured as fatal using the ``fatal-warnings`` configuration item.
|
|
134 |
+When a warning is configured as fatal, an error will be raised where a warning would normally be issued,
|
|
135 |
+causing the build to fail.
|
|
136 |
+ |
|
137 |
+When ``fatal-warnings`` is true, all configurable warnings are treated as fatal. Individual warnings
|
|
138 |
+can also be made fatal by setting ``fatal-warnings`` to a list of warnings.
|
|
139 |
+ |
|
140 |
+.. code::
|
|
141 |
+ |
|
142 |
+ fatal-warnings:
|
|
143 |
+ - overlaps
|
|
144 |
+ - ref-not-in-track
|
|
145 |
+ - <plugin>:<warning>
|
|
146 |
+ |
|
147 |
+Core Configurable warnings include:
|
|
148 |
+ |
|
149 |
+- :ref:`overlaps <fail_on_overlaps>`
|
|
150 |
+- :ref:`ref-not-in-track <ref_not_in_track>`
|
|
151 |
+ |
|
152 |
+.. note::
|
|
153 |
+ |
|
154 |
+ The ``fatal-warnings`` configuration is available since :ref:`format version 12 <project_format_version>`
|
|
155 |
+ |
|
156 |
+.. note::
|
|
157 |
+ |
|
158 |
+ Other configurable warnings are plugin specific and should be noted within their individual documentation.
|
|
159 |
+ |
|
160 |
+.. _fail_on_overlaps:
|
|
161 |
+ |
|
129 | 162 |
Fail on overlaps
|
130 | 163 |
~~~~~~~~~~~~~~~~
|
131 | 164 |
When multiple elements are staged, there's a possibility that different
|
132 | 165 |
elements will try and stage different versions of the same file.
|
133 | 166 |
|
134 |
-When ``fail-on-overlap`` is true, if an overlap is detected
|
|
135 |
-that hasn't been allowed by the element's
|
|
136 |
-:ref:`overlap whitelist<public_overlap_whitelist>`,
|
|
137 |
-then an error will be raised and the build will fail.
|
|
167 |
+.. deprecated:: 1.4
|
|
168 |
+ |
|
138 | 169 |
|
139 |
-otherwise, a warning will be raised indicating which files had overlaps,
|
|
140 |
-and the order that the elements were overlapped.
|
|
170 |
+ When ``fail-on-overlap`` is true, if an overlap is detected
|
|
171 |
+ that hasn't been allowed by the element's
|
|
172 |
+ :ref:`overlap whitelist<public_overlap_whitelist>`,
|
|
173 |
+ then an error will be raised and the build will fail.
|
|
174 |
+ |
|
175 |
+ Otherwise, a warning will be raised indicating which files had overlaps,
|
|
176 |
+ and the order that the elements were overlapped.
|
|
141 | 177 |
|
142 | 178 |
.. code:: yaml
|
143 | 179 |
|
180 |
+ # Deprecated
|
|
144 | 181 |
fail-on-overlap: true
|
145 | 182 |
|
183 |
+.. note::
|
|
184 |
+ |
|
185 |
+ Since deprecation in :ref:`format version 12 <project_format_version>`, the recommended
|
|
186 |
+ solution to this is :ref:`Configurable Warnings <configurable_warnings>`.
|
|
187 |
+ |
|
188 |
+ When used in combination with ``fatal-warnings``, setting ``fail-on-overlap: False`` can still be overridden by ``fatal-warnings``.
|
|
189 |
+ |
|
190 |
+ |
|
191 |
+.. _ref_not_in_track:
|
|
192 |
+ |
|
193 |
+Ref not in track
|
|
194 |
+~~~~~~~~~~~~~~~~
|
|
195 |
+The configured ref was not found in the configured track (branch or tag) of the source.
|
|
196 |
+ |
|
146 | 197 |
|
147 | 198 |
.. _project_source_aliases:
|
148 | 199 |
|
... | ... | @@ -251,7 +251,7 @@ setup(name='BuildStream', |
251 | 251 |
install_requires=[
|
252 | 252 |
'setuptools',
|
253 | 253 |
'psutil',
|
254 |
- 'ruamel.yaml',
|
|
254 |
+ 'ruamel.yaml <= 0.15',
|
|
255 | 255 |
'pluginbase',
|
256 | 256 |
'Click',
|
257 | 257 |
'blessings',
|
1 |
+import pytest
|
|
2 |
+import os
|
|
3 |
+ |
|
4 |
+from buildstream.plugin import CoreWarnings
|
|
5 |
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
|
|
6 |
+from buildstream import _yaml
|
|
7 |
+from tests.testutils.runcli import cli
|
|
8 |
+ |
|
9 |
+TOP_DIR = os.path.join(
|
|
10 |
+ os.path.dirname(os.path.realpath(__file__)),
|
|
11 |
+ "configuredwarning"
|
|
12 |
+)
|
|
13 |
+ |
|
14 |
+ |
|
15 |
+def get_project(fatal_warnings):
|
|
16 |
+ return {
|
|
17 |
+ "name": "test",
|
|
18 |
+ "element-path": "elements",
|
|
19 |
+ "plugins": [
|
|
20 |
+ {
|
|
21 |
+ "origin": "local",
|
|
22 |
+ "path": "plugins",
|
|
23 |
+ "elements": {
|
|
24 |
+ "warninga": 0,
|
|
25 |
+ "warningb": 0,
|
|
26 |
+ "corewarn": 0,
|
|
27 |
+ }
|
|
28 |
+ }
|
|
29 |
+ ],
|
|
30 |
+ "fatal-warnings": fatal_warnings
|
|
31 |
+ }
|
|
32 |
+ |
|
33 |
+ |
|
34 |
+def build_project(datafiles, fatal_warnings):
|
|
35 |
+ project_path = os.path.join(datafiles.dirname, datafiles.basename)
|
|
36 |
+ |
|
37 |
+ project = get_project(fatal_warnings)
|
|
38 |
+ |
|
39 |
+ _yaml.dump(project, os.path.join(project_path, "project.conf"))
|
|
40 |
+ |
|
41 |
+ return project_path
|
|
42 |
+ |
|
43 |
+ |
|
44 |
+@pytest.mark.datafiles(TOP_DIR)
|
|
45 |
+@pytest.mark.parametrize("element_name, fatal_warnings, expect_fatal, error_domain", [
|
|
46 |
+ ("corewarn.bst", [CoreWarnings.OVERLAPS], True, ErrorDomain.STREAM),
|
|
47 |
+ ("warninga.bst", ["warninga:warning-a"], True, ErrorDomain.STREAM),
|
|
48 |
+ ("warningb.bst", ["warningb:warning-b"], True, ErrorDomain.STREAM),
|
|
49 |
+ ("corewarn.bst", [], False, None),
|
|
50 |
+ ("warninga.bst", [], False, None),
|
|
51 |
+ ("warningb.bst", [], False, None),
|
|
52 |
+ ("corewarn.bst", "true", True, ErrorDomain.STREAM),
|
|
53 |
+ ("warninga.bst", "true", True, ErrorDomain.STREAM),
|
|
54 |
+ ("warningb.bst", "true", True, ErrorDomain.STREAM),
|
|
55 |
+ ("warninga.bst", [CoreWarnings.OVERLAPS], False, None),
|
|
56 |
+ ("warningb.bst", [CoreWarnings.OVERLAPS], False, None),
|
|
57 |
+])
|
|
58 |
+def test_fatal_warnings(cli, datafiles, element_name,
|
|
59 |
+ fatal_warnings, expect_fatal, error_domain):
|
|
60 |
+ project_path = build_project(datafiles, fatal_warnings)
|
|
61 |
+ |
|
62 |
+ result = cli.run(project=project_path, args=["build", element_name])
|
|
63 |
+ if expect_fatal:
|
|
64 |
+ result.assert_main_error(error_domain, None, "Expected fatal execution")
|
|
65 |
+ else:
|
|
66 |
+ result.assert_success("Unexpected fatal execution")
|
1 |
+kind: corewarn
|
|
\ No newline at end of file |
1 |
+kind: warninga
|
1 |
+kind: warningb
|
1 |
+from buildstream import Element
|
|
2 |
+from buildstream.plugin import CoreWarnings
|
|
3 |
+ |
|
4 |
+ |
|
5 |
+class CoreWarn(Element):
|
|
6 |
+ def configure(self, node):
|
|
7 |
+ pass
|
|
8 |
+ |
|
9 |
+ def preflight(self):
|
|
10 |
+ pass
|
|
11 |
+ |
|
12 |
+ def get_unique_key(self):
|
|
13 |
+ pass
|
|
14 |
+ |
|
15 |
+ def get_warnings(self):
|
|
16 |
+ return [] # CoreWarnings should be included regardless of plugins.
|
|
17 |
+ |
|
18 |
+ def configure_sandbox(self, sandbox):
|
|
19 |
+ pass
|
|
20 |
+ |
|
21 |
+ def stage(self, sandbox):
|
|
22 |
+ pass
|
|
23 |
+ |
|
24 |
+ def assemble(self, sandbox):
|
|
25 |
+ self.warn("Testing: CoreWarning produced during assemble",
|
|
26 |
+ warning_token=CoreWarnings.OVERLAPS)
|
|
27 |
+ |
|
28 |
+ |
|
29 |
+def setup():
|
|
30 |
+ return CoreWarn
|
1 |
+from buildstream import Element
|
|
2 |
+ |
|
3 |
+WARNING_A = "warning-a"
|
|
4 |
+ |
|
5 |
+ |
|
6 |
+class WarningA(Element):
|
|
7 |
+ def configure(self, node):
|
|
8 |
+ pass
|
|
9 |
+ |
|
10 |
+ def preflight(self):
|
|
11 |
+ pass
|
|
12 |
+ |
|
13 |
+ def get_unique_key(self):
|
|
14 |
+ pass
|
|
15 |
+ |
|
16 |
+ def configure_sandbox(self, sandbox):
|
|
17 |
+ pass
|
|
18 |
+ |
|
19 |
+ def stage(self, sandbox):
|
|
20 |
+ pass
|
|
21 |
+ |
|
22 |
+ def assemble(self, sandbox):
|
|
23 |
+ self.warn("Testing: warning-a produced during assemble", warning_token=WARNING_A)
|
|
24 |
+ |
|
25 |
+ |
|
26 |
+def setup():
|
|
27 |
+ return WarningA
|
1 |
+from buildstream import Element
|
|
2 |
+ |
|
3 |
+WARNING_B = "warning-b"
|
|
4 |
+ |
|
5 |
+ |
|
6 |
+class WarningB(Element):
|
|
7 |
+ def configure(self, node):
|
|
8 |
+ pass
|
|
9 |
+ |
|
10 |
+ def preflight(self):
|
|
11 |
+ pass
|
|
12 |
+ |
|
13 |
+ def get_unique_key(self):
|
|
14 |
+ pass
|
|
15 |
+ |
|
16 |
+ def configure_sandbox(self, sandbox):
|
|
17 |
+ pass
|
|
18 |
+ |
|
19 |
+ def stage(self, sandbox):
|
|
20 |
+ pass
|
|
21 |
+ |
|
22 |
+ def assemble(self, sandbox):
|
|
23 |
+ self.warn("Testing: warning-b produced during assemble", warning_token=WARNING_B)
|
|
24 |
+ |
|
25 |
+ |
|
26 |
+def setup():
|
|
27 |
+ return WarningB
|
1 |
+name: test
|
|
2 |
+element-path: elements
|
|
3 |
+plugins:
|
|
4 |
+- origin: local
|
|
5 |
+ path: element_plugins
|
|
6 |
+ elements:
|
|
7 |
+ warninga: 0
|
|
8 |
+ warningb: 0
|
... | ... | @@ -3,6 +3,7 @@ import pytest |
3 | 3 |
from tests.testutils.runcli import cli
|
4 | 4 |
from buildstream._exceptions import ErrorDomain
|
5 | 5 |
from buildstream import _yaml
|
6 |
+from buildstream.plugin import CoreWarnings
|
|
6 | 7 |
|
7 | 8 |
# Project directory
|
8 | 9 |
DATA_DIR = os.path.join(
|
... | ... | @@ -16,30 +17,35 @@ project_template = { |
16 | 17 |
}
|
17 | 18 |
|
18 | 19 |
|
19 |
-def gen_project(project_dir, fail_on_overlap):
|
|
20 |
+def gen_project(project_dir, fail_on_overlap, use_fatal_warnings=True):
|
|
20 | 21 |
template = dict(project_template)
|
21 |
- template["fail-on-overlap"] = fail_on_overlap
|
|
22 |
+ if use_fatal_warnings:
|
|
23 |
+ template["fatal-warnings"] = [CoreWarnings.OVERLAPS] if fail_on_overlap else []
|
|
24 |
+ else:
|
|
25 |
+ template["fail-on-overlap"] = fail_on_overlap
|
|
22 | 26 |
projectfile = os.path.join(project_dir, "project.conf")
|
23 | 27 |
_yaml.dump(template, projectfile)
|
24 | 28 |
|
25 | 29 |
|
26 | 30 |
@pytest.mark.datafiles(DATA_DIR)
|
27 |
-def test_overlaps(cli, datafiles):
|
|
31 |
+@pytest.mark.parametrize("use_fatal_warnings", [True, False])
|
|
32 |
+def test_overlaps(cli, datafiles, use_fatal_warnings):
|
|
28 | 33 |
project_dir = str(datafiles)
|
29 |
- gen_project(project_dir, False)
|
|
34 |
+ gen_project(project_dir, False, use_fatal_warnings)
|
|
30 | 35 |
result = cli.run(project=project_dir, silent=True, args=[
|
31 | 36 |
'build', 'collect.bst'])
|
32 | 37 |
result.assert_success()
|
33 | 38 |
|
34 | 39 |
|
35 | 40 |
@pytest.mark.datafiles(DATA_DIR)
|
36 |
-def test_overlaps_error(cli, datafiles):
|
|
41 |
+@pytest.mark.parametrize("use_fatal_warnings", [True, False])
|
|
42 |
+def test_overlaps_error(cli, datafiles, use_fatal_warnings):
|
|
37 | 43 |
project_dir = str(datafiles)
|
38 |
- gen_project(project_dir, True)
|
|
44 |
+ gen_project(project_dir, True, use_fatal_warnings)
|
|
39 | 45 |
result = cli.run(project=project_dir, silent=True, args=[
|
40 | 46 |
'build', 'collect.bst'])
|
41 | 47 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
42 |
- result.assert_task_error(ErrorDomain.ELEMENT, "overlap-error")
|
|
48 |
+ result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.OVERLAPS)
|
|
43 | 49 |
|
44 | 50 |
|
45 | 51 |
@pytest.mark.datafiles(DATA_DIR)
|
... | ... | @@ -70,15 +76,16 @@ def test_overlaps_whitelist_on_overlapper(cli, datafiles): |
70 | 76 |
result = cli.run(project=project_dir, silent=True, args=[
|
71 | 77 |
'build', 'collect-partially-whitelisted.bst'])
|
72 | 78 |
result.assert_main_error(ErrorDomain.STREAM, None)
|
73 |
- result.assert_task_error(ErrorDomain.ELEMENT, "overlap-error")
|
|
79 |
+ result.assert_task_error(ErrorDomain.PLUGIN, CoreWarnings.OVERLAPS)
|
|
74 | 80 |
|
75 | 81 |
|
76 | 82 |
@pytest.mark.datafiles(DATA_DIR)
|
77 |
-def test_overlaps_script(cli, datafiles):
|
|
83 |
+@pytest.mark.parametrize("use_fatal_warnings", [True, False])
|
|
84 |
+def test_overlaps_script(cli, datafiles, use_fatal_warnings):
|
|
78 | 85 |
# Test overlaps with script element to test
|
79 | 86 |
# Element.stage_dependency_artifacts() with Scope.RUN
|
80 | 87 |
project_dir = str(datafiles)
|
81 |
- gen_project(project_dir, False)
|
|
88 |
+ gen_project(project_dir, False, use_fatal_warnings)
|
|
82 | 89 |
result = cli.run(project=project_dir, silent=True, args=[
|
83 | 90 |
'build', 'script.bst'])
|
84 | 91 |
result.assert_success()
|
... | ... | @@ -18,12 +18,13 @@ DATA_DIR = os.path.join( |
18 | 18 |
)
|
19 | 19 |
|
20 | 20 |
|
21 |
-def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''):
|
|
22 |
- project = os.path.join(datafiles.dirname, datafiles.basename)
|
|
23 |
- bin_files_path = os.path.join(project, 'files', 'bin-files')
|
|
24 |
- element_path = os.path.join(project, 'elements')
|
|
21 |
+def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None):
|
|
22 |
+ if not workspace_dir:
|
|
23 |
+ workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
|
|
24 |
+ project_path = os.path.join(datafiles.dirname, datafiles.basename)
|
|
25 |
+ bin_files_path = os.path.join(project_path, 'files', 'bin-files')
|
|
26 |
+ element_path = os.path.join(project_path, 'elements')
|
|
25 | 27 |
element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
|
26 |
- workspace = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
|
|
27 | 28 |
|
28 | 29 |
# Create our repo object of the given source type with
|
29 | 30 |
# the bin files, and then collect the initial ref.
|
... | ... | @@ -45,7 +46,7 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''): |
45 | 46 |
element_name))
|
46 | 47 |
|
47 | 48 |
# Assert that there is no reference, a track & fetch is needed
|
48 |
- state = cli.get_element_state(project, element_name)
|
|
49 |
+ state = cli.get_element_state(project_path, element_name)
|
|
49 | 50 |
if track:
|
50 | 51 |
assert state == 'no reference'
|
51 | 52 |
else:
|
... | ... | @@ -56,20 +57,20 @@ def open_workspace(cli, tmpdir, datafiles, kind, track, suffix=''): |
56 | 57 |
args = ['workspace', 'open']
|
57 | 58 |
if track:
|
58 | 59 |
args.append('--track')
|
59 |
- args.extend([element_name, workspace])
|
|
60 |
+ args.extend([element_name, workspace_dir])
|
|
61 |
+ result = cli.run(project=project_path, args=args)
|
|
60 | 62 |
|
61 |
- result = cli.run(project=project, args=args)
|
|
62 | 63 |
result.assert_success()
|
63 | 64 |
|
64 | 65 |
# Assert that we are now buildable because the source is
|
65 | 66 |
# now cached.
|
66 |
- assert cli.get_element_state(project, element_name) == 'buildable'
|
|
67 |
+ assert cli.get_element_state(project_path, element_name) == 'buildable'
|
|
67 | 68 |
|
68 | 69 |
# Check that the executable hello file is found in the workspace
|
69 |
- filename = os.path.join(workspace, 'usr', 'bin', 'hello')
|
|
70 |
+ filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
|
|
70 | 71 |
assert os.path.exists(filename)
|
71 | 72 |
|
72 |
- return (element_name, project, workspace)
|
|
73 |
+ return (element_name, project_path, workspace_dir)
|
|
73 | 74 |
|
74 | 75 |
|
75 | 76 |
@pytest.mark.datafiles(DATA_DIR)
|
... | ... | @@ -190,6 +191,46 @@ def test_close(cli, tmpdir, datafiles, kind): |
190 | 191 |
assert not os.path.exists(workspace)
|
191 | 192 |
|
192 | 193 |
|
194 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
195 |
+def test_close_external_after_move_project(cli, tmpdir, datafiles):
|
|
196 |
+ tmp_parent = os.path.dirname(str(tmpdir))
|
|
197 |
+ workspace_dir = os.path.join(tmp_parent, "workspace")
|
|
198 |
+ element_name, project_path, _ = open_workspace(cli, tmpdir, datafiles, 'git', False, "", workspace_dir)
|
|
199 |
+ assert os.path.exists(workspace_dir)
|
|
200 |
+ tmp_dir = os.path.join(tmp_parent, 'external_project')
|
|
201 |
+ shutil.move(project_path, tmp_dir)
|
|
202 |
+ assert os.path.exists(tmp_dir)
|
|
203 |
+ |
|
204 |
+ # Close the workspace
|
|
205 |
+ result = cli.run(configure=False, project=tmp_dir, args=[
|
|
206 |
+ 'workspace', 'close', '--remove-dir', element_name
|
|
207 |
+ ])
|
|
208 |
+ result.assert_success()
|
|
209 |
+ |
|
210 |
+ # Assert the workspace dir has been deleted
|
|
211 |
+ assert not os.path.exists(workspace_dir)
|
|
212 |
+ # Move directory back inside tmp directory so it can be recognised
|
|
213 |
+ shutil.move(tmp_dir, project_path)
|
|
214 |
+ |
|
215 |
+ |
|
216 |
+@pytest.mark.datafiles(DATA_DIR)
|
|
217 |
+def test_close_internal_after_move_project(cli, tmpdir, datafiles):
|
|
218 |
+ element_name, project, _ = open_workspace(cli, tmpdir, datafiles, 'git', False)
|
|
219 |
+ tmp_dir = os.path.join(os.path.dirname(str(tmpdir)), 'external_project')
|
|
220 |
+ shutil.move(str(tmpdir), tmp_dir)
|
|
221 |
+ assert os.path.exists(tmp_dir)
|
|
222 |
+ |
|
223 |
+ # Close the workspace
|
|
224 |
+ result = cli.run(configure=False, project=tmp_dir, args=[
|
|
225 |
+ 'workspace', 'close', '--remove-dir', element_name
|
|
226 |
+ ])
|
|
227 |
+ result.assert_success()
|
|
228 |
+ |
|
229 |
+ # Assert the workspace dir has been deleted
|
|
230 |
+ workspace = os.path.join(tmp_dir, 'workspace')
|
|
231 |
+ assert not os.path.exists(workspace)
|
|
232 |
+ |
|
233 |
+ |
|
193 | 234 |
@pytest.mark.datafiles(DATA_DIR)
|
194 | 235 |
def test_close_removed(cli, tmpdir, datafiles):
|
195 | 236 |
element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, 'git', False)
|